From 35ce7273681713553da17292126475b9a7b19baf Mon Sep 17 00:00:00 2001
From: jfrery
Date: Wed, 1 May 2024 13:13:12 +0200
Subject: [PATCH 1/9] feat: support resnet

---
 src/concrete/ml/quantization/post_training.py |   12 +-
 use_case_examples/resnet/README.md            |   17 +
 .../resnet/folder_index_class.txt             | 1000 +++++++++
 use_case_examples/resnet/imagenet_classes.txt | 1000 +++++++++
 use_case_examples/resnet/resnet.py            |  706 ++++++
 use_case_examples/resnet/resnet_fhe.ipynb     | 1989 +++++++++++++++++
 6 files changed, 4718 insertions(+), 6 deletions(-)
 create mode 100644 use_case_examples/resnet/README.md
 create mode 100644 use_case_examples/resnet/folder_index_class.txt
 create mode 100644 use_case_examples/resnet/imagenet_classes.txt
 create mode 100644 use_case_examples/resnet/resnet.py
 create mode 100644 use_case_examples/resnet/resnet_fhe.ipynb

diff --git a/src/concrete/ml/quantization/post_training.py b/src/concrete/ml/quantization/post_training.py
index 456d839bd..9ef020c74 100644
--- a/src/concrete/ml/quantization/post_training.py
+++ b/src/concrete/ml/quantization/post_training.py
@@ -177,12 +177,12 @@ def get_n_bits_dict(n_bits: Union[int, Dict[str, int]]) -> Dict[str, int]:
 
         n_bits_dict.update(n_bits)
 
-    assert_true(
-        n_bits_dict["model_outputs"] >= n_bits_dict["op_inputs"],
-        "Using fewer bits to represent the model_outputs than the op inputs is not "
-        f"recommended. Got model_outputs: {n_bits_dict['model_outputs']} and op_inputs: "
-        f"{n_bits_dict['op_inputs']}",
-    )
+    # assert_true(
+    #     n_bits_dict["model_outputs"] >= n_bits_dict["op_inputs"],
+    #     "Using fewer bits to represent the model_outputs than the op inputs is not "
+    #     f"recommended. Got model_outputs: {n_bits_dict['model_outputs']} and op_inputs: "
+    #     f"{n_bits_dict['op_inputs']}",
+    # )
 
     return n_bits_dict
 
diff --git a/use_case_examples/resnet/README.md b/use_case_examples/resnet/README.md
new file mode 100644
index 000000000..c5719e1ec
--- /dev/null
+++ b/use_case_examples/resnet/README.md
@@ -0,0 +1,17 @@
+# ResNet in FHE
+
+## Overview
+
+`resnet.py` is taken from the torchvision implementation: https://github.com/pytorch/vision/blob/main/torchvision/models/resnet.py.
+
+The main modification is the replacement of the adaptive average pooling layer with a standard average pooling layer:
+
+```diff
+- self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
++ self.avgpool = nn.AvgPool2d(kernel_size=7, stride=1, padding=0)
+```
+
+Concrete ML does not support `AdaptiveAvgPool2d` yet.
+
+`resnet_fhe.ipynb` is a notebook that demonstrates how to compile and run a ResNet model in FHE with Concrete ML, along with figures showing the accuracy obtained at different bit-widths.
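As a quick illustration of the two points above (the pooling swap and the FHE compilation), here is a minimal sketch that is not part of the patch. It assumes 224x224 inputs, so the feature map entering the pooling layer is 7x7 and the fixed `AvgPool2d` matches `AdaptiveAvgPool2d((1, 1))`; the calibration batch and the `n_bits` value are illustrative placeholders, not the settings used in the notebook.

```python
# Minimal sketch (assumptions: 224x224 inputs, illustrative n_bits value).
import torch
import torch.nn as nn

from concrete.ml.torch.compile import compile_torch_model
from resnet import resnet18_custom

# With a 224x224 input, ResNet-18 produces 7x7 feature maps before pooling,
# so AvgPool2d(kernel_size=7) gives the same result as AdaptiveAvgPool2d((1, 1)).
features = torch.randn(1, 512, 7, 7)
assert torch.allclose(
    nn.AdaptiveAvgPool2d((1, 1))(features),
    nn.AvgPool2d(kernel_size=7, stride=1, padding=0)(features),
)

# Compile the modified ResNet-18 with Concrete ML. The random calibration
# inputs and n_bits=6 are placeholders; the notebook sweeps several bit-widths.
model = resnet18_custom(weights=None).eval()
calibration_set = torch.randn(8, 3, 224, 224)
quantized_module = compile_torch_model(model, calibration_set, n_bits=6)
```

In practice, compiling the full network takes a while, and `compile_torch_model` options such as `rounding_threshold_bits` can be used to trade accuracy for speed; the notebook should be treated as the reference for the exact parameters, and the resulting quantized module can then be evaluated in simulation or in actual FHE as shown there.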
+ diff --git a/use_case_examples/resnet/folder_index_class.txt b/use_case_examples/resnet/folder_index_class.txt new file mode 100644 index 000000000..1efe1b5a5 --- /dev/null +++ b/use_case_examples/resnet/folder_index_class.txt @@ -0,0 +1,1000 @@ +n02119789 1 kit_fox +n02100735 2 English_setter +n02110185 3 Siberian_husky +n02096294 4 Australian_terrier +n02102040 5 English_springer +n02066245 6 grey_whale +n02509815 7 lesser_panda +n02124075 8 Egyptian_cat +n02417914 9 ibex +n02123394 10 Persian_cat +n02125311 11 cougar +n02423022 12 gazelle +n02346627 13 porcupine +n02077923 14 sea_lion +n02110063 15 malamute +n02447366 16 badger +n02109047 17 Great_Dane +n02089867 18 Walker_hound +n02102177 19 Welsh_springer_spaniel +n02091134 20 whippet +n02092002 21 Scottish_deerhound +n02071294 22 killer_whale +n02442845 23 mink +n02504458 24 African_elephant +n02092339 25 Weimaraner +n02098105 26 soft-coated_wheaten_terrier +n02096437 27 Dandie_Dinmont +n02114712 28 red_wolf +n02105641 29 Old_English_sheepdog +n02128925 30 jaguar +n02091635 31 otterhound +n02088466 32 bloodhound +n02096051 33 Airedale +n02117135 34 hyena +n02138441 35 meerkat +n02097130 36 giant_schnauzer +n02493509 37 titi +n02457408 38 three-toed_sloth +n02389026 39 sorrel +n02443484 40 black-footed_ferret +n02110341 41 dalmatian +n02089078 42 black-and-tan_coonhound +n02086910 43 papillon +n02445715 44 skunk +n02093256 45 Staffordshire_bullterrier +n02113978 46 Mexican_hairless +n02106382 47 Bouvier_des_Flandres +n02441942 48 weasel +n02113712 49 miniature_poodle +n02113186 50 Cardigan +n02105162 51 malinois +n02415577 52 bighorn +n02356798 53 fox_squirrel +n02488702 54 colobus +n02123159 55 tiger_cat +n02098413 56 Lhasa +n02422699 57 impala +n02114855 58 coyote +n02094433 59 Yorkshire_terrier +n02111277 60 Newfoundland +n02132136 61 brown_bear +n02119022 62 red_fox +n02091467 63 Norwegian_elkhound +n02106550 64 Rottweiler +n02422106 65 hartebeest +n02091831 66 Saluki +n02120505 67 grey_fox +n02104365 68 schipperke +n02086079 69 Pekinese +n02112706 70 Brabancon_griffon +n02098286 71 West_Highland_white_terrier +n02095889 72 Sealyham_terrier +n02484975 73 guenon +n02137549 74 mongoose +n02500267 75 indri +n02129604 76 tiger +n02090721 77 Irish_wolfhound +n02396427 78 wild_boar +n02108000 79 EntleBucher +n02391049 80 zebra +n02412080 81 ram +n02108915 82 French_bulldog +n02480495 83 orangutan +n02110806 84 basenji +n02128385 85 leopard +n02107683 86 Bernese_mountain_dog +n02085936 87 Maltese_dog +n02094114 88 Norfolk_terrier +n02087046 89 toy_terrier +n02100583 90 vizsla +n02096177 91 cairn +n02494079 92 squirrel_monkey +n02105056 93 groenendael +n02101556 94 clumber +n02123597 95 Siamese_cat +n02481823 96 chimpanzee +n02105505 97 komondor +n02088094 98 Afghan_hound +n02085782 99 Japanese_spaniel +n02489166 100 proboscis_monkey +n02364673 101 guinea_pig +n02114548 102 white_wolf +n02134084 103 ice_bear +n02480855 104 gorilla +n02090622 105 borzoi +n02113624 106 toy_poodle +n02093859 107 Kerry_blue_terrier +n02403003 108 ox +n02097298 109 Scotch_terrier +n02108551 110 Tibetan_mastiff +n02493793 111 spider_monkey +n02107142 112 Doberman +n02096585 113 Boston_bull +n02107574 114 Greater_Swiss_Mountain_dog +n02107908 115 Appenzeller +n02086240 116 Shih-Tzu +n02102973 117 Irish_water_spaniel +n02112018 118 Pomeranian +n02093647 119 Bedlington_terrier +n02397096 120 warthog +n02437312 121 Arabian_camel +n02483708 122 siamang +n02097047 123 miniature_schnauzer +n02106030 124 collie +n02099601 125 golden_retriever +n02093991 126 
Irish_terrier +n02110627 127 affenpinscher +n02106166 128 Border_collie +n02326432 129 hare +n02108089 130 boxer +n02097658 131 silky_terrier +n02088364 132 beagle +n02111129 133 Leonberg +n02100236 134 German_short-haired_pointer +n02486261 135 patas +n02115913 136 dhole +n02486410 137 baboon +n02487347 138 macaque +n02099849 139 Chesapeake_Bay_retriever +n02108422 140 bull_mastiff +n02104029 141 kuvasz +n02492035 142 capuchin +n02110958 143 pug +n02099429 144 curly-coated_retriever +n02094258 145 Norwich_terrier +n02099267 146 flat-coated_retriever +n02395406 147 hog +n02112350 148 keeshond +n02109961 149 Eskimo_dog +n02101388 150 Brittany_spaniel +n02113799 151 standard_poodle +n02095570 152 Lakeland_terrier +n02128757 153 snow_leopard +n02101006 154 Gordon_setter +n02115641 155 dingo +n02097209 156 standard_schnauzer +n02342885 157 hamster +n02097474 158 Tibetan_terrier +n02120079 159 Arctic_fox +n02095314 160 wire-haired_fox_terrier +n02088238 161 basset +n02408429 162 water_buffalo +n02133161 163 American_black_bear +n02328150 164 Angora +n02410509 165 bison +n02492660 166 howler_monkey +n02398521 167 hippopotamus +n02112137 168 chow +n02510455 169 giant_panda +n02093428 170 American_Staffordshire_terrier +n02105855 171 Shetland_sheepdog +n02111500 172 Great_Pyrenees +n02085620 173 Chihuahua +n02123045 174 tabby +n02490219 175 marmoset +n02099712 176 Labrador_retriever +n02109525 177 Saint_Bernard +n02454379 178 armadillo +n02111889 179 Samoyed +n02088632 180 bluetick +n02090379 181 redbone +n02443114 182 polecat +n02361337 183 marmot +n02105412 184 kelpie +n02483362 185 gibbon +n02437616 186 llama +n02107312 187 miniature_pinscher +n02325366 188 wood_rabbit +n02091032 189 Italian_greyhound +n02129165 190 lion +n02102318 191 cocker_spaniel +n02100877 192 Irish_setter +n02074367 193 dugong +n02504013 194 Indian_elephant +n02363005 195 beaver +n02102480 196 Sussex_spaniel +n02113023 197 Pembroke +n02086646 198 Blenheim_spaniel +n02497673 199 Madagascar_cat +n02087394 200 Rhodesian_ridgeback +n02127052 201 lynx +n02116738 202 African_hunting_dog +n02488291 203 langur +n02091244 204 Ibizan_hound +n02114367 205 timber_wolf +n02130308 206 cheetah +n02089973 207 English_foxhound +n02105251 208 briard +n02134418 209 sloth_bear +n02093754 210 Border_terrier +n02106662 211 German_shepherd +n02444819 212 otter +n01882714 213 koala +n01871265 214 tusker +n01872401 215 echidna +n01877812 216 wallaby +n01873310 217 platypus +n01883070 218 wombat +n04086273 219 revolver +n04507155 220 umbrella +n04147183 221 schooner +n04254680 222 soccer_ball +n02672831 223 accordion +n02219486 224 ant +n02317335 225 starfish +n01968897 226 chambered_nautilus +n03452741 227 grand_piano +n03642806 228 laptop +n07745940 229 strawberry +n02690373 230 airliner +n04552348 231 warplane +n02692877 232 airship +n02782093 233 balloon +n04266014 234 space_shuttle +n03344393 235 fireboat +n03447447 236 gondola +n04273569 237 speedboat +n03662601 238 lifeboat +n02951358 239 canoe +n04612504 240 yawl +n02981792 241 catamaran +n04483307 242 trimaran +n03095699 243 container_ship +n03673027 244 liner +n03947888 245 pirate +n02687172 246 aircraft_carrier +n04347754 247 submarine +n04606251 248 wreck +n03478589 249 half_track +n04389033 250 tank +n03773504 251 missile +n02860847 252 bobsled +n03218198 253 dogsled +n02835271 254 bicycle-built-for-two +n03792782 255 mountain_bike +n03393912 256 freight_car +n03895866 257 passenger_car +n02797295 258 barrow +n04204347 259 shopping_cart +n03791053 260 motor_scooter +n03384352 261 
forklift +n03272562 262 electric_locomotive +n04310018 263 steam_locomotive +n02704792 264 amphibian +n02701002 265 ambulance +n02814533 266 beach_wagon +n02930766 267 cab +n03100240 268 convertible +n03594945 269 jeep +n03670208 270 limousine +n03770679 271 minivan +n03777568 272 Model_T +n04037443 273 racer +n04285008 274 sports_car +n03444034 275 go-kart +n03445924 276 golfcart +n03785016 277 moped +n04252225 278 snowplow +n03345487 279 fire_engine +n03417042 280 garbage_truck +n03930630 281 pickup +n04461696 282 tow_truck +n04467665 283 trailer_truck +n03796401 284 moving_van +n03977966 285 police_van +n04065272 286 recreational_vehicle +n04335435 287 streetcar +n04252077 288 snowmobile +n04465501 289 tractor +n03776460 290 mobile_home +n04482393 291 tricycle +n04509417 292 unicycle +n03538406 293 horse_cart +n03599486 294 jinrikisha +n03868242 295 oxcart +n02804414 296 bassinet +n03125729 297 cradle +n03131574 298 crib +n03388549 299 four-poster +n02870880 300 bookcase +n03018349 301 china_cabinet +n03742115 302 medicine_chest +n03016953 303 chiffonier +n04380533 304 table_lamp +n03337140 305 file +n03891251 306 park_bench +n02791124 307 barber_chair +n04429376 308 throne +n03376595 309 folding_chair +n04099969 310 rocking_chair +n04344873 311 studio_couch +n04447861 312 toilet_seat +n03179701 313 desk +n03982430 314 pool_table +n03201208 315 dining_table +n03290653 316 entertainment_center +n04550184 317 wardrobe +n07742313 318 Granny_Smith +n07747607 319 orange +n07749582 320 lemon +n07753113 321 fig +n07753275 322 pineapple +n07753592 323 banana +n07754684 324 jackfruit +n07760859 325 custard_apple +n07768694 326 pomegranate +n12267677 327 acorn +n12620546 328 hip +n13133613 329 ear +n11879895 330 rapeseed +n12144580 331 corn +n12768682 332 buckeye +n03854065 333 organ +n04515003 334 upright +n03017168 335 chime +n03249569 336 drum +n03447721 337 gong +n03720891 338 maraca +n03721384 339 marimba +n04311174 340 steel_drum +n02787622 341 banjo +n02992211 342 cello +n04536866 343 violin +n03495258 344 harp +n02676566 345 acoustic_guitar +n03272010 346 electric_guitar +n03110669 347 cornet +n03394916 348 French_horn +n04487394 349 trombone +n03494278 350 harmonica +n03840681 351 ocarina +n03884397 352 panpipe +n02804610 353 bassoon +n03838899 354 oboe +n04141076 355 sax +n03372029 356 flute +n11939491 357 daisy +n12057211 358 yellow_lady's_slipper +n09246464 359 cliff +n09468604 360 valley +n09193705 361 alp +n09472597 362 volcano +n09399592 363 promontory +n09421951 364 sandbar +n09256479 365 coral_reef +n09332890 366 lakeside +n09428293 367 seashore +n09288635 368 geyser +n03498962 369 hatchet +n03041632 370 cleaver +n03658185 371 letter_opener +n03954731 372 plane +n03995372 373 power_drill +n03649909 374 lawn_mower +n03481172 375 hammer +n03109150 376 corkscrew +n02951585 377 can_opener +n03970156 378 plunger +n04154565 379 screwdriver +n04208210 380 shovel +n03967562 381 plow +n03000684 382 chain_saw +n01514668 383 cock +n01514859 384 hen +n01518878 385 ostrich +n01530575 386 brambling +n01531178 387 goldfinch +n01532829 388 house_finch +n01534433 389 junco +n01537544 390 indigo_bunting +n01558993 391 robin +n01560419 392 bulbul +n01580077 393 jay +n01582220 394 magpie +n01592084 395 chickadee +n01601694 396 water_ouzel +n01608432 397 kite +n01614925 398 bald_eagle +n01616318 399 vulture +n01622779 400 great_grey_owl +n01795545 401 black_grouse +n01796340 402 ptarmigan +n01797886 403 ruffed_grouse +n01798484 404 prairie_chicken +n01806143 405 peacock +n01806567 406 quail 
+n01807496 407 partridge +n01817953 408 African_grey +n01818515 409 macaw +n01819313 410 sulphur-crested_cockatoo +n01820546 411 lorikeet +n01824575 412 coucal +n01828970 413 bee_eater +n01829413 414 hornbill +n01833805 415 hummingbird +n01843065 416 jacamar +n01843383 417 toucan +n01847000 418 drake +n01855032 419 red-breasted_merganser +n01855672 420 goose +n01860187 421 black_swan +n02002556 422 white_stork +n02002724 423 black_stork +n02006656 424 spoonbill +n02007558 425 flamingo +n02009912 426 American_egret +n02009229 427 little_blue_heron +n02011460 428 bittern +n02012849 429 crane +n02013706 430 limpkin +n02018207 431 American_coot +n02018795 432 bustard +n02025239 433 ruddy_turnstone +n02027492 434 red-backed_sandpiper +n02028035 435 redshank +n02033041 436 dowitcher +n02037110 437 oystercatcher +n02017213 438 European_gallinule +n02051845 439 pelican +n02056570 440 king_penguin +n02058221 441 albatross +n01484850 442 great_white_shark +n01491361 443 tiger_shark +n01494475 444 hammerhead +n01496331 445 electric_ray +n01498041 446 stingray +n02514041 447 barracouta +n02536864 448 coho +n01440764 449 tench +n01443537 450 goldfish +n02526121 451 eel +n02606052 452 rock_beauty +n02607072 453 anemone_fish +n02643566 454 lionfish +n02655020 455 puffer +n02640242 456 sturgeon +n02641379 457 gar +n01664065 458 loggerhead +n01665541 459 leatherback_turtle +n01667114 460 mud_turtle +n01667778 461 terrapin +n01669191 462 box_turtle +n01675722 463 banded_gecko +n01677366 464 common_iguana +n01682714 465 American_chameleon +n01685808 466 whiptail +n01687978 467 agama +n01688243 468 frilled_lizard +n01689811 469 alligator_lizard +n01692333 470 Gila_monster +n01693334 471 green_lizard +n01694178 472 African_chameleon +n01695060 473 Komodo_dragon +n01704323 474 triceratops +n01697457 475 African_crocodile +n01698640 476 American_alligator +n01728572 477 thunder_snake +n01728920 478 ringneck_snake +n01729322 479 hognose_snake +n01729977 480 green_snake +n01734418 481 king_snake +n01735189 482 garter_snake +n01737021 483 water_snake +n01739381 484 vine_snake +n01740131 485 night_snake +n01742172 486 boa_constrictor +n01744401 487 rock_python +n01748264 488 Indian_cobra +n01749939 489 green_mamba +n01751748 490 sea_snake +n01753488 491 horned_viper +n01755581 492 diamondback +n01756291 493 sidewinder +n01629819 494 European_fire_salamander +n01630670 495 common_newt +n01631663 496 eft +n01632458 497 spotted_salamander +n01632777 498 axolotl +n01641577 499 bullfrog +n01644373 500 tree_frog +n01644900 501 tailed_frog +n04579432 502 whistle +n04592741 503 wing +n03876231 504 paintbrush +n03483316 505 hand_blower +n03868863 506 oxygen_mask +n04251144 507 snorkel +n03691459 508 loudspeaker +n03759954 509 microphone +n04152593 510 screen +n03793489 511 mouse +n03271574 512 electric_fan +n03843555 513 oil_filter +n04332243 514 strainer +n04265275 515 space_heater +n04330267 516 stove +n03467068 517 guillotine +n02794156 518 barometer +n04118776 519 rule +n03841143 520 odometer +n04141975 521 scale +n02708093 522 analog_clock +n03196217 523 digital_clock +n04548280 524 wall_clock +n03544143 525 hourglass +n04355338 526 sundial +n03891332 527 parking_meter +n04328186 528 stopwatch +n03197337 529 digital_watch +n04317175 530 stethoscope +n04376876 531 syringe +n03706229 532 magnetic_compass +n02841315 533 binoculars +n04009552 534 projector +n04356056 535 sunglasses +n03692522 536 loupe +n04044716 537 radio_telescope +n02879718 538 bow +n02950826 539 cannon +n02749479 540 assault_rifle +n04090263 541 rifle 
+n04008634 542 projectile +n03085013 543 computer_keyboard +n04505470 544 typewriter_keyboard +n03126707 545 crane +n03666591 546 lighter +n02666196 547 abacus +n02977058 548 cash_machine +n04238763 549 slide_rule +n03180011 550 desktop_computer +n03485407 551 hand-held_computer +n03832673 552 notebook +n06359193 553 web_site +n03496892 554 harvester +n04428191 555 thresher +n04004767 556 printer +n04243546 557 slot +n04525305 558 vending_machine +n04179913 559 sewing_machine +n03602883 560 joystick +n04372370 561 switch +n03532672 562 hook +n02974003 563 car_wheel +n03874293 564 paddlewheel +n03944341 565 pinwheel +n03992509 566 potter's_wheel +n03425413 567 gas_pump +n02966193 568 carousel +n04371774 569 swing +n04067472 570 reel +n04040759 571 radiator +n04019541 572 puck +n03492542 573 hard_disc +n04355933 574 sunglass +n03929660 575 pick +n02965783 576 car_mirror +n04258138 577 solar_dish +n04074963 578 remote_control +n03208938 579 disk_brake +n02910353 580 buckle +n03476684 581 hair_slide +n03627232 582 knot +n03075370 583 combination_lock +n03874599 584 padlock +n03804744 585 nail +n04127249 586 safety_pin +n04153751 587 screw +n03803284 588 muzzle +n04162706 589 seat_belt +n04228054 590 ski +n02948072 591 candle +n03590841 592 jack-o'-lantern +n04286575 593 spotlight +n04456115 594 torch +n03814639 595 neck_brace +n03933933 596 pier +n04485082 597 tripod +n03733131 598 maypole +n03794056 599 mousetrap +n04275548 600 spider_web +n01768244 601 trilobite +n01770081 602 harvestman +n01770393 603 scorpion +n01773157 604 black_and_gold_garden_spider +n01773549 605 barn_spider +n01773797 606 garden_spider +n01774384 607 black_widow +n01774750 608 tarantula +n01775062 609 wolf_spider +n01776313 610 tick +n01784675 611 centipede +n01990800 612 isopod +n01978287 613 Dungeness_crab +n01978455 614 rock_crab +n01980166 615 fiddler_crab +n01981276 616 king_crab +n01983481 617 American_lobster +n01984695 618 spiny_lobster +n01985128 619 crayfish +n01986214 620 hermit_crab +n02165105 621 tiger_beetle +n02165456 622 ladybug +n02167151 623 ground_beetle +n02168699 624 long-horned_beetle +n02169497 625 leaf_beetle +n02172182 626 dung_beetle +n02174001 627 rhinoceros_beetle +n02177972 628 weevil +n02190166 629 fly +n02206856 630 bee +n02226429 631 grasshopper +n02229544 632 cricket +n02231487 633 walking_stick +n02233338 634 cockroach +n02236044 635 mantis +n02256656 636 cicada +n02259212 637 leafhopper +n02264363 638 lacewing +n02268443 639 dragonfly +n02268853 640 damselfly +n02276258 641 admiral +n02277742 642 ringlet +n02279972 643 monarch +n02280649 644 cabbage_butterfly +n02281406 645 sulphur_butterfly +n02281787 646 lycaenid +n01910747 647 jellyfish +n01914609 648 sea_anemone +n01917289 649 brain_coral +n01924916 650 flatworm +n01930112 651 nematode +n01943899 652 conch +n01944390 653 snail +n01945685 654 slug +n01950731 655 sea_slug +n01955084 656 chiton +n02319095 657 sea_urchin +n02321529 658 sea_cucumber +n03584829 659 iron +n03297495 660 espresso_maker +n03761084 661 microwave +n03259280 662 Dutch_oven +n04111531 663 rotisserie +n04442312 664 toaster +n04542943 665 waffle_iron +n04517823 666 vacuum +n03207941 667 dishwasher +n04070727 668 refrigerator +n04554684 669 washer +n03133878 670 Crock_Pot +n03400231 671 frying_pan +n04596742 672 wok +n02939185 673 caldron +n03063689 674 coffeepot +n04398044 675 teapot +n04270147 676 spatula +n02699494 677 altar +n04486054 678 triumphal_arch +n03899768 679 patio +n04311004 680 steel_arch_bridge +n04366367 681 suspension_bridge +n04532670 682 
viaduct +n02793495 683 barn +n03457902 684 greenhouse +n03877845 685 palace +n03781244 686 monastery +n03661043 687 library +n02727426 688 apiary +n02859443 689 boathouse +n03028079 690 church +n03788195 691 mosque +n04346328 692 stupa +n03956157 693 planetarium +n04081281 694 restaurant +n03032252 695 cinema +n03529860 696 home_theater +n03697007 697 lumbermill +n03065424 698 coil +n03837869 699 obelisk +n04458633 700 totem_pole +n02980441 701 castle +n04005630 702 prison +n03461385 703 grocery_store +n02776631 704 bakery +n02791270 705 barbershop +n02871525 706 bookshop +n02927161 707 butcher_shop +n03089624 708 confectionery +n04200800 709 shoe_shop +n04443257 710 tobacco_shop +n04462240 711 toyshop +n03388043 712 fountain +n03042490 713 cliff_dwelling +n04613696 714 yurt +n03216828 715 dock +n02892201 716 brass +n03743016 717 megalith +n02788148 718 bannister +n02894605 719 breakwater +n03160309 720 dam +n03000134 721 chainlink_fence +n03930313 722 picket_fence +n04604644 723 worm_fence +n04326547 724 stone_wall +n03459775 725 grille +n04239074 726 sliding_door +n04501370 727 turnstile +n03792972 728 mountain_tent +n04149813 729 scoreboard +n03530642 730 honeycomb +n03961711 731 plate_rack +n03903868 732 pedestal +n02814860 733 beacon +n07711569 734 mashed_potato +n07720875 735 bell_pepper +n07714571 736 head_cabbage +n07714990 737 broccoli +n07715103 738 cauliflower +n07716358 739 zucchini +n07716906 740 spaghetti_squash +n07717410 741 acorn_squash +n07717556 742 butternut_squash +n07718472 743 cucumber +n07718747 744 artichoke +n07730033 745 cardoon +n07734744 746 mushroom +n04209239 747 shower_curtain +n03594734 748 jean +n02971356 749 carton +n03485794 750 handkerchief +n04133789 751 sandal +n02747177 752 ashcan +n04125021 753 safe +n07579787 754 plate +n03814906 755 necklace +n03134739 756 croquet_ball +n03404251 757 fur_coat +n04423845 758 thimble +n03877472 759 pajama +n04120489 760 running_shoe +n03062245 761 cocktail_shaker +n03014705 762 chest +n03717622 763 manhole_cover +n03777754 764 modem +n04493381 765 tub +n04476259 766 tray +n02777292 767 balance_beam +n07693725 768 bagel +n03998194 769 prayer_rug +n03617480 770 kimono +n07590611 771 hot_pot +n04579145 772 whiskey_jug +n03623198 773 knee_pad +n07248320 774 book_jacket +n04277352 775 spindle +n04229816 776 ski_mask +n02823428 777 beer_bottle +n03127747 778 crash_helmet +n02877765 779 bottlecap +n04435653 780 tile_roof +n03724870 781 mask +n03710637 782 maillot +n03920288 783 Petri_dish +n03379051 784 football_helmet +n02807133 785 bathing_cap +n04399382 786 teddy +n03527444 787 holster +n03983396 788 pop_bottle +n03924679 789 photocopier +n04532106 790 vestment +n06785654 791 crossword_puzzle +n03445777 792 golf_ball +n07613480 793 trifle +n04350905 794 suit +n04562935 795 water_tower +n03325584 796 feather_boa +n03045698 797 cloak +n07892512 798 red_wine +n03250847 799 drumstick +n04192698 800 shield +n03026506 801 Christmas_stocking +n03534580 802 hoopskirt +n07565083 803 menu +n04296562 804 stage +n02869837 805 bonnet +n07871810 806 meat_loaf +n02799071 807 baseball +n03314780 808 face_powder +n04141327 809 scabbard +n04357314 810 sunscreen +n02823750 811 beer_glass +n13052670 812 hen-of-the-woods +n07583066 813 guacamole +n03637318 814 lampshade +n04599235 815 wool +n07802026 816 hay +n02883205 817 bow_tie +n03709823 818 mailbag +n04560804 819 water_jug +n02909870 820 bucket +n03207743 821 dishrag +n04263257 822 soup_bowl +n07932039 823 eggnog +n03786901 824 mortar +n04479046 825 trench_coat +n03873416 826 paddle 
+n02999410 827 chain +n04367480 828 swab +n03775546 829 mixing_bowl +n07875152 830 potpie +n04591713 831 wine_bottle +n04201297 832 shoji +n02916936 833 bulletproof_vest +n03240683 834 drilling_platform +n02840245 835 binder +n02963159 836 cardigan +n04370456 837 sweatshirt +n03991062 838 pot +n02843684 839 birdhouse +n03482405 840 hamper +n03942813 841 ping-pong_ball +n03908618 842 pencil_box +n03902125 843 pay-phone +n07584110 844 consomme +n02730930 845 apron +n04023962 846 punching_bag +n02769748 847 backpack +n10148035 848 groom +n02817516 849 bearskin +n03908714 850 pencil_sharpener +n02906734 851 broom +n03788365 852 mosquito_net +n02667093 853 abaya +n03787032 854 mortarboard +n03980874 855 poncho +n03141823 856 crutch +n03976467 857 Polaroid_camera +n04264628 858 space_bar +n07930864 859 cup +n04039381 860 racket +n06874185 861 traffic_light +n04033901 862 quill +n04041544 863 radio +n07860988 864 dough +n03146219 865 cuirass +n03763968 866 military_uniform +n03676483 867 lipstick +n04209133 868 shower_cap +n03782006 869 monitor +n03857828 870 oscilloscope +n03775071 871 mitten +n02892767 872 brassiere +n07684084 873 French_loaf +n04522168 874 vase +n03764736 875 milk_can +n04118538 876 rugby_ball +n03887697 877 paper_towel +n13044778 878 earthstar +n03291819 879 envelope +n03770439 880 miniskirt +n03124170 881 cowboy_hat +n04487081 882 trolleybus +n03916031 883 perfume +n02808440 884 bathtub +n07697537 885 hotdog +n12985857 886 coral_fungus +n02917067 887 bullet_train +n03938244 888 pillow +n15075141 889 toilet_tissue +n02978881 890 cassette +n02966687 891 carpenter's_kit +n03633091 892 ladle +n13040303 893 stinkhorn +n03690938 894 lotion +n03476991 895 hair_spray +n02669723 896 academic_gown +n03220513 897 dome +n03127925 898 crate +n04584207 899 wig +n07880968 900 burrito +n03937543 901 pill_bottle +n03000247 902 chain_mail +n04418357 903 theater_curtain +n04590129 904 window_shade +n02795169 905 barrel +n04553703 906 washbasin +n02783161 907 ballpoint +n02802426 908 basketball +n02808304 909 bath_towel +n03124043 910 cowboy_boot +n03450230 911 gown +n04589890 912 window_screen +n12998815 913 agaric +n02992529 914 cellular_telephone +n03825788 915 nipple +n02790996 916 barbell +n03710193 917 mailbox +n03630383 918 lab_coat +n03347037 919 fire_screen +n03769881 920 minibus +n03871628 921 packet +n03733281 922 maze +n03976657 923 pole +n03535780 924 horizontal_bar +n04259630 925 sombrero +n03929855 926 pickelhaube +n04049303 927 rain_barrel +n04548362 928 wallet +n02979186 929 cassette_player +n06596364 930 comic_book +n03935335 931 piggy_bank +n06794110 932 street_sign +n02825657 933 bell_cote +n03388183 934 fountain_pen +n04591157 935 Windsor_tie +n04540053 936 volleyball +n03866082 937 overskirt +n04136333 938 sarong +n04026417 939 purse +n02865351 940 bolo_tie +n02834397 941 bib +n03888257 942 parachute +n04235860 943 sleeping_bag +n04404412 944 television +n04371430 945 swimming_trunks +n03733805 946 measuring_cup +n07920052 947 espresso +n07873807 948 pizza +n02895154 949 breastplate +n04204238 950 shopping_basket +n04597913 951 wooden_spoon +n04131690 952 saltshaker +n07836838 953 chocolate_sauce +n09835506 954 ballplayer +n03443371 955 goblet +n13037406 956 gyromitra +n04336792 957 stretcher +n04557648 958 water_bottle +n03187595 959 dial_telephone +n04254120 960 soap_dispenser +n03595614 961 jersey +n04146614 962 school_bus +n03598930 963 jigsaw_puzzle +n03958227 964 plastic_bag +n04069434 965 reflex_camera +n03188531 966 diaper +n02786058 967 Band_Aid +n07615774 968 
ice_lolly +n04525038 969 velvet +n04409515 970 tennis_ball +n03424325 971 gasmask +n03223299 972 doormat +n03680355 973 Loafer +n07614500 974 ice_cream +n07695742 975 pretzel +n04033995 976 quilt +n03710721 977 maillot +n04392985 978 tape_player +n03047690 979 clog +n03584254 980 iPod +n13054560 981 bolete +n10565667 982 scuba_diver +n03950228 983 pitcher +n03729826 984 matchstick +n02837789 985 bikini +n04254777 986 sock +n02988304 987 CD_player +n03657121 988 lens_cap +n04417672 989 thatch +n04523525 990 vault +n02815834 991 beaker +n09229709 992 bubble +n07697313 993 cheeseburger +n03888605 994 parallel_bars +n03355925 995 flagpole +n03063599 996 coffee_mug +n04116512 997 rubber_eraser +n04325704 998 stole +n07831146 999 carbonara +n03255030 1000 dumbbell \ No newline at end of file diff --git a/use_case_examples/resnet/imagenet_classes.txt b/use_case_examples/resnet/imagenet_classes.txt new file mode 100644 index 000000000..888d6f51d --- /dev/null +++ b/use_case_examples/resnet/imagenet_classes.txt @@ -0,0 +1,1000 @@ +tench +goldfish +great white shark +tiger shark +hammerhead +electric ray +stingray +cock +hen +ostrich +brambling +goldfinch +house finch +junco +indigo bunting +robin +bulbul +jay +magpie +chickadee +water ouzel +kite +bald eagle +vulture +great grey owl +European fire salamander +common newt +eft +spotted salamander +axolotl +bullfrog +tree frog +tailed frog +loggerhead +leatherback turtle +mud turtle +terrapin +box turtle +banded gecko +common iguana +American chameleon +whiptail +agama +frilled lizard +alligator lizard +Gila monster +green lizard +African chameleon +Komodo dragon +African crocodile +American alligator +triceratops +thunder snake +ringneck snake +hognose snake +green snake +king snake +garter snake +water snake +vine snake +night snake +boa constrictor +rock python +Indian cobra +green mamba +sea snake +horned viper +diamondback +sidewinder +trilobite +harvestman +scorpion +black and gold garden spider +barn spider +garden spider +black widow +tarantula +wolf spider +tick +centipede +black grouse +ptarmigan +ruffed grouse +prairie chicken +peacock +quail +partridge +African grey +macaw +sulphur-crested cockatoo +lorikeet +coucal +bee eater +hornbill +hummingbird +jacamar +toucan +drake +red-breasted merganser +goose +black swan +tusker +echidna +platypus +wallaby +koala +wombat +jellyfish +sea anemone +brain coral +flatworm +nematode +conch +snail +slug +sea slug +chiton +chambered nautilus +Dungeness crab +rock crab +fiddler crab +king crab +American lobster +spiny lobster +crayfish +hermit crab +isopod +white stork +black stork +spoonbill +flamingo +little blue heron +American egret +bittern +crane +limpkin +European gallinule +American coot +bustard +ruddy turnstone +red-backed sandpiper +redshank +dowitcher +oystercatcher +pelican +king penguin +albatross +grey whale +killer whale +dugong +sea lion +Chihuahua +Japanese spaniel +Maltese dog +Pekinese +Shih-Tzu +Blenheim spaniel +papillon +toy terrier +Rhodesian ridgeback +Afghan hound +basset +beagle +bloodhound +bluetick +black-and-tan coonhound +Walker hound +English foxhound +redbone +borzoi +Irish wolfhound +Italian greyhound +whippet +Ibizan hound +Norwegian elkhound +otterhound +Saluki +Scottish deerhound +Weimaraner +Staffordshire bullterrier +American Staffordshire terrier +Bedlington terrier +Border terrier +Kerry blue terrier +Irish terrier +Norfolk terrier +Norwich terrier +Yorkshire terrier +wire-haired fox terrier +Lakeland terrier +Sealyham terrier +Airedale +cairn +Australian terrier 
+Dandie Dinmont +Boston bull +miniature schnauzer +giant schnauzer +standard schnauzer +Scotch terrier +Tibetan terrier +silky terrier +soft-coated wheaten terrier +West Highland white terrier +Lhasa +flat-coated retriever +curly-coated retriever +golden retriever +Labrador retriever +Chesapeake Bay retriever +German short-haired pointer +vizsla +English setter +Irish setter +Gordon setter +Brittany spaniel +clumber +English springer +Welsh springer spaniel +cocker spaniel +Sussex spaniel +Irish water spaniel +kuvasz +schipperke +groenendael +malinois +briard +kelpie +komondor +Old English sheepdog +Shetland sheepdog +collie +Border collie +Bouvier des Flandres +Rottweiler +German shepherd +Doberman +miniature pinscher +Greater Swiss Mountain dog +Bernese mountain dog +Appenzeller +EntleBucher +boxer +bull mastiff +Tibetan mastiff +French bulldog +Great Dane +Saint Bernard +Eskimo dog +malamute +Siberian husky +dalmatian +affenpinscher +basenji +pug +Leonberg +Newfoundland +Great Pyrenees +Samoyed +Pomeranian +chow +keeshond +Brabancon griffon +Pembroke +Cardigan +toy poodle +miniature poodle +standard poodle +Mexican hairless +timber wolf +white wolf +red wolf +coyote +dingo +dhole +African hunting dog +hyena +red fox +kit fox +Arctic fox +grey fox +tabby +tiger cat +Persian cat +Siamese cat +Egyptian cat +cougar +lynx +leopard +snow leopard +jaguar +lion +tiger +cheetah +brown bear +American black bear +ice bear +sloth bear +mongoose +meerkat +tiger beetle +ladybug +ground beetle +long-horned beetle +leaf beetle +dung beetle +rhinoceros beetle +weevil +fly +bee +ant +grasshopper +cricket +walking stick +cockroach +mantis +cicada +leafhopper +lacewing +dragonfly +damselfly +admiral +ringlet +monarch +cabbage butterfly +sulphur butterfly +lycaenid +starfish +sea urchin +sea cucumber +wood rabbit +hare +Angora +hamster +porcupine +fox squirrel +marmot +beaver +guinea pig +sorrel +zebra +hog +wild boar +warthog +hippopotamus +ox +water buffalo +bison +ram +bighorn +ibex +hartebeest +impala +gazelle +Arabian camel +llama +weasel +mink +polecat +black-footed ferret +otter +skunk +badger +armadillo +three-toed sloth +orangutan +gorilla +chimpanzee +gibbon +siamang +guenon +patas +baboon +macaque +langur +colobus +proboscis monkey +marmoset +capuchin +howler monkey +titi +spider monkey +squirrel monkey +Madagascar cat +indri +Indian elephant +African elephant +lesser panda +giant panda +barracouta +eel +coho +rock beauty +anemone fish +sturgeon +gar +lionfish +puffer +abacus +abaya +academic gown +accordion +acoustic guitar +aircraft carrier +airliner +airship +altar +ambulance +amphibian +analog clock +apiary +apron +ashcan +assault rifle +backpack +bakery +balance beam +balloon +ballpoint +Band Aid +banjo +bannister +barbell +barber chair +barbershop +barn +barometer +barrel +barrow +baseball +basketball +bassinet +bassoon +bathing cap +bath towel +bathtub +beach wagon +beacon +beaker +bearskin +beer bottle +beer glass +bell cote +bib +bicycle-built-for-two +bikini +binder +binoculars +birdhouse +boathouse +bobsled +bolo tie +bonnet +bookcase +bookshop +bottlecap +bow +bow tie +brass +brassiere +breakwater +breastplate +broom +bucket +buckle +bulletproof vest +bullet train +butcher shop +cab +caldron +candle +cannon +canoe +can opener +cardigan +car mirror +carousel +carpenter's kit +carton +car wheel +cash machine +cassette +cassette player +castle +catamaran +CD player +cello +cellular telephone +chain +chainlink fence +chain mail +chain saw +chest +chiffonier +chime +china cabinet 
+Christmas stocking +church +cinema +cleaver +cliff dwelling +cloak +clog +cocktail shaker +coffee mug +coffeepot +coil +combination lock +computer keyboard +confectionery +container ship +convertible +corkscrew +cornet +cowboy boot +cowboy hat +cradle +crane +crash helmet +crate +crib +Crock Pot +croquet ball +crutch +cuirass +dam +desk +desktop computer +dial telephone +diaper +digital clock +digital watch +dining table +dishrag +dishwasher +disk brake +dock +dogsled +dome +doormat +drilling platform +drum +drumstick +dumbbell +Dutch oven +electric fan +electric guitar +electric locomotive +entertainment center +envelope +espresso maker +face powder +feather boa +file +fireboat +fire engine +fire screen +flagpole +flute +folding chair +football helmet +forklift +fountain +fountain pen +four-poster +freight car +French horn +frying pan +fur coat +garbage truck +gasmask +gas pump +goblet +go-kart +golf ball +golfcart +gondola +gong +gown +grand piano +greenhouse +grille +grocery store +guillotine +hair slide +hair spray +half track +hammer +hamper +hand blower +hand-held computer +handkerchief +hard disc +harmonica +harp +harvester +hatchet +holster +home theater +honeycomb +hook +hoopskirt +horizontal bar +horse cart +hourglass +iPod +iron +jack-o'-lantern +jean +jeep +jersey +jigsaw puzzle +jinrikisha +joystick +kimono +knee pad +knot +lab coat +ladle +lampshade +laptop +lawn mower +lens cap +letter opener +library +lifeboat +lighter +limousine +liner +lipstick +Loafer +lotion +loudspeaker +loupe +lumbermill +magnetic compass +mailbag +mailbox +maillot +maillot +manhole cover +maraca +marimba +mask +matchstick +maypole +maze +measuring cup +medicine chest +megalith +microphone +microwave +military uniform +milk can +minibus +miniskirt +minivan +missile +mitten +mixing bowl +mobile home +Model T +modem +monastery +monitor +moped +mortar +mortarboard +mosque +mosquito net +motor scooter +mountain bike +mountain tent +mouse +mousetrap +moving van +muzzle +nail +neck brace +necklace +nipple +notebook +obelisk +oboe +ocarina +odometer +oil filter +organ +oscilloscope +overskirt +oxcart +oxygen mask +packet +paddle +paddlewheel +padlock +paintbrush +pajama +palace +panpipe +paper towel +parachute +parallel bars +park bench +parking meter +passenger car +patio +pay-phone +pedestal +pencil box +pencil sharpener +perfume +Petri dish +photocopier +pick +pickelhaube +picket fence +pickup +pier +piggy bank +pill bottle +pillow +ping-pong ball +pinwheel +pirate +pitcher +plane +planetarium +plastic bag +plate rack +plow +plunger +Polaroid camera +pole +police van +poncho +pool table +pop bottle +pot +potter's wheel +power drill +prayer rug +printer +prison +projectile +projector +puck +punching bag +purse +quill +quilt +racer +racket +radiator +radio +radio telescope +rain barrel +recreational vehicle +reel +reflex camera +refrigerator +remote control +restaurant +revolver +rifle +rocking chair +rotisserie +rubber eraser +rugby ball +rule +running shoe +safe +safety pin +saltshaker +sandal +sarong +sax +scabbard +scale +school bus +schooner +scoreboard +screen +screw +screwdriver +seat belt +sewing machine +shield +shoe shop +shoji +shopping basket +shopping cart +shovel +shower cap +shower curtain +ski +ski mask +sleeping bag +slide rule +sliding door +slot +snorkel +snowmobile +snowplow +soap dispenser +soccer ball +sock +solar dish +sombrero +soup bowl +space bar +space heater +space shuttle +spatula +speedboat +spider web +spindle +sports car +spotlight +stage +steam locomotive +steel arch bridge 
+steel drum +stethoscope +stole +stone wall +stopwatch +stove +strainer +streetcar +stretcher +studio couch +stupa +submarine +suit +sundial +sunglass +sunglasses +sunscreen +suspension bridge +swab +sweatshirt +swimming trunks +swing +switch +syringe +table lamp +tank +tape player +teapot +teddy +television +tennis ball +thatch +theater curtain +thimble +thresher +throne +tile roof +toaster +tobacco shop +toilet seat +torch +totem pole +tow truck +toyshop +tractor +trailer truck +tray +trench coat +tricycle +trimaran +tripod +triumphal arch +trolleybus +trombone +tub +turnstile +typewriter keyboard +umbrella +unicycle +upright +vacuum +vase +vault +velvet +vending machine +vestment +viaduct +violin +volleyball +waffle iron +wall clock +wallet +wardrobe +warplane +washbasin +washer +water bottle +water jug +water tower +whiskey jug +whistle +wig +window screen +window shade +Windsor tie +wine bottle +wing +wok +wooden spoon +wool +worm fence +wreck +yawl +yurt +web site +comic book +crossword puzzle +street sign +traffic light +book jacket +menu +plate +guacamole +consomme +hot pot +trifle +ice cream +ice lolly +French loaf +bagel +pretzel +cheeseburger +hotdog +mashed potato +head cabbage +broccoli +cauliflower +zucchini +spaghetti squash +acorn squash +butternut squash +cucumber +artichoke +bell pepper +cardoon +mushroom +Granny Smith +strawberry +orange +lemon +fig +pineapple +banana +jackfruit +custard apple +pomegranate +hay +carbonara +chocolate sauce +dough +meat loaf +pizza +potpie +burrito +red wine +espresso +cup +eggnog +alp +bubble +cliff +coral reef +geyser +lakeside +promontory +sandbar +seashore +valley +volcano +ballplayer +groom +scuba diver +rapeseed +daisy +yellow lady's slipper +corn +acorn +hip +buckeye +coral fungus +agaric +gyromitra +stinkhorn +earthstar +hen-of-the-woods +bolete +ear +toilet tissue \ No newline at end of file diff --git a/use_case_examples/resnet/resnet.py b/use_case_examples/resnet/resnet.py new file mode 100644 index 000000000..0410c5a31 --- /dev/null +++ b/use_case_examples/resnet/resnet.py @@ -0,0 +1,706 @@ +from functools import partial +from typing import Any, Callable, List, Optional, Type, Union + +import torch +import torch.nn as nn +from torch import Tensor + +from torchvision.transforms._presets import ImageClassification +from torchvision.utils import _log_api_usage_once +from torchvision.models._api import register_model, Weights, WeightsEnum +from torchvision.models._meta import _IMAGENET_CATEGORIES +from torchvision.models._utils import _ovewrite_named_param, handle_legacy_interface + + +__all__ = [ + "ResNet", + "ResNet18_Weights", + "ResNet34_Weights", + "ResNet50_Weights", + "ResNet101_Weights", + "ResNet152_Weights", + "ResNeXt50_32X4D_Weights", + "ResNeXt101_32X8D_Weights", + "ResNeXt101_64X4D_Weights", + "Wide_ResNet50_2_Weights", + "Wide_ResNet101_2_Weights", + "resnet18", + "resnet34", + "resnet50", + "resnet101", + "resnet152", + "resnext50_32x4d", + "resnext101_32x8d", + "resnext101_64x4d", + "wide_resnet50_2", + "wide_resnet101_2", +] + + +def conv3x3(in_planes: int, out_planes: int, stride: int = 1, groups: int = 1, dilation: int = 1) -> nn.Conv2d: + """3x3 convolution with padding""" + return nn.Conv2d( + in_planes, + out_planes, + kernel_size=3, + stride=stride, + padding=dilation, + groups=groups, + bias=False, + dilation=dilation, + ) + + +def conv1x1(in_planes: int, out_planes: int, stride: int = 1) -> nn.Conv2d: + """1x1 convolution""" + return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, 
bias=False) + + +class BasicBlock(nn.Module): + expansion: int = 1 + + def __init__( + self, + inplanes: int, + planes: int, + stride: int = 1, + downsample: Optional[nn.Module] = None, + groups: int = 1, + base_width: int = 64, + dilation: int = 1, + norm_layer: Optional[Callable[..., nn.Module]] = None, + ) -> None: + super().__init__() + if norm_layer is None: + norm_layer = nn.BatchNorm2d + if groups != 1 or base_width != 64: + raise ValueError("BasicBlock only supports groups=1 and base_width=64") + if dilation > 1: + raise NotImplementedError("Dilation > 1 not supported in BasicBlock") + # Both self.conv1 and self.downsample layers downsample the input when stride != 1 + self.conv1 = conv3x3(inplanes, planes, stride) + self.bn1 = norm_layer(planes) + self.relu = nn.ReLU(inplace=True) + self.conv2 = conv3x3(planes, planes) + self.bn2 = norm_layer(planes) + self.downsample = downsample + self.stride = stride + + def forward(self, x: Tensor) -> Tensor: + identity = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + out = self.relu(out) + + return out + + +class Bottleneck(nn.Module): + # Bottleneck in torchvision places the stride for downsampling at 3x3 convolution(self.conv2) + # while original implementation places the stride at the first 1x1 convolution(self.conv1) + # according to "Deep residual learning for image recognition" https://arxiv.org/abs/1512.03385. + # This variant is also known as ResNet V1.5 and improves accuracy according to + # https://ngc.nvidia.com/catalog/model-scripts/nvidia:resnet_50_v1_5_for_pytorch. + + expansion: int = 4 + + def __init__( + self, + inplanes: int, + planes: int, + stride: int = 1, + downsample: Optional[nn.Module] = None, + groups: int = 1, + base_width: int = 64, + dilation: int = 1, + norm_layer: Optional[Callable[..., nn.Module]] = None, + ) -> None: + super().__init__() + if norm_layer is None: + norm_layer = nn.BatchNorm2d + width = int(planes * (base_width / 64.0)) * groups + # Both self.conv2 and self.downsample layers downsample the input when stride != 1 + self.conv1 = conv1x1(inplanes, width) + self.bn1 = norm_layer(width) + self.conv2 = conv3x3(width, width, stride, groups, dilation) + self.bn2 = norm_layer(width) + self.conv3 = conv1x1(width, planes * self.expansion) + self.bn3 = norm_layer(planes * self.expansion) + self.relu = nn.ReLU(inplace=True) + self.downsample = downsample + self.stride = stride + + def forward(self, x: Tensor) -> Tensor: + identity = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.bn3(out) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + out = self.relu(out) + + return out + + +class ResNet(nn.Module): + def __init__( + self, + block: Type[Union[BasicBlock, Bottleneck]], + layers: List[int], + num_classes: int = 1000, + zero_init_residual: bool = False, + groups: int = 1, + width_per_group: int = 64, + replace_stride_with_dilation: Optional[List[bool]] = None, + norm_layer: Optional[Callable[..., nn.Module]] = None, + ) -> None: + super().__init__() + _log_api_usage_once(self) + if norm_layer is None: + norm_layer = nn.BatchNorm2d + self._norm_layer = norm_layer + + self.inplanes = 64 + self.dilation = 1 + if replace_stride_with_dilation is None: + # each element in the tuple 
indicates if we should replace + # the 2x2 stride with a dilated convolution instead + replace_stride_with_dilation = [False, False, False] + if len(replace_stride_with_dilation) != 3: + raise ValueError( + "replace_stride_with_dilation should be None " + f"or a 3-element tuple, got {replace_stride_with_dilation}" + ) + self.groups = groups + self.base_width = width_per_group + self.conv1 = nn.Conv2d(3, self.inplanes, kernel_size=7, stride=2, padding=3, bias=False) + self.bn1 = norm_layer(self.inplanes) + self.relu = nn.ReLU(inplace=True) + self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + self.layer1 = self._make_layer(block, 64, layers[0]) + self.layer2 = self._make_layer(block, 128, layers[1], stride=2, dilate=replace_stride_with_dilation[0]) + self.layer3 = self._make_layer(block, 256, layers[2], stride=2, dilate=replace_stride_with_dilation[1]) + self.layer4 = self._make_layer(block, 512, layers[3], stride=2, dilate=replace_stride_with_dilation[2]) + # self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) # FIXME + self.avgpool = nn.AvgPool2d(kernel_size=7, stride=1, padding=0) + self.fc = nn.Linear(512 * block.expansion, num_classes) + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode="fan_out", nonlinearity="relu") + elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + + # Zero-initialize the last BN in each residual branch, + # so that the residual branch starts with zeros, and each residual block behaves like an identity. + # This improves the model by 0.2~0.3% according to https://arxiv.org/abs/1706.02677 + if zero_init_residual: + for m in self.modules(): + if isinstance(m, Bottleneck) and m.bn3.weight is not None: + nn.init.constant_(m.bn3.weight, 0) # type: ignore[arg-type] + elif isinstance(m, BasicBlock) and m.bn2.weight is not None: + nn.init.constant_(m.bn2.weight, 0) # type: ignore[arg-type] + + def _make_layer( + self, + block: Type[Union[BasicBlock, Bottleneck]], + planes: int, + blocks: int, + stride: int = 1, + dilate: bool = False, + ) -> nn.Sequential: + norm_layer = self._norm_layer + downsample = None + previous_dilation = self.dilation + if dilate: + self.dilation *= stride + stride = 1 + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = nn.Sequential( + conv1x1(self.inplanes, planes * block.expansion, stride), + norm_layer(planes * block.expansion), + ) + + layers = [] + layers.append( + block( + self.inplanes, planes, stride, downsample, self.groups, self.base_width, previous_dilation, norm_layer + ) + ) + self.inplanes = planes * block.expansion + for _ in range(1, blocks): + layers.append( + block( + self.inplanes, + planes, + groups=self.groups, + base_width=self.base_width, + dilation=self.dilation, + norm_layer=norm_layer, + ) + ) + + return nn.Sequential(*layers) + + def _forward_impl(self, x: Tensor) -> Tensor: + # See note [TorchScript super()] + x = self.conv1(x) + x = self.bn1(x) + x = self.relu(x) + x = self.maxpool(x) + + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + x = self.layer4(x) + + x = self.avgpool(x) + x = torch.flatten(x, 1) + x = self.fc(x) + + return x + + def forward(self, x: Tensor) -> Tensor: + return self._forward_impl(x) + + +def _resnet( + block: Type[Union[BasicBlock, Bottleneck]], + layers: List[int], + weights: Optional[WeightsEnum], + progress: bool, + **kwargs: Any, +) -> ResNet: + if weights is not None: + _ovewrite_named_param(kwargs, "num_classes", 
len(weights.meta["categories"])) + + model = ResNet(block, layers, **kwargs) + + if weights is not None: + model.load_state_dict(weights.get_state_dict(progress=progress)) + + return model + + +_COMMON_META = { + "min_size": (1, 1), + "categories": _IMAGENET_CATEGORIES, +} + + +class ResNet18_Weights(WeightsEnum): + IMAGENET1K_V1 = Weights( + url="https://download.pytorch.org/models/resnet18-f37072fd.pth", + transforms=partial(ImageClassification, crop_size=224), + meta={ + **_COMMON_META, + "num_params": 11689512, + "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#resnet", + "_metrics": { + "ImageNet-1K": { + "acc@1": 69.758, + "acc@5": 89.078, + } + }, + "_ops": 1.814, + "_file_size": 44.661, + "_docs": """These weights reproduce closely the results of the paper using a simple training recipe.""", + }, + ) + DEFAULT = IMAGENET1K_V1 + + +class ResNet34_Weights(WeightsEnum): + IMAGENET1K_V1 = Weights( + url="https://download.pytorch.org/models/resnet34-b627a593.pth", + transforms=partial(ImageClassification, crop_size=224), + meta={ + **_COMMON_META, + "num_params": 21797672, + "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#resnet", + "_metrics": { + "ImageNet-1K": { + "acc@1": 73.314, + "acc@5": 91.420, + } + }, + "_ops": 3.664, + "_file_size": 83.275, + "_docs": """These weights reproduce closely the results of the paper using a simple training recipe.""", + }, + ) + DEFAULT = IMAGENET1K_V1 + + +class ResNet50_Weights(WeightsEnum): + IMAGENET1K_V1 = Weights( + url="https://download.pytorch.org/models/resnet50-0676ba61.pth", + transforms=partial(ImageClassification, crop_size=224), + meta={ + **_COMMON_META, + "num_params": 25557032, + "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#resnet", + "_metrics": { + "ImageNet-1K": { + "acc@1": 76.130, + "acc@5": 92.862, + } + }, + "_ops": 4.089, + "_file_size": 97.781, + "_docs": """These weights reproduce closely the results of the paper using a simple training recipe.""", + }, + ) + IMAGENET1K_V2 = Weights( + url="https://download.pytorch.org/models/resnet50-11ad3fa6.pth", + transforms=partial(ImageClassification, crop_size=224, resize_size=232), + meta={ + **_COMMON_META, + "num_params": 25557032, + "recipe": "https://github.com/pytorch/vision/issues/3995#issuecomment-1013906621", + "_metrics": { + "ImageNet-1K": { + "acc@1": 80.858, + "acc@5": 95.434, + } + }, + "_ops": 4.089, + "_file_size": 97.79, + "_docs": """ + These weights improve upon the results of the original paper by using TorchVision's `new training recipe + `_. 
+ """, + }, + ) + DEFAULT = IMAGENET1K_V2 + + +class ResNet101_Weights(WeightsEnum): + IMAGENET1K_V1 = Weights( + url="https://download.pytorch.org/models/resnet101-63fe2227.pth", + transforms=partial(ImageClassification, crop_size=224), + meta={ + **_COMMON_META, + "num_params": 44549160, + "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#resnet", + "_metrics": { + "ImageNet-1K": { + "acc@1": 77.374, + "acc@5": 93.546, + } + }, + "_ops": 7.801, + "_file_size": 170.511, + "_docs": """These weights reproduce closely the results of the paper using a simple training recipe.""", + }, + ) + IMAGENET1K_V2 = Weights( + url="https://download.pytorch.org/models/resnet101-cd907fc2.pth", + transforms=partial(ImageClassification, crop_size=224, resize_size=232), + meta={ + **_COMMON_META, + "num_params": 44549160, + "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe", + "_metrics": { + "ImageNet-1K": { + "acc@1": 81.886, + "acc@5": 95.780, + } + }, + "_ops": 7.801, + "_file_size": 170.53, + "_docs": """ + These weights improve upon the results of the original paper by using TorchVision's `new training recipe + `_. + """, + }, + ) + DEFAULT = IMAGENET1K_V2 + + +class ResNet152_Weights(WeightsEnum): + IMAGENET1K_V1 = Weights( + url="https://download.pytorch.org/models/resnet152-394f9c45.pth", + transforms=partial(ImageClassification, crop_size=224), + meta={ + **_COMMON_META, + "num_params": 60192808, + "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#resnet", + "_metrics": { + "ImageNet-1K": { + "acc@1": 78.312, + "acc@5": 94.046, + } + }, + "_ops": 11.514, + "_file_size": 230.434, + "_docs": """These weights reproduce closely the results of the paper using a simple training recipe.""", + }, + ) + IMAGENET1K_V2 = Weights( + url="https://download.pytorch.org/models/resnet152-f82ba261.pth", + transforms=partial(ImageClassification, crop_size=224, resize_size=232), + meta={ + **_COMMON_META, + "num_params": 60192808, + "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe", + "_metrics": { + "ImageNet-1K": { + "acc@1": 82.284, + "acc@5": 96.002, + } + }, + "_ops": 11.514, + "_file_size": 230.474, + "_docs": """ + These weights improve upon the results of the original paper by using TorchVision's `new training recipe + `_. + """, + }, + ) + DEFAULT = IMAGENET1K_V2 + + +class ResNeXt50_32X4D_Weights(WeightsEnum): + IMAGENET1K_V1 = Weights( + url="https://download.pytorch.org/models/resnext50_32x4d-7cdf4587.pth", + transforms=partial(ImageClassification, crop_size=224), + meta={ + **_COMMON_META, + "num_params": 25028904, + "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#resnext", + "_metrics": { + "ImageNet-1K": { + "acc@1": 77.618, + "acc@5": 93.698, + } + }, + "_ops": 4.23, + "_file_size": 95.789, + "_docs": """These weights reproduce closely the results of the paper using a simple training recipe.""", + }, + ) + IMAGENET1K_V2 = Weights( + url="https://download.pytorch.org/models/resnext50_32x4d-1a0047aa.pth", + transforms=partial(ImageClassification, crop_size=224, resize_size=232), + meta={ + **_COMMON_META, + "num_params": 25028904, + "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe", + "_metrics": { + "ImageNet-1K": { + "acc@1": 81.198, + "acc@5": 95.340, + } + }, + "_ops": 4.23, + "_file_size": 95.833, + "_docs": """ + These weights improve upon the results of the original paper by using TorchVision's `new training recipe + `_. 
+ """, + }, + ) + DEFAULT = IMAGENET1K_V2 + + +class ResNeXt101_32X8D_Weights(WeightsEnum): + IMAGENET1K_V1 = Weights( + url="https://download.pytorch.org/models/resnext101_32x8d-8ba56ff5.pth", + transforms=partial(ImageClassification, crop_size=224), + meta={ + **_COMMON_META, + "num_params": 88791336, + "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#resnext", + "_metrics": { + "ImageNet-1K": { + "acc@1": 79.312, + "acc@5": 94.526, + } + }, + "_ops": 16.414, + "_file_size": 339.586, + "_docs": """These weights reproduce closely the results of the paper using a simple training recipe.""", + }, + ) + IMAGENET1K_V2 = Weights( + url="https://download.pytorch.org/models/resnext101_32x8d-110c445d.pth", + transforms=partial(ImageClassification, crop_size=224, resize_size=232), + meta={ + **_COMMON_META, + "num_params": 88791336, + "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe-with-fixres", + "_metrics": { + "ImageNet-1K": { + "acc@1": 82.834, + "acc@5": 96.228, + } + }, + "_ops": 16.414, + "_file_size": 339.673, + "_docs": """ + These weights improve upon the results of the original paper by using TorchVision's `new training recipe + `_. + """, + }, + ) + DEFAULT = IMAGENET1K_V2 + + +class ResNeXt101_64X4D_Weights(WeightsEnum): + IMAGENET1K_V1 = Weights( + url="https://download.pytorch.org/models/resnext101_64x4d-173b62eb.pth", + transforms=partial(ImageClassification, crop_size=224, resize_size=232), + meta={ + **_COMMON_META, + "num_params": 83455272, + "recipe": "https://github.com/pytorch/vision/pull/5935", + "_metrics": { + "ImageNet-1K": { + "acc@1": 83.246, + "acc@5": 96.454, + } + }, + "_ops": 15.46, + "_file_size": 319.318, + "_docs": """ + These weights were trained from scratch by using TorchVision's `new training recipe + `_. + """, + }, + ) + DEFAULT = IMAGENET1K_V1 + + +class Wide_ResNet50_2_Weights(WeightsEnum): + IMAGENET1K_V1 = Weights( + url="https://download.pytorch.org/models/wide_resnet50_2-95faca4d.pth", + transforms=partial(ImageClassification, crop_size=224), + meta={ + **_COMMON_META, + "num_params": 68883240, + "recipe": "https://github.com/pytorch/vision/pull/912#issue-445437439", + "_metrics": { + "ImageNet-1K": { + "acc@1": 78.468, + "acc@5": 94.086, + } + }, + "_ops": 11.398, + "_file_size": 131.82, + "_docs": """These weights reproduce closely the results of the paper using a simple training recipe.""", + }, + ) + IMAGENET1K_V2 = Weights( + url="https://download.pytorch.org/models/wide_resnet50_2-9ba9bcbe.pth", + transforms=partial(ImageClassification, crop_size=224, resize_size=232), + meta={ + **_COMMON_META, + "num_params": 68883240, + "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe-with-fixres", + "_metrics": { + "ImageNet-1K": { + "acc@1": 81.602, + "acc@5": 95.758, + } + }, + "_ops": 11.398, + "_file_size": 263.124, + "_docs": """ + These weights improve upon the results of the original paper by using TorchVision's `new training recipe + `_. 
+ """, + }, + ) + DEFAULT = IMAGENET1K_V2 + + +class Wide_ResNet101_2_Weights(WeightsEnum): + IMAGENET1K_V1 = Weights( + url="https://download.pytorch.org/models/wide_resnet101_2-32ee1156.pth", + transforms=partial(ImageClassification, crop_size=224), + meta={ + **_COMMON_META, + "num_params": 126886696, + "recipe": "https://github.com/pytorch/vision/pull/912#issue-445437439", + "_metrics": { + "ImageNet-1K": { + "acc@1": 78.848, + "acc@5": 94.284, + } + }, + "_ops": 22.753, + "_file_size": 242.896, + "_docs": """These weights reproduce closely the results of the paper using a simple training recipe.""", + }, + ) + IMAGENET1K_V2 = Weights( + url="https://download.pytorch.org/models/wide_resnet101_2-d733dc28.pth", + transforms=partial(ImageClassification, crop_size=224, resize_size=232), + meta={ + **_COMMON_META, + "num_params": 126886696, + "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe", + "_metrics": { + "ImageNet-1K": { + "acc@1": 82.510, + "acc@5": 96.020, + } + }, + "_ops": 22.753, + "_file_size": 484.747, + "_docs": """ + These weights improve upon the results of the original paper by using TorchVision's `new training recipe + `_. + """, + }, + ) + DEFAULT = IMAGENET1K_V2 + + +@register_model() +@handle_legacy_interface(weights=("pretrained", ResNet18_Weights.IMAGENET1K_V1)) +def resnet18_custom(*, weights: Optional[ResNet18_Weights] = None, progress: bool = True, **kwargs: Any) -> ResNet: + """ResNet-18 from `Deep Residual Learning for Image Recognition `__. + + Args: + weights (:class:`~torchvision.models.ResNet18_Weights`, optional): The + pretrained weights to use. See + :class:`~torchvision.models.ResNet18_Weights` below for + more details, and possible values. By default, no pre-trained + weights are used. + progress (bool, optional): If True, displays a progress bar of the + download to stderr. Default is True. + **kwargs: parameters passed to the ``torchvision.models.resnet.ResNet`` + base class. Please refer to the `source code + `_ + for more details about this class. + + .. autoclass:: torchvision.models.ResNet18_Weights + :members: + """ + weights = ResNet18_Weights.verify(weights) + + return _resnet(BasicBlock, [2, 2, 2, 2], weights, progress, **kwargs) diff --git a/use_case_examples/resnet/resnet_fhe.ipynb b/use_case_examples/resnet/resnet_fhe.ipynb new file mode 100644 index 000000000..1fb361afa --- /dev/null +++ b/use_case_examples/resnet/resnet_fhe.ipynb @@ -0,0 +1,1989 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/ubuntu/venv/lib/python3.9/site-packages/torchvision/models/_utils.py:208: UserWarning: The parameter 'pretrained' is deprecated since 0.13 and may be removed in the future, please use 'weights' instead.\n", + " warnings.warn(\n", + "/home/ubuntu/venv/lib/python3.9/site-packages/torchvision/models/_utils.py:223: UserWarning: Arguments other than a weight enum or `None` for 'weights' are deprecated since 0.13 and may be removed in the future. The current behavior is equivalent to passing `weights=ResNet18_Weights.IMAGENET1K_V1`. 
You can also use `weights=ResNet18_Weights.DEFAULT` to get the most up-to-date weights.\n", + " warnings.warn(msg)\n" + ] + } + ], + "source": [ + "import torch\n", + "from resnet import resnet18_custom\n", + "# from torchvision.models.quantization import resnet18 as load_resnet18\n", + "# from torchvision.models.quantization import ResNet18_QuantizedWeights\n", + "from concrete.ml.torch.compile import compile_torch_model\n", + "\n", + "\n", + "# Load the ResNet18 model without pretrained weights\n", + "resnet18 = resnet18_custom(pretrained=True)\n", + "\n", + "# # Step 1: Initialize model with the best available weights\n", + "# weights = ResNet18_QuantizedWeights.DEFAULT\n", + "# resnet18 = load_resnet18(weights=weights, quantize=True)\n", + "# resnet18.eval()\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'resnet18.onnx'" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Define a dummy input tensor with the size of 1x3x224x224 (batch size x channels x height x width)\n", + "dummy_input = torch.randn(10, 3, 224, 224)\n", + "\n", + "# Set the model to evaluation mode\n", + "resnet18.eval()\n", + "\n", + "# Path to save the ONNX model\n", + "onnx_model_path = \"resnet18.onnx\"\n", + "\n", + "# Export the model to ONNX format\n", + "torch.onnx.export(resnet18, dummy_input, onnx_model_path, verbose=False)\n", + "\n", + "onnx_model_path" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "ResNet(\n", + " (conv1): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (relu): ReLU(inplace=True)\n", + " (maxpool): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)\n", + " (layer1): Sequential(\n", + " (0): BasicBlock(\n", + " (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (relu): ReLU(inplace=True)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " )\n", + " (1): BasicBlock(\n", + " (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (relu): ReLU(inplace=True)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " )\n", + " )\n", + " (layer2): Sequential(\n", + " (0): BasicBlock(\n", + " (conv1): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (relu): ReLU(inplace=True)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (downsample): Sequential(\n", + " (0): Conv2d(64, 128, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, 
track_running_stats=True)\n", + " )\n", + " )\n", + " (1): BasicBlock(\n", + " (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (relu): ReLU(inplace=True)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " )\n", + " )\n", + " (layer3): Sequential(\n", + " (0): BasicBlock(\n", + " (conv1): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (relu): ReLU(inplace=True)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (downsample): Sequential(\n", + " (0): Conv2d(128, 256, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " )\n", + " )\n", + " (1): BasicBlock(\n", + " (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (relu): ReLU(inplace=True)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " )\n", + " )\n", + " (layer4): Sequential(\n", + " (0): BasicBlock(\n", + " (conv1): Conv2d(256, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (relu): ReLU(inplace=True)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (downsample): Sequential(\n", + " (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " )\n", + " )\n", + " (1): BasicBlock(\n", + " (conv1): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (relu): ReLU(inplace=True)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " )\n", + " )\n", + " (avgpool): AvgPool2d(kernel_size=7, stride=1, padding=0)\n", + " (fc): Linear(in_features=512, out_features=1000, bias=True)\n", + ")" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "resnet18" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Accuracy of the ResNet18 model on the images: 90.90909091%\n", + "Top-5 Accuracy of the ResNet18 model on the images: 95.45454545%\n" + ] + } + ], + "source": [ + "from io import BytesIO\n", + "\n", + "import requests\n", + "import torch\n", + "from PIL import Image\n", + "from 
torchvision import models, transforms\n", + "\n", + "\n", + "# Read the ImageNet classes file and create a list\n", + "with open(\"./imagenet_classes.txt\", \"r\") as f:\n", + " imagenet_classes = [line.strip() for line in f.readlines()]\n", + "\n", + "\n", + "# Create a dictionary to map class names to indices\n", + "class_to_index = {cls: idx for idx, cls in enumerate(imagenet_classes)}\n", + "\n", + "\n", + "transform = transforms.Compose(\n", + " [\n", + " transforms.Resize(256),\n", + " transforms.CenterCrop(224),\n", + " transforms.ToTensor(),\n", + " transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),\n", + " ]\n", + ")\n", + "\n", + "# Download an example image from the web\n", + "image_urls = [\n", + " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01443537_goldfish.JPEG\",\n", + " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01614925_bald_eagle.JPEG\",\n", + " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01697457_African_crocodile.JPEG\",\n", + " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01592084_chickadee.JPEG\",\n", + " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01601694_water_ouzel.JPEG\",\n", + " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01739381_vine_snake.JPEG\",\n", + " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01806567_quail.JPEG\",\n", + " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01917289_brain_coral.JPEG\",\n", + " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02077923_sea_lion.JPEG\",\n", + " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02051845_pelican.JPEG\",\n", + " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02110185_Siberian_husky.JPEG\",\n", + " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02165456_ladybug.JPEG\",\n", + " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02325366_wood_rabbit.JPEG\",\n", + " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02391049_zebra.JPEG\",\n", + " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02481823_chimpanzee.JPEG\",\n", + " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02510455_giant_panda.JPEG\",\n", + " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02643566_lionfish.JPEG\",\n", + " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02787622_banjo.JPEG\",\n", + " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02817516_bearskin.JPEG\",\n", + " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02871525_bookshop.JPEG\",\n", + " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02930766_cab.JPEG\",\n", + " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02974003_car_wheel.JPEG\",\n", + "]\n", + "\n", + "# Download and transform images, and collect labels\n", + "images = []\n", + "labels = []\n", + "for image_url in image_urls:\n", + " class_name = '_'.join(image_url.split('/')[-1].split('.')[0].split('_')[1:]).replace('_', ' ')\n", + " if class_name in class_to_index:\n", + " response 
= requests.get(image_url)\n", + " img = Image.open(BytesIO(response.content))\n", + " img = transform(img)\n", + " images.append(img)\n", + " labels.append(class_to_index[class_name])\n", + "\n", + "# Stack images to create a mini-batch\n", + "images = torch.stack(images)\n", + "labels = torch.tensor(labels)\n", + "\n", + "# Forward pass through the model to get the predictions\n", + "with torch.no_grad():\n", + " outputs = resnet18(images)\n", + " _, predicted = torch.max(outputs, 1)\n", + "\n", + "\n", + "def calculate_accuracy(predicted, labels):\n", + " correct = (predicted == labels).sum().item()\n", + " total = labels.size(0)\n", + " accuracy = 100 * correct / total\n", + " return accuracy\n", + "\n", + "def calculate_topk_accuracy(outputs, labels, topk=5):\n", + " _, topk_predicted = torch.topk(outputs, topk, dim=1)\n", + " correct_topk = sum([labels[i] in topk_predicted[i] for i in range(len(labels))])\n", + " total = labels.size(0)\n", + " topk_accuracy = 100 * correct_topk / total\n", + " return topk_accuracy\n", + "\n", + "with torch.no_grad():\n", + " outputs = resnet18(images)\n", + " _, predicted = torch.max(outputs, 1)\n", + "\n", + "accuracy = calculate_accuracy(predicted, labels)\n", + "print(f\"Accuracy of the ResNet18 model on the images: {accuracy:.8f}%\")\n", + "\n", + "topk_accuracy = calculate_topk_accuracy(outputs, labels, topk=5)\n", + "print(f\"Top-5 Accuracy of the ResNet18 model on the images: {topk_accuracy:.8f}%\")" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "0.9090909361839294" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from concrete.ml.torch.compile import build_quantized_module, compile_torch_model\n", + "\n", + "# q_module = compile_torch_model(\n", + "# resnet18,\n", + "# torch_inputset=images,\n", + "# n_bits={\"model_inputs\": 8, \"op_inputs\": 6, \"op_weights\": 6, \"model_outputs\": 8},\n", + "# # rounding_threshold_bits={\"n_bits\": 6, \"method\":\"APPROXIMATE\"},\n", + "# )\n", + "from concrete.fhe import Configuration, ParameterSelectionStrategy\n", + "\n", + "# config = Configuration(parameter_selection_strategy = ParameterSelectionStrategy.MONO, single_precision = True)\n", + "q_module = compile_torch_model(\n", + " resnet18,\n", + " torch_inputset=images,\n", + " n_bits={\"model_inputs\": 8, \"op_inputs\": 6, \"op_weights\": 6, \"model_outputs\": 8},\n", + " rounding_threshold_bits={\"n_bits\": 7, \"method\":\"APPROXIMATE\"},\n", + " # configuration=config,\n", + " # p_error = 0.9\n", + ")\n", + "\n", + "with torch.no_grad():\n", + " outputs_fhe = q_module.forward(images.detach().numpy(), fhe=\"disable\")\n", + " probabilities_fhe = torch.nn.functional.softmax(torch.from_numpy(outputs_fhe), dim=-1)\n", + " outputs = resnet18(images)\n", + " probabilities = torch.nn.functional.softmax(outputs, dim=-1)\n", + "\n", + "# Calculate and store accuracy\n", + "fhe_accuracy_vs_fp32 = (\n", + " (probabilities_fhe.argmax(-1) == probabilities.argmax(-1)).float().mean().item()\n", + ")\n", + "fhe_accuracy_vs_fp32" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " %0 = _x # EncryptedTensor ∈ [-32, 31]\n", + " %1 = ones() # EncryptedTensor ∈ [1, 1]\n", + " %2 = -4 # ClearScalar ∈ [-4, -4]\n", + " %3 = multiply(%1, %2) # EncryptedTensor ∈ [-4, -4]\n", + " %4 = (%3[:, :, 3:227, 3:227] = %0) # 
EncryptedTensor ∈ [-32, 31]\n", + " %5 = [[[[ 0 ... -1 -1]]]] # ClearTensor ∈ [-24, 31] @ /conv1/Conv.conv\n", + " %6 = conv2d(%4, %5, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(2, 2), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-5898, 5936] @ /conv1/Conv.conv\n", + " %7 = round_bit_pattern(%6, lsbs_to_remove=7, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-5888, 5888] @ /conv1/Conv.conv_rounding\n", + " %8 = subgraph(%7) # EncryptedTensor ∈ [0, 63]\n", + " %9 = ones() # EncryptedTensor ∈ [1, 1]\n", + " %10 = 0 # ClearScalar ∈ [0, 0]\n", + " %11 = multiply(%9, %10) # EncryptedTensor ∈ [0, 0]\n", + " %12 = (%11[:, :, 1:113, 1:113] = %8) # EncryptedTensor ∈ [0, 63]\n", + " %13 = maxpool2d(%12, kernel_shape=(3, 3), strides=(2, 2), pads=(0, 0, 0, 0), dilations=(1, 1), ceil_mode=False) # EncryptedTensor ∈ [0, 63]\n", + " %14 = subgraph(%13) # EncryptedTensor ∈ [0, 63]\n", + " %15 = ones() # EncryptedTensor ∈ [1, 1]\n", + " %16 = 0 # ClearScalar ∈ [0, 0]\n", + " %17 = multiply(%15, %16) # EncryptedTensor ∈ [0, 0]\n", + " %18 = (%17[:, :, 1:57, 1:57] = %14) # EncryptedTensor ∈ [0, 63]\n", + " %19 = [[[[ 2 - ... 0 -3]]]] # ClearTensor ∈ [-31, 28] @ /layer1/layer1.0/conv1/Conv.conv\n", + " %20 = conv2d(%18, %19, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-7704, 2874] @ /layer1/layer1.0/conv1/Conv.conv\n", + " %21 = round_bit_pattern(%20, lsbs_to_remove=7, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-7680, 2816] @ /layer1/layer1.0/conv1/Conv.conv_rounding\n", + " %22 = subgraph(%21) # EncryptedTensor ∈ [0, 62]\n", + " %23 = ones() # EncryptedTensor ∈ [1, 1]\n", + " %24 = 0 # ClearScalar ∈ [0, 0]\n", + " %25 = multiply(%23, %24) # EncryptedTensor ∈ [0, 0]\n", + " %26 = (%25[:, :, 1:57, 1:57] = %22) # EncryptedTensor ∈ [0, 62]\n", + " %27 = [[[[ 1 - ... -3 0]]]] # ClearTensor ∈ [-31, 21] @ /layer1/layer1.0/conv2/Conv.conv\n", + " %28 = conv2d(%26, %27, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-4267, 3767] @ /layer1/layer1.0/conv2/Conv.conv\n", + " %29 = round_bit_pattern(%28, lsbs_to_remove=7, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-4224, 3712] @ /layer1/layer1.0/conv2/Conv.conv_rounding\n", + " %30 = subgraph(%29) # EncryptedTensor ∈ [-32, 31]\n", + " %31 = subgraph(%13) # EncryptedTensor ∈ [-32, 31]\n", + " %32 = subgraph(%30) # EncryptedTensor ∈ [-105, 90]\n", + " %33 = subgraph(%31) # EncryptedTensor ∈ [0, 142]\n", + " %34 = 1 # ClearScalar ∈ [1, 1]\n", + " %35 = multiply(%34, %33) # EncryptedTensor ∈ [0, 142]\n", + " %36 = add(%32, %35) # EncryptedTensor ∈ [-103, 150]\n", + " %37 = subgraph(%36) # EncryptedTensor ∈ [0, 63]\n", + " %38 = ones() # EncryptedTensor ∈ [1, 1]\n", + " %39 = 0 # ClearScalar ∈ [0, 0]\n", + " %40 = multiply(%38, %39) # EncryptedTensor ∈ [0, 0]\n", + " %41 = (%40[:, :, 1:57, 1:57] = %37) # EncryptedTensor ∈ [0, 63]\n", + " %42 = [[[[ 1 0 ... 1 -5]]]] # ClearTensor ∈ [-29, 31] @ /layer1/layer1.1/conv1/Conv.conv\n", + " %43 = conv2d(%41, %42, [0 0 0 0 0 ... 
0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-5571, 4822] @ /layer1/layer1.1/conv1/Conv.conv\n", + " %44 = round_bit_pattern(%43, lsbs_to_remove=7, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-5632, 4864] @ /layer1/layer1.1/conv1/Conv.conv_rounding\n", + " %45 = subgraph(%44) # EncryptedTensor ∈ [0, 63]\n", + " %46 = ones() # EncryptedTensor ∈ [1, 1]\n", + " %47 = 0 # ClearScalar ∈ [0, 0]\n", + " %48 = multiply(%46, %47) # EncryptedTensor ∈ [0, 0]\n", + " %49 = (%48[:, :, 1:57, 1:57] = %45) # EncryptedTensor ∈ [0, 63]\n", + " %50 = [[[[ -1 ... 10 3]]]] # ClearTensor ∈ [-31, 23] @ /layer1/layer1.1/conv2/Conv.conv\n", + " %51 = conv2d(%49, %50, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-5422, 3434] @ /layer1/layer1.1/conv2/Conv.conv\n", + " %52 = round_bit_pattern(%51, lsbs_to_remove=7, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-5376, 3456] @ /layer1/layer1.1/conv2/Conv.conv_rounding\n", + " %53 = subgraph(%52) # EncryptedTensor ∈ [-32, 31]\n", + " %54 = subgraph(%36) # EncryptedTensor ∈ [-32, 31]\n", + " %55 = subgraph(%53) # EncryptedTensor ∈ [-135, 89]\n", + " %56 = subgraph(%54) # EncryptedTensor ∈ [0, 92]\n", + " %57 = 1 # ClearScalar ∈ [1, 1]\n", + " %58 = multiply(%57, %56) # EncryptedTensor ∈ [0, 92]\n", + " %59 = add(%55, %58) # EncryptedTensor ∈ [-125, 130]\n", + " %60 = subgraph(%59) # EncryptedTensor ∈ [0, 63]\n", + " %61 = ones() # EncryptedTensor ∈ [1, 1]\n", + " %62 = 0 # ClearScalar ∈ [0, 0]\n", + " %63 = multiply(%61, %62) # EncryptedTensor ∈ [0, 0]\n", + " %64 = (%63[:, :, 1:57, 1:57] = %60) # EncryptedTensor ∈ [0, 63]\n", + " %65 = [[[[-4 -7 ... -3 3]]]] # ClearTensor ∈ [-21, 31] @ /layer2/layer2.0/conv1/Conv.conv\n", + " %66 = conv2d(%64, %65, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(2, 2), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-3130, 2989] @ /layer2/layer2.0/conv1/Conv.conv\n", + " %67 = round_bit_pattern(%66, lsbs_to_remove=6, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-3136, 3008] @ /layer2/layer2.0/conv1/Conv.conv_rounding\n", + " %68 = subgraph(%67) # EncryptedTensor ∈ [0, 63]\n", + " %69 = ones() # EncryptedTensor ∈ [1, 1]\n", + " %70 = 0 # ClearScalar ∈ [0, 0]\n", + " %71 = multiply(%69, %70) # EncryptedTensor ∈ [0, 0]\n", + " %72 = (%71[:, :, 1:29, 1:29] = %68) # EncryptedTensor ∈ [0, 63]\n", + " %73 = [[[[ 0 ... -1 0]]]] # ClearTensor ∈ [-21, 31] @ /layer2/layer2.0/conv2/Conv.conv\n", + " %74 = conv2d(%72, %73, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-4567, 6827] @ /layer2/layer2.0/conv2/Conv.conv\n", + " %75 = round_bit_pattern(%74, lsbs_to_remove=7, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-4608, 6784] @ /layer2/layer2.0/conv2/Conv.conv_rounding\n", + " %76 = subgraph(%59) # EncryptedTensor ∈ [0, 63]\n", + " %77 = [[[[ 1]] ... [[ 1]]]] # ClearTensor ∈ [-25, 31] @ /layer2/layer2.0/downsample/downsample.0/Conv.conv\n", + " %78 = conv2d(%76, %77, [0 0 0 0 0 ... 
0 0 0 0 0], pads=[0, 0, 0, 0], strides=(2, 2), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-1242, 985] @ /layer2/layer2.0/downsample/downsample.0/Conv.conv\n", + " %79 = round_bit_pattern(%78, lsbs_to_remove=5, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-1248, 992] @ /layer2/layer2.0/downsample/downsample.0/Conv.conv_rounding\n", + " %80 = subgraph(%75) # EncryptedTensor ∈ [-32, 29]\n", + " %81 = subgraph(%79) # EncryptedTensor ∈ [-32, 31]\n", + " %82 = subgraph(%80) # EncryptedTensor ∈ [-71, 126]\n", + " %83 = subgraph(%81) # EncryptedTensor ∈ [-78, 66]\n", + " %84 = 1 # ClearScalar ∈ [1, 1]\n", + " %85 = multiply(%84, %83) # EncryptedTensor ∈ [-78, 66]\n", + " %86 = add(%82, %85) # EncryptedTensor ∈ [-105, 138]\n", + " %87 = subgraph(%86) # EncryptedTensor ∈ [0, 59]\n", + " %88 = ones() # EncryptedTensor ∈ [1, 1]\n", + " %89 = 0 # ClearScalar ∈ [0, 0]\n", + " %90 = multiply(%88, %89) # EncryptedTensor ∈ [0, 0]\n", + " %91 = (%90[:, :, 1:29, 1:29] = %87) # EncryptedTensor ∈ [0, 59]\n", + " %92 = [[[[ 0 ... 0 -1]]]] # ClearTensor ∈ [-26, 31] @ /layer2/layer2.1/conv1/Conv.conv\n", + " %93 = conv2d(%91, %92, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-3652, 3308] @ /layer2/layer2.1/conv1/Conv.conv\n", + " %94 = round_bit_pattern(%93, lsbs_to_remove=6, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-3648, 3328] @ /layer2/layer2.1/conv1/Conv.conv_rounding\n", + " %95 = subgraph(%94) # EncryptedTensor ∈ [0, 63]\n", + " %96 = ones() # EncryptedTensor ∈ [1, 1]\n", + " %97 = 0 # ClearScalar ∈ [0, 0]\n", + " %98 = multiply(%96, %97) # EncryptedTensor ∈ [0, 0]\n", + " %99 = (%98[:, :, 1:29, 1:29] = %95) # EncryptedTensor ∈ [0, 63]\n", + "%100 = [[[[-1 0 ... -1 -3]]]] # ClearTensor ∈ [-31, 22] @ /layer2/layer2.1/conv2/Conv.conv\n", + "%101 = conv2d(%99, %100, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-3240, 2807] @ /layer2/layer2.1/conv2/Conv.conv\n", + "%102 = round_bit_pattern(%101, lsbs_to_remove=6, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-3264, 2816] @ /layer2/layer2.1/conv2/Conv.conv_rounding\n", + "%103 = subgraph(%102) # EncryptedTensor ∈ [-30, 30]\n", + "%104 = subgraph(%86) # EncryptedTensor ∈ [-32, 27]\n", + "%105 = subgraph(%103) # EncryptedTensor ∈ [-103, 96]\n", + "%106 = subgraph(%104) # EncryptedTensor ∈ [0, 153]\n", + "%107 = 1 # ClearScalar ∈ [1, 1]\n", + "%108 = multiply(%107, %106) # EncryptedTensor ∈ [0, 153]\n", + "%109 = add(%105, %108) # EncryptedTensor ∈ [-103, 142]\n", + "%110 = subgraph(%109) # EncryptedTensor ∈ [0, 61]\n", + "%111 = ones() # EncryptedTensor ∈ [1, 1]\n", + "%112 = 0 # ClearScalar ∈ [0, 0]\n", + "%113 = multiply(%111, %112) # EncryptedTensor ∈ [0, 0]\n", + "%114 = (%113[:, :, 1:29, 1:29] = %110) # EncryptedTensor ∈ [0, 61]\n", + "%115 = [[[[-1 -1 ... 0 -1]]]] # ClearTensor ∈ [-24, 31] @ /layer3/layer3.0/conv1/Conv.conv\n", + "%116 = conv2d(%114, %115, [0 0 0 0 0 ... 
0 0 0 0 0], pads=[0, 0, 0, 0], strides=(2, 2), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-4397, 3425] @ /layer3/layer3.0/conv1/Conv.conv\n", + "%117 = round_bit_pattern(%116, lsbs_to_remove=7, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-4352, 3456] @ /layer3/layer3.0/conv1/Conv.conv_rounding\n", + "%118 = subgraph(%117) # EncryptedTensor ∈ [0, 63]\n", + "%119 = ones() # EncryptedTensor ∈ [1, 1]\n", + "%120 = 0 # ClearScalar ∈ [0, 0]\n", + "%121 = multiply(%119, %120) # EncryptedTensor ∈ [0, 0]\n", + "%122 = (%121[:, :, 1:15, 1:15] = %118) # EncryptedTensor ∈ [0, 63]\n", + "%123 = [[[[ 0 -2 ... -1 -2]]]] # ClearTensor ∈ [-21, 31] @ /layer3/layer3.0/conv2/Conv.conv\n", + "%124 = conv2d(%122, %123, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-4434, 5529] @ /layer3/layer3.0/conv2/Conv.conv\n", + "%125 = round_bit_pattern(%124, lsbs_to_remove=7, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-4480, 5504] @ /layer3/layer3.0/conv2/Conv.conv_rounding\n", + "%126 = subgraph(%109) # EncryptedTensor ∈ [0, 61]\n", + "%127 = [[[[ 0]] ... [[ 1]]]] # ClearTensor ∈ [-31, 24] @ /layer3/layer3.0/downsample/downsample.0/Conv.conv\n", + "%128 = conv2d(%126, %127, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(2, 2), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-1238, 850] @ /layer3/layer3.0/downsample/downsample.0/Conv.conv\n", + "%129 = round_bit_pattern(%128, lsbs_to_remove=5, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-1248, 864] @ /layer3/layer3.0/downsample/downsample.0/Conv.conv_rounding\n", + "%130 = subgraph(%125) # EncryptedTensor ∈ [-31, 31]\n", + "%131 = subgraph(%129) # EncryptedTensor ∈ [-32, 31]\n", + "%132 = subgraph(%130) # EncryptedTensor ∈ [-99, 137]\n", + "%133 = subgraph(%131) # EncryptedTensor ∈ [-58, 36]\n", + "%134 = 1 # ClearScalar ∈ [1, 1]\n", + "%135 = multiply(%134, %133) # EncryptedTensor ∈ [-58, 36]\n", + "%136 = add(%132, %135) # EncryptedTensor ∈ [-113, 149]\n", + "%137 = subgraph(%136) # EncryptedTensor ∈ [0, 63]\n", + "%138 = ones() # EncryptedTensor ∈ [1, 1]\n", + "%139 = 0 # ClearScalar ∈ [0, 0]\n", + "%140 = multiply(%138, %139) # EncryptedTensor ∈ [0, 0]\n", + "%141 = (%140[:, :, 1:15, 1:15] = %137) # EncryptedTensor ∈ [0, 63]\n", + "%142 = [[[[ 3 3 ... 0 2]]]] # ClearTensor ∈ [-26, 31] @ /layer3/layer3.1/conv1/Conv.conv\n", + "%143 = conv2d(%141, %142, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-4838, 3516] @ /layer3/layer3.1/conv1/Conv.conv\n", + "%144 = round_bit_pattern(%143, lsbs_to_remove=7, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-4864, 3456] @ /layer3/layer3.1/conv1/Conv.conv_rounding\n", + "%145 = subgraph(%144) # EncryptedTensor ∈ [0, 61]\n", + "%146 = ones() # EncryptedTensor ∈ [1, 1]\n", + "%147 = 0 # ClearScalar ∈ [0, 0]\n", + "%148 = multiply(%146, %147) # EncryptedTensor ∈ [0, 0]\n", + "%149 = (%148[:, :, 1:15, 1:15] = %145) # EncryptedTensor ∈ [0, 61]\n", + "%150 = [[[[-2 -1 ... -4 -2]]]] # ClearTensor ∈ [-31, 23] @ /layer3/layer3.1/conv2/Conv.conv\n", + "%151 = conv2d(%149, %150, [0 0 0 0 0 ... 
0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-3753, 2990] @ /layer3/layer3.1/conv2/Conv.conv\n", + "%152 = round_bit_pattern(%151, lsbs_to_remove=6, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-3776, 3008] @ /layer3/layer3.1/conv2/Conv.conv_rounding\n", + "%153 = subgraph(%152) # EncryptedTensor ∈ [-31, 29]\n", + "%154 = subgraph(%136) # EncryptedTensor ∈ [-32, 31]\n", + "%155 = subgraph(%153) # EncryptedTensor ∈ [-124, 102]\n", + "%156 = subgraph(%154) # EncryptedTensor ∈ [0, 130]\n", + "%157 = 1 # ClearScalar ∈ [1, 1]\n", + "%158 = multiply(%157, %156) # EncryptedTensor ∈ [0, 130]\n", + "%159 = add(%155, %158) # EncryptedTensor ∈ [-124, 137]\n", + "%160 = subgraph(%159) # EncryptedTensor ∈ [0, 63]\n", + "%161 = ones() # EncryptedTensor ∈ [1, 1]\n", + "%162 = 0 # ClearScalar ∈ [0, 0]\n", + "%163 = multiply(%161, %162) # EncryptedTensor ∈ [0, 0]\n", + "%164 = (%163[:, :, 1:15, 1:15] = %160) # EncryptedTensor ∈ [0, 63]\n", + "%165 = [[[[-1 -1 ... 1 0]]]] # ClearTensor ∈ [-16, 31] @ /layer4/layer4.0/conv1/Conv.conv\n", + "%166 = conv2d(%164, %165, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(2, 2), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-3987, 2570] @ /layer4/layer4.0/conv1/Conv.conv\n", + "%167 = round_bit_pattern(%166, lsbs_to_remove=7, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-3968, 2560] @ /layer4/layer4.0/conv1/Conv.conv_rounding\n", + "%168 = subgraph(%167) # EncryptedTensor ∈ [0, 56]\n", + "%169 = ones() # EncryptedTensor ∈ [1, 1]\n", + "%170 = 0 # ClearScalar ∈ [0, 0]\n", + "%171 = multiply(%169, %170) # EncryptedTensor ∈ [0, 0]\n", + "%172 = (%171[:, :, 1:8, 1:8] = %168) # EncryptedTensor ∈ [0, 56]\n", + "%173 = [[[[ 0 -1 ... 0 1]]]] # ClearTensor ∈ [-19, 31] @ /layer4/layer4.0/conv2/Conv.conv\n", + "%174 = conv2d(%172, %173, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-3351, 2455] @ /layer4/layer4.0/conv2/Conv.conv\n", + "%175 = round_bit_pattern(%174, lsbs_to_remove=6, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-3328, 2432] @ /layer4/layer4.0/conv2/Conv.conv_rounding\n", + "%176 = subgraph(%159) # EncryptedTensor ∈ [0, 63]\n", + "%177 = [[[[ 0]] ... [[ 0]]]] # ClearTensor ∈ [-26, 31] @ /layer4/layer4.0/downsample/downsample.0/Conv.conv\n", + "%178 = conv2d(%176, %177, [0 0 0 0 0 ... 
0 0 0 0 0], pads=[0, 0, 0, 0], strides=(2, 2), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-954, 1167] @ /layer4/layer4.0/downsample/downsample.0/Conv.conv\n", + "%179 = round_bit_pattern(%178, lsbs_to_remove=5, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-960, 1152] @ /layer4/layer4.0/downsample/downsample.0/Conv.conv_rounding\n", + "%180 = subgraph(%175) # EncryptedTensor ∈ [-31, 26]\n", + "%181 = subgraph(%179) # EncryptedTensor ∈ [-31, 31]\n", + "%182 = subgraph(%180) # EncryptedTensor ∈ [-101, 98]\n", + "%183 = subgraph(%181) # EncryptedTensor ∈ [-62, 58]\n", + "%184 = 1 # ClearScalar ∈ [1, 1]\n", + "%185 = multiply(%184, %183) # EncryptedTensor ∈ [-62, 58]\n", + "%186 = add(%182, %185) # EncryptedTensor ∈ [-119, 124]\n", + "%187 = subgraph(%186) # EncryptedTensor ∈ [0, 57]\n", + "%188 = ones() # EncryptedTensor ∈ [1, 1]\n", + "%189 = 0 # ClearScalar ∈ [0, 0]\n", + "%190 = multiply(%188, %189) # EncryptedTensor ∈ [0, 0]\n", + "%191 = (%190[:, :, 1:8, 1:8] = %187) # EncryptedTensor ∈ [0, 57]\n", + "%192 = [[[[-1 0 ... 0 0]]]] # ClearTensor ∈ [-19, 31] @ /layer4/layer4.1/conv1/Conv.conv\n", + "%193 = conv2d(%191, %192, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-4430, 2208] @ /layer4/layer4.1/conv1/Conv.conv\n", + "%194 = round_bit_pattern(%193, lsbs_to_remove=7, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-4480, 2176] @ /layer4/layer4.1/conv1/Conv.conv_rounding\n", + "%195 = subgraph(%194) # EncryptedTensor ∈ [0, 63]\n", + "%196 = ones() # EncryptedTensor ∈ [1, 1]\n", + "%197 = 0 # ClearScalar ∈ [0, 0]\n", + "%198 = multiply(%196, %197) # EncryptedTensor ∈ [0, 0]\n", + "%199 = (%198[:, :, 1:8, 1:8] = %195) # EncryptedTensor ∈ [0, 63]\n", + "%200 = [[[[ 0 1 ... 1 1]]]] # ClearTensor ∈ [-20, 31] @ /layer4/layer4.1/conv2/Conv.conv\n", + "%201 = conv2d(%199, %200, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-4275, 8105] @ /layer4/layer4.1/conv2/Conv.conv\n", + "%202 = round_bit_pattern(%201, lsbs_to_remove=7, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-4224, 8064] @ /layer4/layer4.1/conv2/Conv.conv_rounding\n", + "%203 = subgraph(%202) # EncryptedTensor ∈ [-32, 31]\n", + "%204 = subgraph(%186) # EncryptedTensor ∈ [-32, 25]\n", + "%205 = subgraph(%203) # EncryptedTensor ∈ [-79, 170]\n", + "%206 = subgraph(%204) # EncryptedTensor ∈ [0, 31]\n", + "%207 = 1 # ClearScalar ∈ [1, 1]\n", + "%208 = multiply(%207, %206) # EncryptedTensor ∈ [0, 31]\n", + "%209 = add(%205, %208) # EncryptedTensor ∈ [-79, 173]\n", + "%210 = subgraph(%209) # EncryptedTensor ∈ [0, 62]\n", + "%211 = [[[[1 1 1 ... 1 1 1]]]] # ClearTensor ∈ [0, 1] @ /avgpool/AveragePool.avgpool\n", + "%212 = conv2d(%210, %211, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [0, 1066] @ /avgpool/AveragePool.avgpool\n", + "%213 = round_bit_pattern(%212, lsbs_to_remove=4, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [0, 1072] @ /avgpool/AveragePool.avgpool_rounding\n", + "%214 = subgraph(%213) # EncryptedTensor ∈ [0, 252]\n", + "%215 = reshape(%214, newshape=(1, 512)) # EncryptedTensor ∈ [0, 252]\n", + "%216 = subgraph(%215) # EncryptedTensor ∈ [0, 62]\n", + "%217 = [[-1 -4 -1 ... 
1 -2 3]] # ClearTensor ∈ [-12, 31] @ /fc/Gemm.matmul\n", + "%218 = matmul(%216, %217) # EncryptedTensor ∈ [-2482, 6604] @ /fc/Gemm.matmul\n", + "return %218\n", + "\n", + "Subgraphs:\n", + "\n", + " %8 = subgraph(%7):\n", + "\n", + " %0 = input # EncryptedTensor @ /conv1/Conv.conv_rounding\n", + " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", + " %2 = 0 # ClearScalar\n", + " %3 = add(%1, %2) # EncryptedTensor\n", + " %4 = [[[[ -12]] ... [[ 12]]]] # ClearTensor\n", + " %5 = subtract(%3, %4) # EncryptedTensor\n", + " %6 = 0.0009595604707281525 # ClearScalar\n", + " %7 = multiply(%6, %5) # EncryptedTensor\n", + " %8 = [[[[ 2.300 ... 19e-01]]]] # ClearTensor\n", + " %9 = add(%7, %8) # EncryptedTensor\n", + " %10 = 0 # ClearScalar\n", + " %11 = maximum(%9, %10) # EncryptedTensor\n", + " %12 = 0.09438851724215917 # ClearScalar\n", + " %13 = divide(%11, %12) # EncryptedTensor\n", + " %14 = 0 # ClearScalar\n", + " %15 = add(%13, %14) # EncryptedTensor\n", + " %16 = rint(%15) # EncryptedTensor\n", + " %17 = 0 # ClearScalar\n", + " %18 = 63 # ClearScalar\n", + " %19 = clip(%16, %17, %18) # EncryptedTensor\n", + " %20 = astype(%19, dtype=int_) # EncryptedTensor\n", + " return %20\n", + "\n", + " %14 = subgraph(%13):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", + " %2 = 0 # ClearScalar\n", + " %3 = subtract(%1, %2) # EncryptedTensor\n", + " %4 = 0.09438851724215917 # ClearScalar\n", + " %5 = multiply(%3, %4) # EncryptedTensor\n", + " %6 = 0.09438851523020911 # ClearScalar\n", + " %7 = divide(%5, %6) # EncryptedTensor\n", + " %8 = 0 # ClearScalar\n", + " %9 = add(%7, %8) # EncryptedTensor\n", + " %10 = rint(%9) # EncryptedTensor\n", + " %11 = 0 # ClearScalar\n", + " %12 = 63 # ClearScalar\n", + " %13 = clip(%10, %11, %12) # EncryptedTensor\n", + " %14 = astype(%13, dtype=int_) # EncryptedTensor\n", + " return %14\n", + "\n", + " %22 = subgraph(%21):\n", + "\n", + " %0 = input # EncryptedTensor @ /layer1/layer1.0/conv1/Conv.conv_rounding\n", + " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", + " %2 = 0 # ClearScalar\n", + " %3 = add(%1, %2) # EncryptedTensor\n", + " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", + " %5 = subtract(%3, %4) # EncryptedTensor\n", + " %6 = 0.0011402867925128443 # ClearScalar\n", + " %7 = multiply(%6, %5) # EncryptedTensor\n", + " %8 = [[[[ 3.686 ... 25e-01]]]] # ClearTensor\n", + " %9 = add(%7, %8) # EncryptedTensor\n", + " %10 = 0 # ClearScalar\n", + " %11 = maximum(%9, %10) # EncryptedTensor\n", + " %12 = 0.04016368453965985 # ClearScalar\n", + " %13 = divide(%11, %12) # EncryptedTensor\n", + " %14 = 0 # ClearScalar\n", + " %15 = add(%13, %14) # EncryptedTensor\n", + " %16 = rint(%15) # EncryptedTensor\n", + " %17 = 0 # ClearScalar\n", + " %18 = 63 # ClearScalar\n", + " %19 = clip(%16, %17, %18) # EncryptedTensor\n", + " %20 = astype(%19, dtype=int_) # EncryptedTensor\n", + " return %20\n", + "\n", + " %30 = subgraph(%29):\n", + "\n", + " %0 = input # EncryptedTensor @ /layer1/layer1.0/conv2/Conv.conv_rounding\n", + " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", + " %2 = 0 # ClearScalar\n", + " %3 = add(%1, %2) # EncryptedTensor\n", + " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", + " %5 = subtract(%3, %4) # EncryptedTensor\n", + " %6 = 0.0009986261721062137 # ClearScalar\n", + " %7 = multiply(%6, %5) # EncryptedTensor\n", + " %8 = [[[[ 1.030 ... 
10e-01]]]] # ClearTensor\n", + " %9 = add(%7, %8) # EncryptedTensor\n", + " %10 = 0.13032058768807805 # ClearScalar\n", + " %11 = divide(%9, %10) # EncryptedTensor\n", + " %12 = 2 # ClearScalar\n", + " %13 = add(%11, %12) # EncryptedTensor\n", + " %14 = rint(%13) # EncryptedTensor\n", + " %15 = -32 # ClearScalar\n", + " %16 = 31 # ClearScalar\n", + " %17 = clip(%14, %15, %16) # EncryptedTensor\n", + " %18 = astype(%17, dtype=int_) # EncryptedTensor\n", + " return %18\n", + "\n", + " %31 = subgraph(%13):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", + " %2 = 0 # ClearScalar\n", + " %3 = subtract(%1, %2) # EncryptedTensor\n", + " %4 = 0.09438851724215917 # ClearScalar\n", + " %5 = multiply(%3, %4) # EncryptedTensor\n", + " %6 = 0.09438851523020911 # ClearScalar\n", + " %7 = divide(%5, %6) # EncryptedTensor\n", + " %8 = -32 # ClearScalar\n", + " %9 = add(%7, %8) # EncryptedTensor\n", + " %10 = rint(%9) # EncryptedTensor\n", + " %11 = -32 # ClearScalar\n", + " %12 = 31 # ClearScalar\n", + " %13 = clip(%10, %11, %12) # EncryptedTensor\n", + " %14 = astype(%13, dtype=int_) # EncryptedTensor\n", + " return %14\n", + "\n", + " %32 = subgraph(%30):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = 2.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.13032058768807805 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0.042009954818961026 # ClearScalar\n", + " %6 = divide(%4, %5) # EncryptedTensor\n", + " %7 = 0 # ClearScalar\n", + " %8 = add(%6, %7) # EncryptedTensor\n", + " %9 = rint(%8) # EncryptedTensor\n", + " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", + " return %10\n", + "\n", + " %33 = subgraph(%31):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = -32.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.09438851523020911 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0.042009954818961026 # ClearScalar\n", + " %6 = divide(%4, %5) # EncryptedTensor\n", + " %7 = 0 # ClearScalar\n", + " %8 = add(%6, %7) # EncryptedTensor\n", + " %9 = rint(%8) # EncryptedTensor\n", + " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", + " return %10\n", + "\n", + " %37 = subgraph(%36):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = 0.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.042009954818961026 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0 # ClearScalar\n", + " %6 = maximum(%4, %5) # EncryptedTensor\n", + " %7 = 0.09935687727024116 # ClearScalar\n", + " %8 = divide(%6, %7) # EncryptedTensor\n", + " %9 = 0 # ClearScalar\n", + " %10 = add(%8, %9) # EncryptedTensor\n", + " %11 = rint(%10) # EncryptedTensor\n", + " %12 = 0 # ClearScalar\n", + " %13 = 63 # ClearScalar\n", + " %14 = clip(%11, %12, %13) # EncryptedTensor\n", + " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", + " return %15\n", + "\n", + " %45 = subgraph(%44):\n", + "\n", + " %0 = input # EncryptedTensor @ /layer1/layer1.1/conv1/Conv.conv_rounding\n", + " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", + " %2 = 0 # ClearScalar\n", + " %3 = add(%1, %2) # EncryptedTensor\n", + " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", + " %5 = subtract(%3, %4) # EncryptedTensor\n", + " %6 = 0.0008972804642336202 # ClearScalar\n", + " %7 = multiply(%6, %5) # EncryptedTensor\n", + " %8 = [[[[ 0.203 ... 
52077 ]]]] # ClearTensor\n", + " %9 = add(%7, %8) # EncryptedTensor\n", + " %10 = 0 # ClearScalar\n", + " %11 = maximum(%9, %10) # EncryptedTensor\n", + " %12 = 0.05179850536952156 # ClearScalar\n", + " %13 = divide(%11, %12) # EncryptedTensor\n", + " %14 = 0 # ClearScalar\n", + " %15 = add(%13, %14) # EncryptedTensor\n", + " %16 = rint(%15) # EncryptedTensor\n", + " %17 = 0 # ClearScalar\n", + " %18 = 63 # ClearScalar\n", + " %19 = clip(%16, %17, %18) # EncryptedTensor\n", + " %20 = astype(%19, dtype=int_) # EncryptedTensor\n", + " return %20\n", + "\n", + " %53 = subgraph(%52):\n", + "\n", + " %0 = input # EncryptedTensor @ /layer1/layer1.1/conv2/Conv.conv_rounding\n", + " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", + " %2 = 0 # ClearScalar\n", + " %3 = add(%1, %2) # EncryptedTensor\n", + " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", + " %5 = subtract(%3, %4) # EncryptedTensor\n", + " %6 = 0.0017501061706694404 # ClearScalar\n", + " %7 = multiply(%6, %5) # EncryptedTensor\n", + " %8 = [[[[-0.113 ... 587016]]]] # ClearTensor\n", + " %9 = add(%7, %8) # EncryptedTensor\n", + " %10 = 0.24257694819947365 # ClearScalar\n", + " %11 = divide(%9, %10) # EncryptedTensor\n", + " %12 = 6 # ClearScalar\n", + " %13 = add(%11, %12) # EncryptedTensor\n", + " %14 = rint(%13) # EncryptedTensor\n", + " %15 = -32 # ClearScalar\n", + " %16 = 31 # ClearScalar\n", + " %17 = clip(%14, %15, %16) # EncryptedTensor\n", + " %18 = astype(%17, dtype=int_) # EncryptedTensor\n", + " return %18\n", + "\n", + " %54 = subgraph(%36):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = 0.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.042009954818961026 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0 # ClearScalar\n", + " %6 = maximum(%4, %5) # EncryptedTensor\n", + " %7 = 0.09935687727024116 # ClearScalar\n", + " %8 = divide(%6, %7) # EncryptedTensor\n", + " %9 = -32 # ClearScalar\n", + " %10 = add(%8, %9) # EncryptedTensor\n", + " %11 = rint(%10) # EncryptedTensor\n", + " %12 = -32 # ClearScalar\n", + " %13 = 31 # ClearScalar\n", + " %14 = clip(%11, %12, %13) # EncryptedTensor\n", + " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", + " return %15\n", + "\n", + " %55 = subgraph(%53):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = 6.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.24257694819947365 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0.06837324171233662 # ClearScalar\n", + " %6 = divide(%4, %5) # EncryptedTensor\n", + " %7 = 0 # ClearScalar\n", + " %8 = add(%6, %7) # EncryptedTensor\n", + " %9 = rint(%8) # EncryptedTensor\n", + " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", + " return %10\n", + "\n", + " %56 = subgraph(%54):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = -32.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.09935687727024116 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0.06837324171233662 # ClearScalar\n", + " %6 = divide(%4, %5) # EncryptedTensor\n", + " %7 = 0 # ClearScalar\n", + " %8 = add(%6, %7) # EncryptedTensor\n", + " %9 = rint(%8) # EncryptedTensor\n", + " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", + " return %10\n", + "\n", + " %60 = subgraph(%59):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = 0.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.06837324171233662 # ClearScalar\n", + " %4 = multiply(%3, %2) # 
EncryptedTensor\n", + " %5 = 0 # ClearScalar\n", + " %6 = maximum(%4, %5) # EncryptedTensor\n", + " %7 = 0.14000235207764164 # ClearScalar\n", + " %8 = divide(%6, %7) # EncryptedTensor\n", + " %9 = 0 # ClearScalar\n", + " %10 = add(%8, %9) # EncryptedTensor\n", + " %11 = rint(%10) # EncryptedTensor\n", + " %12 = 0 # ClearScalar\n", + " %13 = 63 # ClearScalar\n", + " %14 = clip(%11, %12, %13) # EncryptedTensor\n", + " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", + " return %15\n", + "\n", + " %68 = subgraph(%67):\n", + "\n", + " %0 = input # EncryptedTensor @ /layer2/layer2.0/conv1/Conv.conv_rounding\n", + " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", + " %2 = 0 # ClearScalar\n", + " %3 = add(%1, %2) # EncryptedTensor\n", + " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", + " %5 = subtract(%3, %4) # EncryptedTensor\n", + " %6 = 0.0009605777806920079 # ClearScalar\n", + " %7 = multiply(%6, %5) # EncryptedTensor\n", + " %8 = [[[[-1.222 ... 35e-02]]]] # ClearTensor\n", + " %9 = add(%7, %8) # EncryptedTensor\n", + " %10 = 0 # ClearScalar\n", + " %11 = maximum(%9, %10) # EncryptedTensor\n", + " %12 = 0.04240615494644075 # ClearScalar\n", + " %13 = divide(%11, %12) # EncryptedTensor\n", + " %14 = 0 # ClearScalar\n", + " %15 = add(%13, %14) # EncryptedTensor\n", + " %16 = rint(%15) # EncryptedTensor\n", + " %17 = 0 # ClearScalar\n", + " %18 = 63 # ClearScalar\n", + " %19 = clip(%16, %17, %18) # EncryptedTensor\n", + " %20 = astype(%19, dtype=int_) # EncryptedTensor\n", + " return %20\n", + "\n", + " %76 = subgraph(%59):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = 0.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.06837324171233662 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0 # ClearScalar\n", + " %6 = maximum(%4, %5) # EncryptedTensor\n", + " %7 = 0.14000235207764164 # ClearScalar\n", + " %8 = divide(%6, %7) # EncryptedTensor\n", + " %9 = 0 # ClearScalar\n", + " %10 = add(%8, %9) # EncryptedTensor\n", + " %11 = rint(%10) # EncryptedTensor\n", + " %12 = 0 # ClearScalar\n", + " %13 = 63 # ClearScalar\n", + " %14 = clip(%11, %12, %13) # EncryptedTensor\n", + " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", + " return %15\n", + "\n", + " %80 = subgraph(%75):\n", + "\n", + " %0 = input # EncryptedTensor @ /layer2/layer2.0/conv2/Conv.conv_rounding\n", + " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", + " %2 = 0 # ClearScalar\n", + " %3 = add(%1, %2) # EncryptedTensor\n", + " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", + " %5 = subtract(%3, %4) # EncryptedTensor\n", + " %6 = 0.0009905944290260569 # ClearScalar\n", + " %7 = multiply(%6, %5) # EncryptedTensor\n", + " %8 = [[[[ 3.249 ... 09e-02]]]] # ClearTensor\n", + " %9 = add(%7, %8) # EncryptedTensor\n", + " %10 = 0.1534375406464771 # ClearScalar\n", + " %11 = divide(%9, %10) # EncryptedTensor\n", + " %12 = -10 # ClearScalar\n", + " %13 = add(%11, %12) # EncryptedTensor\n", + " %14 = rint(%13) # EncryptedTensor\n", + " %15 = -32 # ClearScalar\n", + " %16 = 31 # ClearScalar\n", + " %17 = clip(%14, %15, %16) # EncryptedTensor\n", + " %18 = astype(%17, dtype=int_) # EncryptedTensor\n", + " return %18\n", + "\n", + " %81 = subgraph(%79):\n", + "\n", + " %0 = input # EncryptedTensor @ /layer2/layer2.0/downsample/downsample.0/Conv.conv_rounding\n", + " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", + " %2 = 0 # ClearScalar\n", + " %3 = add(%1, %2) # EncryptedTensor\n", + " %4 = [[[[0]] [ ... 
] [[0]]]] # ClearTensor\n", + " %5 = subtract(%3, %4) # EncryptedTensor\n", + " %6 = 0.003126346405717952 # ClearScalar\n", + " %7 = multiply(%6, %5) # EncryptedTensor\n", + " %8 = [[[[ 0.184 ... 109817]]]] # ClearTensor\n", + " %9 = add(%7, %8) # EncryptedTensor\n", + " %10 = 0.1084907392607087 # ClearScalar\n", + " %11 = divide(%9, %10) # EncryptedTensor\n", + " %12 = 2 # ClearScalar\n", + " %13 = add(%11, %12) # EncryptedTensor\n", + " %14 = rint(%13) # EncryptedTensor\n", + " %15 = -32 # ClearScalar\n", + " %16 = 31 # ClearScalar\n", + " %17 = clip(%14, %15, %16) # EncryptedTensor\n", + " %18 = astype(%17, dtype=int_) # EncryptedTensor\n", + " return %18\n", + "\n", + " %82 = subgraph(%80):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = -10.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.1534375406464771 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0.04733662649264896 # ClearScalar\n", + " %6 = divide(%4, %5) # EncryptedTensor\n", + " %7 = 0 # ClearScalar\n", + " %8 = add(%6, %7) # EncryptedTensor\n", + " %9 = rint(%8) # EncryptedTensor\n", + " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", + " return %10\n", + "\n", + " %83 = subgraph(%81):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = 2.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.1084907392607087 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0.04733662649264896 # ClearScalar\n", + " %6 = divide(%4, %5) # EncryptedTensor\n", + " %7 = 0 # ClearScalar\n", + " %8 = add(%6, %7) # EncryptedTensor\n", + " %9 = rint(%8) # EncryptedTensor\n", + " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", + " return %10\n", + "\n", + " %87 = subgraph(%86):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = 0.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.04733662649264896 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0 # ClearScalar\n", + " %6 = maximum(%4, %5) # EncryptedTensor\n", + " %7 = 0.11045212848284759 # ClearScalar\n", + " %8 = divide(%6, %7) # EncryptedTensor\n", + " %9 = 0 # ClearScalar\n", + " %10 = add(%8, %9) # EncryptedTensor\n", + " %11 = rint(%10) # EncryptedTensor\n", + " %12 = 0 # ClearScalar\n", + " %13 = 63 # ClearScalar\n", + " %14 = clip(%11, %12, %13) # EncryptedTensor\n", + " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", + " return %15\n", + "\n", + " %95 = subgraph(%94):\n", + "\n", + " %0 = input # EncryptedTensor @ /layer2/layer2.1/conv1/Conv.conv_rounding\n", + " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", + " %2 = 0 # ClearScalar\n", + " %3 = add(%1, %2) # EncryptedTensor\n", + " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", + " %5 = subtract(%3, %4) # EncryptedTensor\n", + " %6 = 0.0011082646273757157 # ClearScalar\n", + " %7 = multiply(%6, %5) # EncryptedTensor\n", + " %8 = [[[[ 5.801 ... 
23e-01]]]] # ClearTensor\n", + " %9 = add(%7, %8) # EncryptedTensor\n", + " %10 = 0 # ClearScalar\n", + " %11 = maximum(%9, %10) # EncryptedTensor\n", + " %12 = 0.05601460751206594 # ClearScalar\n", + " %13 = divide(%11, %12) # EncryptedTensor\n", + " %14 = 0 # ClearScalar\n", + " %15 = add(%13, %14) # EncryptedTensor\n", + " %16 = rint(%15) # EncryptedTensor\n", + " %17 = 0 # ClearScalar\n", + " %18 = 63 # ClearScalar\n", + " %19 = clip(%16, %17, %18) # EncryptedTensor\n", + " %20 = astype(%19, dtype=int_) # EncryptedTensor\n", + " return %20\n", + "\n", + " %103 = subgraph(%102):\n", + "\n", + " %0 = input # EncryptedTensor @ /layer2/layer2.1/conv2/Conv.conv_rounding\n", + " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", + " %2 = 0 # ClearScalar\n", + " %3 = add(%1, %2) # EncryptedTensor\n", + " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", + " %5 = subtract(%3, %4) # EncryptedTensor\n", + " %6 = 0.0015848130037245337 # ClearScalar\n", + " %7 = multiply(%6, %5) # EncryptedTensor\n", + " %8 = [[[[-1.135 ... 16e-02]]]] # ClearTensor\n", + " %9 = add(%7, %8) # EncryptedTensor\n", + " %10 = 0.1418038087531616 # ClearScalar\n", + " %11 = divide(%9, %10) # EncryptedTensor\n", + " %12 = 1 # ClearScalar\n", + " %13 = add(%11, %12) # EncryptedTensor\n", + " %14 = rint(%13) # EncryptedTensor\n", + " %15 = -32 # ClearScalar\n", + " %16 = 31 # ClearScalar\n", + " %17 = clip(%14, %15, %16) # EncryptedTensor\n", + " %18 = astype(%17, dtype=int_) # EncryptedTensor\n", + " return %18\n", + "\n", + " %104 = subgraph(%86):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = 0.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.04733662649264896 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0 # ClearScalar\n", + " %6 = maximum(%4, %5) # EncryptedTensor\n", + " %7 = 0.11045212848284759 # ClearScalar\n", + " %8 = divide(%6, %7) # EncryptedTensor\n", + " %9 = -32 # ClearScalar\n", + " %10 = add(%8, %9) # EncryptedTensor\n", + " %11 = rint(%10) # EncryptedTensor\n", + " %12 = -32 # ClearScalar\n", + " %13 = 31 # ClearScalar\n", + " %14 = clip(%11, %12, %13) # EncryptedTensor\n", + " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", + " return %15\n", + "\n", + " %105 = subgraph(%103):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = 1.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.1418038087531616 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0.04268310452940874 # ClearScalar\n", + " %6 = divide(%4, %5) # EncryptedTensor\n", + " %7 = 0 # ClearScalar\n", + " %8 = add(%6, %7) # EncryptedTensor\n", + " %9 = rint(%8) # EncryptedTensor\n", + " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", + " return %10\n", + "\n", + " %106 = subgraph(%104):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = -32.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.11045212848284759 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0.04268310452940874 # ClearScalar\n", + " %6 = divide(%4, %5) # EncryptedTensor\n", + " %7 = 0 # ClearScalar\n", + " %8 = add(%6, %7) # EncryptedTensor\n", + " %9 = rint(%8) # EncryptedTensor\n", + " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", + " return %10\n", + "\n", + " %110 = subgraph(%109):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = 0.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.04268310452940874 # ClearScalar\n", + " %4 = multiply(%3, %2) # 
EncryptedTensor\n", + " %5 = 0 # ClearScalar\n", + " %6 = maximum(%4, %5) # EncryptedTensor\n", + " %7 = 0.09959391056862038 # ClearScalar\n", + " %8 = divide(%6, %7) # EncryptedTensor\n", + " %9 = 0 # ClearScalar\n", + " %10 = add(%8, %9) # EncryptedTensor\n", + " %11 = rint(%10) # EncryptedTensor\n", + " %12 = 0 # ClearScalar\n", + " %13 = 63 # ClearScalar\n", + " %14 = clip(%11, %12, %13) # EncryptedTensor\n", + " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", + " return %15\n", + "\n", + " %118 = subgraph(%117):\n", + "\n", + " %0 = input # EncryptedTensor @ /layer3/layer3.0/conv1/Conv.conv_rounding\n", + " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", + " %2 = 0 # ClearScalar\n", + " %3 = add(%1, %2) # EncryptedTensor\n", + " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", + " %5 = subtract(%3, %4) # EncryptedTensor\n", + " %6 = 0.0007572907035253597 # ClearScalar\n", + " %7 = multiply(%6, %5) # EncryptedTensor\n", + " %8 = [[[[-2.011 ... 45e-01]]]] # ClearTensor\n", + " %9 = add(%7, %8) # EncryptedTensor\n", + " %10 = 0 # ClearScalar\n", + " %11 = maximum(%9, %10) # EncryptedTensor\n", + " %12 = 0.04280234014005014 # ClearScalar\n", + " %13 = divide(%11, %12) # EncryptedTensor\n", + " %14 = 0 # ClearScalar\n", + " %15 = add(%13, %14) # EncryptedTensor\n", + " %16 = rint(%15) # EncryptedTensor\n", + " %17 = 0 # ClearScalar\n", + " %18 = 63 # ClearScalar\n", + " %19 = clip(%16, %17, %18) # EncryptedTensor\n", + " %20 = astype(%19, dtype=int_) # EncryptedTensor\n", + " return %20\n", + "\n", + " %126 = subgraph(%109):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = 0.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.04268310452940874 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0 # ClearScalar\n", + " %6 = maximum(%4, %5) # EncryptedTensor\n", + " %7 = 0.09959391056862038 # ClearScalar\n", + " %8 = divide(%6, %7) # EncryptedTensor\n", + " %9 = 0 # ClearScalar\n", + " %10 = add(%8, %9) # EncryptedTensor\n", + " %11 = rint(%10) # EncryptedTensor\n", + " %12 = 0 # ClearScalar\n", + " %13 = 63 # ClearScalar\n", + " %14 = clip(%11, %12, %13) # EncryptedTensor\n", + " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", + " return %15\n", + "\n", + " %130 = subgraph(%125):\n", + "\n", + " %0 = input # EncryptedTensor @ /layer3/layer3.0/conv2/Conv.conv_rounding\n", + " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", + " %2 = 0 # ClearScalar\n", + " %3 = add(%1, %2) # EncryptedTensor\n", + " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", + " %5 = subtract(%3, %4) # EncryptedTensor\n", + " %6 = 0.0007786148656779211 # ClearScalar\n", + " %7 = multiply(%6, %5) # EncryptedTensor\n", + " %8 = [[[[ 0.159 ... 642246]]]] # ClearTensor\n", + " %9 = add(%7, %8) # EncryptedTensor\n", + " %10 = 0.11637498747540001 # ClearScalar\n", + " %11 = divide(%9, %10) # EncryptedTensor\n", + " %12 = -5 # ClearScalar\n", + " %13 = add(%11, %12) # EncryptedTensor\n", + " %14 = rint(%13) # EncryptedTensor\n", + " %15 = -32 # ClearScalar\n", + " %16 = 31 # ClearScalar\n", + " %17 = clip(%14, %15, %16) # EncryptedTensor\n", + " %18 = astype(%17, dtype=int_) # EncryptedTensor\n", + " return %18\n", + "\n", + " %131 = subgraph(%129):\n", + "\n", + " %0 = input # EncryptedTensor @ /layer3/layer3.0/downsample/downsample.0/Conv.conv_rounding\n", + " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", + " %2 = 0 # ClearScalar\n", + " %3 = add(%1, %2) # EncryptedTensor\n", + " %4 = [[[[0]] [ ... 
] [[0]]]] # ClearTensor\n", + " %5 = subtract(%3, %4) # EncryptedTensor\n", + " %6 = 0.0013113023333722424 # ClearScalar\n", + " %7 = multiply(%6, %5) # EncryptedTensor\n", + " %8 = [[[[ 0.036 ... 50447 ]]]] # ClearTensor\n", + " %9 = add(%7, %8) # EncryptedTensor\n", + " %10 = 0.045917581353283936 # ClearScalar\n", + " %11 = divide(%9, %10) # EncryptedTensor\n", + " %12 = 7 # ClearScalar\n", + " %13 = add(%11, %12) # EncryptedTensor\n", + " %14 = rint(%13) # EncryptedTensor\n", + " %15 = -32 # ClearScalar\n", + " %16 = 31 # ClearScalar\n", + " %17 = clip(%14, %15, %16) # EncryptedTensor\n", + " %18 = astype(%17, dtype=int_) # EncryptedTensor\n", + " return %18\n", + "\n", + " %132 = subgraph(%130):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = -5.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.11637498747540001 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0.030653778146597723 # ClearScalar\n", + " %6 = divide(%4, %5) # EncryptedTensor\n", + " %7 = 0 # ClearScalar\n", + " %8 = add(%6, %7) # EncryptedTensor\n", + " %9 = rint(%8) # EncryptedTensor\n", + " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", + " return %10\n", + "\n", + " %133 = subgraph(%131):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = 7.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.045917581353283936 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0.030653778146597723 # ClearScalar\n", + " %6 = divide(%4, %5) # EncryptedTensor\n", + " %7 = 0 # ClearScalar\n", + " %8 = add(%6, %7) # EncryptedTensor\n", + " %9 = rint(%8) # EncryptedTensor\n", + " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", + " return %10\n", + "\n", + " %137 = subgraph(%136):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = 0.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.030653778146597723 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0 # ClearScalar\n", + " %6 = maximum(%4, %5) # EncryptedTensor\n", + " %7 = 0.07103891443497251 # ClearScalar\n", + " %8 = divide(%6, %7) # EncryptedTensor\n", + " %9 = 0 # ClearScalar\n", + " %10 = add(%8, %9) # EncryptedTensor\n", + " %11 = rint(%10) # EncryptedTensor\n", + " %12 = 0 # ClearScalar\n", + " %13 = 63 # ClearScalar\n", + " %14 = clip(%11, %12, %13) # EncryptedTensor\n", + " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", + " return %15\n", + "\n", + " %145 = subgraph(%144):\n", + "\n", + " %0 = input # EncryptedTensor @ /layer3/layer3.1/conv1/Conv.conv_rounding\n", + " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", + " %2 = 0 # ClearScalar\n", + " %3 = add(%1, %2) # EncryptedTensor\n", + " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", + " %5 = subtract(%3, %4) # EncryptedTensor\n", + " %6 = 0.0006241869374887713 # ClearScalar\n", + " %7 = multiply(%6, %5) # EncryptedTensor\n", + " %8 = [[[[-7.669 ... 
21e-02]]]] # ClearTensor\n", + " %9 = add(%7, %8) # EncryptedTensor\n", + " %10 = 0 # ClearScalar\n", + " %11 = maximum(%9, %10) # EncryptedTensor\n", + " %12 = 0.035695123544226944 # ClearScalar\n", + " %13 = divide(%11, %12) # EncryptedTensor\n", + " %14 = 0 # ClearScalar\n", + " %15 = add(%13, %14) # EncryptedTensor\n", + " %16 = rint(%15) # EncryptedTensor\n", + " %17 = 0 # ClearScalar\n", + " %18 = 63 # ClearScalar\n", + " %19 = clip(%16, %17, %18) # EncryptedTensor\n", + " %20 = astype(%19, dtype=int_) # EncryptedTensor\n", + " return %20\n", + "\n", + " %153 = subgraph(%152):\n", + "\n", + " %0 = input # EncryptedTensor @ /layer3/layer3.1/conv2/Conv.conv_rounding\n", + " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", + " %2 = 0 # ClearScalar\n", + " %3 = add(%1, %2) # EncryptedTensor\n", + " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", + " %5 = subtract(%3, %4) # EncryptedTensor\n", + " %6 = 0.001117051080468814 # ClearScalar\n", + " %7 = multiply(%6, %5) # EncryptedTensor\n", + " %8 = [[[[ 1.028 ... 86e-02]]]] # ClearTensor\n", + " %9 = add(%7, %8) # EncryptedTensor\n", + " %10 = 0.12919705453311078 # ClearScalar\n", + " %11 = divide(%9, %10) # EncryptedTensor\n", + " %12 = 2 # ClearScalar\n", + " %13 = add(%11, %12) # EncryptedTensor\n", + " %14 = rint(%13) # EncryptedTensor\n", + " %15 = -32 # ClearScalar\n", + " %16 = 31 # ClearScalar\n", + " %17 = clip(%14, %15, %16) # EncryptedTensor\n", + " %18 = astype(%17, dtype=int_) # EncryptedTensor\n", + " return %18\n", + "\n", + " %154 = subgraph(%136):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = 0.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.030653778146597723 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0 # ClearScalar\n", + " %6 = maximum(%4, %5) # EncryptedTensor\n", + " %7 = 0.07103891443497251 # ClearScalar\n", + " %8 = divide(%6, %7) # EncryptedTensor\n", + " %9 = -32 # ClearScalar\n", + " %10 = add(%8, %9) # EncryptedTensor\n", + " %11 = rint(%10) # EncryptedTensor\n", + " %12 = -32 # ClearScalar\n", + " %13 = 31 # ClearScalar\n", + " %14 = clip(%11, %12, %13) # EncryptedTensor\n", + " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", + " return %15\n", + "\n", + " %155 = subgraph(%153):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = 2.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.12919705453311078 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0.0343202671787269 # ClearScalar\n", + " %6 = divide(%4, %5) # EncryptedTensor\n", + " %7 = 0 # ClearScalar\n", + " %8 = add(%6, %7) # EncryptedTensor\n", + " %9 = rint(%8) # EncryptedTensor\n", + " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", + " return %10\n", + "\n", + " %156 = subgraph(%154):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = -32.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.07103891443497251 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0.0343202671787269 # ClearScalar\n", + " %6 = divide(%4, %5) # EncryptedTensor\n", + " %7 = 0 # ClearScalar\n", + " %8 = add(%6, %7) # EncryptedTensor\n", + " %9 = rint(%8) # EncryptedTensor\n", + " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", + " return %10\n", + "\n", + " %160 = subgraph(%159):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = 0.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.0343202671787269 # ClearScalar\n", + " %4 = multiply(%3, %2) # 
EncryptedTensor\n", + " %5 = 0 # ClearScalar\n", + " %6 = maximum(%4, %5) # EncryptedTensor\n", + " %7 = 0.07299866352300644 # ClearScalar\n", + " %8 = divide(%6, %7) # EncryptedTensor\n", + " %9 = 0 # ClearScalar\n", + " %10 = add(%8, %9) # EncryptedTensor\n", + " %11 = rint(%10) # EncryptedTensor\n", + " %12 = 0 # ClearScalar\n", + " %13 = 63 # ClearScalar\n", + " %14 = clip(%11, %12, %13) # EncryptedTensor\n", + " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", + " return %15\n", + "\n", + " %168 = subgraph(%167):\n", + "\n", + " %0 = input # EncryptedTensor @ /layer4/layer4.0/conv1/Conv.conv_rounding\n", + " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", + " %2 = 0 # ClearScalar\n", + " %3 = add(%1, %2) # EncryptedTensor\n", + " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", + " %5 = subtract(%3, %4) # EncryptedTensor\n", + " %6 = 0.0007102601305192167 # ClearScalar\n", + " %7 = multiply(%6, %5) # EncryptedTensor\n", + " %8 = [[[[-0.113 ... 820322]]]] # ClearTensor\n", + " %9 = add(%7, %8) # EncryptedTensor\n", + " %10 = 0 # ClearScalar\n", + " %11 = maximum(%9, %10) # EncryptedTensor\n", + " %12 = 0.03581596196540163 # ClearScalar\n", + " %13 = divide(%11, %12) # EncryptedTensor\n", + " %14 = 0 # ClearScalar\n", + " %15 = add(%13, %14) # EncryptedTensor\n", + " %16 = rint(%15) # EncryptedTensor\n", + " %17 = 0 # ClearScalar\n", + " %18 = 63 # ClearScalar\n", + " %19 = clip(%16, %17, %18) # EncryptedTensor\n", + " %20 = astype(%19, dtype=int_) # EncryptedTensor\n", + " return %20\n", + "\n", + " %176 = subgraph(%159):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = 0.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.0343202671787269 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0 # ClearScalar\n", + " %6 = maximum(%4, %5) # EncryptedTensor\n", + " %7 = 0.07299866352300644 # ClearScalar\n", + " %8 = divide(%6, %7) # EncryptedTensor\n", + " %9 = 0 # ClearScalar\n", + " %10 = add(%8, %9) # EncryptedTensor\n", + " %11 = rint(%10) # EncryptedTensor\n", + " %12 = 0 # ClearScalar\n", + " %13 = 63 # ClearScalar\n", + " %14 = clip(%11, %12, %13) # EncryptedTensor\n", + " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", + " return %15\n", + "\n", + " %180 = subgraph(%175):\n", + "\n", + " %0 = input # EncryptedTensor @ /layer4/layer4.0/conv2/Conv.conv_rounding\n", + " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", + " %2 = 0 # ClearScalar\n", + " %3 = add(%1, %2) # EncryptedTensor\n", + " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", + " %5 = subtract(%3, %4) # EncryptedTensor\n", + " %6 = 0.0013214642859977128 # ClearScalar\n", + " %7 = multiply(%6, %5) # EncryptedTensor\n", + " %8 = [[[[ 0.483 ... 12071 ]]]] # ClearTensor\n", + " %9 = add(%7, %8) # EncryptedTensor\n", + " %10 = 0.1327200367532172 # ClearScalar\n", + " %11 = divide(%9, %10) # EncryptedTensor\n", + " %12 = -2 # ClearScalar\n", + " %13 = add(%11, %12) # EncryptedTensor\n", + " %14 = rint(%13) # EncryptedTensor\n", + " %15 = -32 # ClearScalar\n", + " %16 = 31 # ClearScalar\n", + " %17 = clip(%14, %15, %16) # EncryptedTensor\n", + " %18 = astype(%17, dtype=int_) # EncryptedTensor\n", + " return %18\n", + "\n", + " %181 = subgraph(%179):\n", + "\n", + " %0 = input # EncryptedTensor @ /layer4/layer4.0/downsample/downsample.0/Conv.conv_rounding\n", + " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", + " %2 = 0 # ClearScalar\n", + " %3 = add(%1, %2) # EncryptedTensor\n", + " %4 = [[[[0]] [ ... 
] [[0]]]] # ClearTensor\n", + " %5 = subtract(%3, %4) # EncryptedTensor\n", + " %6 = 0.0023505757716739957 # ClearScalar\n", + " %7 = multiply(%6, %5) # EncryptedTensor\n", + " %8 = [[[[-3.347 ... 96e-01]]]] # ClearTensor\n", + " %9 = add(%7, %8) # EncryptedTensor\n", + " %10 = 0.0739922691886633 # ClearScalar\n", + " %11 = divide(%9, %10) # EncryptedTensor\n", + " %12 = 1 # ClearScalar\n", + " %13 = add(%11, %12) # EncryptedTensor\n", + " %14 = rint(%13) # EncryptedTensor\n", + " %15 = -32 # ClearScalar\n", + " %16 = 31 # ClearScalar\n", + " %17 = clip(%14, %15, %16) # EncryptedTensor\n", + " %18 = astype(%17, dtype=int_) # EncryptedTensor\n", + " return %18\n", + "\n", + " %182 = subgraph(%180):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = -2.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.1327200367532172 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0.038004591072633646 # ClearScalar\n", + " %6 = divide(%4, %5) # EncryptedTensor\n", + " %7 = 0 # ClearScalar\n", + " %8 = add(%6, %7) # EncryptedTensor\n", + " %9 = rint(%8) # EncryptedTensor\n", + " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", + " return %10\n", + "\n", + " %183 = subgraph(%181):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = 1.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.0739922691886633 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0.038004591072633646 # ClearScalar\n", + " %6 = divide(%4, %5) # EncryptedTensor\n", + " %7 = 0 # ClearScalar\n", + " %8 = add(%6, %7) # EncryptedTensor\n", + " %9 = rint(%8) # EncryptedTensor\n", + " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", + " return %10\n", + "\n", + " %187 = subgraph(%186):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = 0.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.038004591072633646 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0 # ClearScalar\n", + " %6 = maximum(%4, %5) # EncryptedTensor\n", + " %7 = 0.0820416569187012 # ClearScalar\n", + " %8 = divide(%6, %7) # EncryptedTensor\n", + " %9 = 0 # ClearScalar\n", + " %10 = add(%8, %9) # EncryptedTensor\n", + " %11 = rint(%10) # EncryptedTensor\n", + " %12 = 0 # ClearScalar\n", + " %13 = 63 # ClearScalar\n", + " %14 = clip(%11, %12, %13) # EncryptedTensor\n", + " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", + " return %15\n", + "\n", + " %195 = subgraph(%194):\n", + "\n", + " %0 = input # EncryptedTensor @ /layer4/layer4.1/conv1/Conv.conv_rounding\n", + " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", + " %2 = 0 # ClearScalar\n", + " %3 = add(%1, %2) # EncryptedTensor\n", + " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", + " %5 = subtract(%3, %4) # EncryptedTensor\n", + " %6 = 0.0007761212732305896 # ClearScalar\n", + " %7 = multiply(%6, %5) # EncryptedTensor\n", + " %8 = [[[[ 0.235 ... 
420937]]]] # ClearTensor\n", + " %9 = add(%7, %8) # EncryptedTensor\n", + " %10 = 0 # ClearScalar\n", + " %11 = maximum(%9, %10) # EncryptedTensor\n", + " %12 = 0.025757889498509705 # ClearScalar\n", + " %13 = divide(%11, %12) # EncryptedTensor\n", + " %14 = 0 # ClearScalar\n", + " %15 = add(%13, %14) # EncryptedTensor\n", + " %16 = rint(%15) # EncryptedTensor\n", + " %17 = 0 # ClearScalar\n", + " %18 = 63 # ClearScalar\n", + " %19 = clip(%16, %17, %18) # EncryptedTensor\n", + " %20 = astype(%19, dtype=int_) # EncryptedTensor\n", + " return %20\n", + "\n", + " %203 = subgraph(%202):\n", + "\n", + " %0 = input # EncryptedTensor @ /layer4/layer4.1/conv2/Conv.conv_rounding\n", + " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", + " %2 = 0 # ClearScalar\n", + " %3 = add(%1, %2) # EncryptedTensor\n", + " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", + " %5 = subtract(%3, %4) # EncryptedTensor\n", + " %6 = 0.0030313779134727974 # ClearScalar\n", + " %7 = multiply(%6, %5) # EncryptedTensor\n", + " %8 = [[[[ 0.465 ... 483366]]]] # ClearTensor\n", + " %9 = add(%7, %8) # EncryptedTensor\n", + " %10 = 0.5993932655712262 # ClearScalar\n", + " %11 = divide(%9, %10) # EncryptedTensor\n", + " %12 = -12 # ClearScalar\n", + " %13 = add(%11, %12) # EncryptedTensor\n", + " %14 = rint(%13) # EncryptedTensor\n", + " %15 = -32 # ClearScalar\n", + " %16 = 31 # ClearScalar\n", + " %17 = clip(%14, %15, %16) # EncryptedTensor\n", + " %18 = astype(%17, dtype=int_) # EncryptedTensor\n", + " return %18\n", + "\n", + " %204 = subgraph(%186):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = 0.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.038004591072633646 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0 # ClearScalar\n", + " %6 = maximum(%4, %5) # EncryptedTensor\n", + " %7 = 0.0820416569187012 # ClearScalar\n", + " %8 = divide(%6, %7) # EncryptedTensor\n", + " %9 = -32 # ClearScalar\n", + " %10 = add(%8, %9) # EncryptedTensor\n", + " %11 = rint(%10) # EncryptedTensor\n", + " %12 = -32 # ClearScalar\n", + " %13 = 31 # ClearScalar\n", + " %14 = clip(%11, %12, %13) # EncryptedTensor\n", + " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", + " return %15\n", + "\n", + " %205 = subgraph(%203):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = -12.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.5993932655712262 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0.15133961144301405 # ClearScalar\n", + " %6 = divide(%4, %5) # EncryptedTensor\n", + " %7 = 0 # ClearScalar\n", + " %8 = add(%6, %7) # EncryptedTensor\n", + " %9 = rint(%8) # EncryptedTensor\n", + " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", + " return %10\n", + "\n", + " %206 = subgraph(%204):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = -32.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.0820416569187012 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0.15133961144301405 # ClearScalar\n", + " %6 = divide(%4, %5) # EncryptedTensor\n", + " %7 = 0 # ClearScalar\n", + " %8 = add(%6, %7) # EncryptedTensor\n", + " %9 = rint(%8) # EncryptedTensor\n", + " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", + " return %10\n", + "\n", + " %210 = subgraph(%209):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = 0.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.15133961144301405 # ClearScalar\n", + " %4 = multiply(%3, %2) 
# EncryptedTensor\n", + " %5 = 0 # ClearScalar\n", + " %6 = maximum(%4, %5) # EncryptedTensor\n", + " %7 = 0.4227900256185789 # ClearScalar\n", + " %8 = divide(%6, %7) # EncryptedTensor\n", + " %9 = 0 # ClearScalar\n", + " %10 = add(%8, %9) # EncryptedTensor\n", + " %11 = rint(%10) # EncryptedTensor\n", + " %12 = 0 # ClearScalar\n", + " %13 = 63 # ClearScalar\n", + " %14 = clip(%11, %12, %13) # EncryptedTensor\n", + " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", + " return %15\n", + "\n", + " %214 = subgraph(%213):\n", + "\n", + " %0 = input # EncryptedTensor @ /avgpool/AveragePool.avgpool_rounding\n", + " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", + " %2 = 0.02040816326530612 # ClearScalar\n", + " %3 = multiply(%1, %2) # EncryptedTensor\n", + " %4 = 0 # ClearScalar\n", + " %5 = subtract(%3, %4) # EncryptedTensor\n", + " %6 = 0.4227900256185789 # ClearScalar\n", + " %7 = multiply(%5, %6) # EncryptedTensor\n", + " %8 = 0.03665712827162568 # ClearScalar\n", + " %9 = divide(%7, %8) # EncryptedTensor\n", + " %10 = 0 # ClearScalar\n", + " %11 = add(%9, %10) # EncryptedTensor\n", + " %12 = rint(%11) # EncryptedTensor\n", + " %13 = 0 # ClearScalar\n", + " %14 = 255 # ClearScalar\n", + " %15 = clip(%12, %13, %14) # EncryptedTensor\n", + " %16 = astype(%15, dtype=int_) # EncryptedTensor\n", + " return %16\n", + "\n", + " %216 = subgraph(%215):\n", + "\n", + " %0 = input # EncryptedTensor\n", + " %1 = 0.0 # ClearScalar\n", + " %2 = subtract(%0, %1) # EncryptedTensor\n", + " %3 = 0.03665712827162568 # ClearScalar\n", + " %4 = multiply(%3, %2) # EncryptedTensor\n", + " %5 = 0.14837409062324677 # ClearScalar\n", + " %6 = divide(%4, %5) # EncryptedTensor\n", + " %7 = 0 # ClearScalar\n", + " %8 = add(%6, %7) # EncryptedTensor\n", + " %9 = rint(%8) # EncryptedTensor\n", + " %10 = 0 # ClearScalar\n", + " %11 = 63 # ClearScalar\n", + " %12 = clip(%9, %10, %11) # EncryptedTensor\n", + " %13 = astype(%12, dtype=int_) # EncryptedTensor\n", + " return %13\n" + ] + } + ], + "source": [ + "print(q_module.fhe_circuit)" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "weight_matrix = q_module.quant_layers_dict['193'][1].constant_inputs[1].qvalues\n", + "\n", + "np.savetxt(\"weight_matrix_1000x512.csv\", weight_matrix, delimiter=\",\")" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "2856957" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "open(\"resnet18.mlir\", \"w\").write(q_module.fhe_circuit.mlir)\n", + "open(\"resnet18.graph\", \"w\").write(q_module.fhe_circuit.graph.format(maximum_constant_length=10000))" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "# Get quantized input\n", + "quantized_input = q_module.quantize_input(images.detach().numpy()[:1])\n", + "expected_quantized_output = q_module.fhe_circuit.graph(quantized_input)\n", + "\n", + "# Save input / output to disk\n", + "import numpy as np\n", + "\n", + "# Save quantized input and expected output to disk\n", + "np.save(\"quantized_input.npy\", quantized_input)\n", + "np.save(\"expected_quantized_output.npy\", expected_quantized_output)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[[[-24, -24, -24, ..., -27, -27, -27],\n", + " [-23, -24, -24, ..., 
-27, -27, -26],\n", + " [-23, -24, -24, ..., -27, -26, -26],\n", + " ...,\n", + " [-26, -25, -25, ..., -26, -26, -26],\n", + " [-26, -25, -25, ..., -26, -25, -25],\n", + " [-26, -25, -25, ..., -25, -25, -25]],\n", + "\n", + " [[-21, -21, -20, ..., -26, -25, -25],\n", + " [-20, -20, -20, ..., -25, -25, -25],\n", + " [-20, -20, -21, ..., -25, -25, -24],\n", + " ...,\n", + " [-24, -22, -22, ..., -24, -24, -23],\n", + " [-24, -23, -23, ..., -24, -23, -23],\n", + " [-24, -24, -23, ..., -24, -23, -23]],\n", + "\n", + " [[-19, -19, -19, ..., -23, -23, -22],\n", + " [-18, -19, -19, ..., -23, -22, -22],\n", + " [-18, -19, -19, ..., -22, -22, -22],\n", + " ...,\n", + " [-21, -20, -20, ..., -21, -21, -21],\n", + " [-21, -21, -21, ..., -21, -21, -21],\n", + " [-21, -21, -21, ..., -21, -21, -21]]]])" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "quantized_input" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Accuracy of the FHEResNet18 model on the images: 86.36363636%\n", + "Top-5 Accuracy of the FHEResNet18 model on the images: 95.45454545%\n" + ] + } + ], + "source": [ + "with torch.no_grad():\n", + " outputs = q_module.forward(images.detach().numpy(), fhe=\"disable\")\n", + " outputs = torch.from_numpy(outputs)\n", + " _, predicted = torch.max(outputs, 1)\n", + "\n", + "accuracy = calculate_accuracy(predicted, labels)\n", + "print(f\"Accuracy of the FHEResNet18 model on the images: {accuracy:.8f}%\")\n", + "\n", + "topk_accuracy = calculate_topk_accuracy(outputs, labels, topk=5)\n", + "print(f\"Top-5 Accuracy of the FHEResNet18 model on the images: {topk_accuracy:.8f}%\")" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Accuracy of the FHEResNet18 model on the images: 86.36363636%\n", + "Top-5 Accuracy of the FHEResNet18 model on the images: 95.45454545%\n" + ] + } + ], + "source": [ + "with torch.no_grad():\n", + " outputs = q_module.forward(images.detach().numpy(), fhe=\"simulate\")\n", + " outputs = torch.from_numpy(outputs)\n", + " _, predicted = torch.max(outputs, 1)\n", + "\n", + "accuracy = calculate_accuracy(predicted, labels)\n", + "print(f\"Accuracy of the FHEResNet18 model on the images: {accuracy:.8f}%\")\n", + "\n", + "topk_accuracy = calculate_topk_accuracy(outputs, labels, topk=5)\n", + "print(f\"Top-5 Accuracy of the FHEResNet18 model on the images: {topk_accuracy:.8f}%\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import time\n", + "\n", + "# Run a single example with fhe=execute to check the time\n", + "q_module.fhe_circuit.keygen()\n", + "\n", + "start = time.time()\n", + "outputs = q_module.forward(images.detach().numpy(), fhe=\"execute\")\n", + "end = time.time()\n", + "\n", + "print(f\"Time taken for one fhe execution: {end - start} seconds\")" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "139496591" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Save the FHE circuit to an MLIR file\n", + "# open(\"resnet.mlir\", \"w\").write(q_module.fhe_circuit.mlir)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + 
"output_type": "stream", + "text": [ + "/tmp/ipykernel_4331/766114212.py:51: UserWarning: FixedFormatter should only be used together with FixedLocator\n", + " ax.set_xticklabels([\"\"] + rounding_threshold_bits_range, rotation=45)\n", + "/tmp/ipykernel_4331/766114212.py:52: UserWarning: FixedFormatter should only be used together with FixedLocator\n", + " ax.set_yticklabels([\"\"] + list(n_bits_range))\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAwcAAALECAYAAABdU0zhAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/xnp5ZAAAACXBIWXMAAA9hAAAPYQGoP6dpAACsY0lEQVR4nOzdd3wU1frH8e+m904SSiAIht6boSsIIiCoVL3SFBXFxvWHIooVEQuKDS4oghcUC6BcUJBqRXrvvQQSCKkkpO3O7w9gYc1Gs0h2E/N5v17zgpx9ZubMkwT27HPOjMkwDEMAAAAAyj03V3cAAAAAQOnA4AAAAACAJAYHAAAAAC5icAAAAABAEoMDAAAAABcxOAAAAAAgicEBAAAAgIsYHAAAAACQxOAAAAAAwEUMDgCUaUlJSerTp4/Cw8NlMpn0zjvvuLpLV+X1119X7dq1ZbFY/jTuyJEjMplMmjlzprXthRdekMlkKpF+XTrfm2+++Zexf6cfAwYMUL9+/a5qXwDAtcPgAChBH374oUwmk1q1auXqrvxjPfHEE1q6dKnGjBmj//73v7rllluKjDWZTHa36Ohoa8ylN7jJycl2jxEbG6sePXoU67gmk0kPPvjgX15DRkaGJk6cqKeeekpubuXzn+WnnnpK8+bN09atW13dlSJ17NixyO/znj17JEmrV6+2aff09NR1112nQYMG6dChQ9ZjnT9/Xvfee6/q16+v4OBgBQQEqFGjRpo8ebLy8/NtzrtixQoNGzZMcXFx8vPz03XXXaf77rtPp06dcur1AygfPFzdAeCfbM6cOYqNjdW6det04MAB1axZ09Vd+sdZuXKlevXqpSeffLJY8TfffLMGDRpk0+br6/u3+2HvuJIUFxf3l/vOmDFDBQUFGjhw4FWd+9lnn9XTTz99VfuWFk2aNFHz5s311ltv6dNPP3V1d4pUpUoVTZgwoVB7pUqVbL5+9NFH1aJFC+Xn52vTpk2aNm2aFi9erO3bt6tSpUo6f/68du7cqVtvvVWxsbFyc3PTb7/9pieeeEJr167VZ599Zj3WU089pZSUFPXt21fXX3+9Dh06pPfff1+LFi3Sli1bbAa3APB3MTgASsjhw4f122+/af78+XrggQc0Z84cPf/8867ull1ZWVny9/d3dTeuyunTpxUSElLs+Li4OP3rX/+65v34O8f95JNPdNttt8nHx+eq9vfw8JCHR9n/57xfv356/vnn9eGHHyogIMDV3bErODi4WN/ndu3aqU+fPpKkoUOHKi4uTo8++qhmzZqlMWPGKCwsTL///rvNPg8++KCCg4P1/vvva9KkSdY3/ZMmTVLbtm1tqkq33HKLOnTooPfff1+vvPLKNbxCAOVd+axfA04wZ84chYaGqnv37urTp4/mzJljNy4tLU1PPPGEYmNj5e3trSpVqmjQoEE201pycnL0wgsvKC4uTj4+PqpYsaLuuOMOHTx4UNLlqQyrV6+2Oba9+elDhgxRQECADh48qFtvvVWBgYG6++67JUk///yz+vbtq6pVq8rb21sxMTF64okndP78+UL93rNnj/r166cKFSrI19dXtWrV0tixYyVJq1atkslk0oIFCwrt99lnn8lkMmnNmjV/mr9Dhw6pb9++CgsLk5+fn2644QYtXrzY+vrMmTNlMplkGIY++OAD6zSOsubw4cPatm2bOnfuXOi1tLQ0DRkyRMHBwQoJCdHgwYOVlpZWKM7eXP9ly5apbdu2CgkJUUBAgGrVqqVnnnnG+npeXp7GjRunZs2aKTg4WP7+/mrXrp1WrVpVZF/ffvttVatWTb6+vurQoYN27NhRrGucPXu2mjVrJl9fX4WFhWnAgAE6fvx4obibb75ZWVlZWrZs2Z8er379+rrxxhsLtVssFlWuXNn6plyS5s6dq2bNmikwMFBBQUFq0KCBJk+eXKx+X0s33XSTpAvf7z8TGxsrSTbf5/bt2xeabta+fXuFhYVp9+7d17SfAFD2P2oCSqk5c+bojjvukJeXlwYOHKgpU6Zo/fr1atGihTXm3LlzateunXbv3q1hw4apadOmSk5O1sKFC3XixAlFRETIbDarR48eWrFihQYMGKDHHntMmZmZWrZsmXbs2KEaNWo43LeCggJ17dpVbdu21Ztvvik/Pz9J0ldffaXs7GyNGDFC4eHhWrdund577z2dOHFCX331lXX/bdu2qV27dvL09NT999+v2NhYHTx4UP/73/80fvx4dezYUTExMZozZ45uv/32QnmpUaOG4uPji+xfUlKSWrdurezsbD366KMKDw/XrFmzdNttt+nrr7/W7bffrvbt2+u///2v7rnnniKn9NiTk5NTaD1BYGCgvL29bdpSUlLs7l/UgmF7x5WkoKAgeXl5Fdmf3377TZLUtGlTm3bDMNSrVy/98ssvevDBB1WnTh0tWLBAgwcPLvJYl+zcuVM9evRQw4YN9dJLL8nb21sHDhzQr7/+ao3JyMjQRx99pIEDB2r48OHKzMzUxx9/rK5du2rdunVq3LixzTE//fRTZWZm6uGHH1ZOTo4mT56sm266Sdu3b1dUVFSRfRk/fryee+459evXT/fdd5/OnDmj9957T+3bt9fmzZttqj5169aVr6+vfv3110I/N1fq37+/XnjhBSUmJtpMqfnll1908uRJDRgwQNKFAdLAgQPVqVMnTZw4UZK0e/du/frrr3rsscf+Mo/2mM3mQt9nHx+fv6x0XBrIh4eH27Tn5eUpIyND58+f14YNG/Tmm2+qWrVqfzkF8dy5czp37pwiIiKu4ioA4E8YAK65DRs2GJKMZcuWGYZhGBaLxahSpYrx2GOP2cSNGzfOkGTMnz+/0DEsFothGIYxY8YMQ5IxadKkImNWrVplSDJWrVpl8/rhw4cNScYnn3xibRs8eLAhyXj66acLHS87O7tQ24QJEwyTyWQcPXrU2ta+fXsjMDDQpu3K/hiGYYwZM8bw9vY20tLSrG2nT582PDw8jOeff77Qea70+OOPG5KMn3/+2dqWmZlpVK9e3YiNjTXMZrO1XZLx8MMP/+nxroy1t12Zn+eff77IuEtb9+7di3VcScbnn3/+p3169tln
DUlGZmamTfs333xjSDJef/11a1tBQYHRrl27Ivt8ydtvv21IMs6cOVPkeQsKCozc3FybttTUVCMqKsoYNmyYte3Sz5Cvr69x4sQJa/vatWsNScYTTzxRZD+OHDliuLu7G+PHj7c5z/bt2w0PD49C7YZhGHFxcUa3bt2K7LdhGMbevXsNScZ7771n0/7QQw8ZAQEB1p/jxx57zAgKCjIKCgr+9HjF1aFDB7vf48GDB1tjLv0uzpgxwzhz5oxx8uRJY/HixUZsbKxhMpmM9evX2xzz888/tzlW8+bNjW3btv1lX15++WVDkrFixYprcm0AcAmVA6AEzJkzR1FRUdapDyaTSf3799fs2bP11ltvyd3dXZI0b948NWrUyO6npJemicybN08RERF65JFHioy5GiNGjCjUduXC3KysLJ0/f16tW7eWYRjavHmzqlatqjNnzuinn37SY489pqpVqxbZn0GDBmnChAn6+uuvde+990qSvvjiCxUUFPzlnO3vvvtOLVu2VNu2ba1tAQEBuv/++zVmzBjt2rVL9evXv6rr7tWrl0aOHGnTVq9evUJx8+bNU1BQUKH2ovpu77iS1KBBgz/tz9mzZ+Xh4VHok+fvvvtOHh4eNt8nd3d3PfLII/r555//9JiXPo3/9ttvNXToULt3QHJ3d7f+HFosFqWlpclisah58+batGlTofjevXurcuXK1q9btmypVq1a6bvvvtOkSZPs9mP+/PmyWCzq16+fzaft0dHRuv7667Vq1SqbqU6SFBoaWuSdoi6Ji4tT48aN9cUXX1hzbjab9fXXX6tnz57Wn+OQkBDrNKU/u4uVI2JjYzV9+nSbtj8uRpakYcOG2XxdoUIFzZo1S82bN7dpv/HGG7Vs2TKlpaVpxYoV2rp1q7Kysv60Dz/99JNefPFF9evXzzpdCQCuFQYHwDVmNps1d+5c3XjjjTbzi1u1aqW33npLK1asUJcuXSRdmGpw5513/unxDh48qFq1al3TBaceHh6qUqVKofZjx45p3LhxWrhwoVJTU21eS09PlyTr7Rj/6s157dq11aJFC82ZM8c6OJgzZ45uuOGGv5wycfToUbu3f61Tp4719asdHFSpUsXu/P4/at++vd0pG0UtGi7ucYvr6NGjqlixYqFBQ61atf5y3/79++ujjz7Sfffdp6efflqdOnXSHXfcoT59+tgMFGbNmqW33npLe/bssbl9ZvXq1Qsd8/rrry/UFhcXpy+//LLIfuzfv1+GYdjdV5I8PT0LtRmGUaxBb//+/fXMM88oISFBlStX1urVq3X69Gn179/fGvPQQw/pyy+/VLdu3VS5cmV16dJF/fr1+1sDBX9//2J9n8eNG6d27drJ3d1dERERqlOnjt3f4aioKOu0rD59+ujVV1/VzTffrP3799u9C9GePXt0++23q379+vroo4+u+joAoCgsSAausZUrV+rUqVOaO3eurr/+eut26QFPRS1M/juKejNlNpvttnt7exf6NNlsNuvmm2/W4sWL9dRTT+mbb77RsmXLrIuZ/+rhXPYMGjRIP/74o06cOKGDBw/q999/L5E7BZVl4eHhKigoUGZm5jU7pq+vr3766SctX75c99xzj7Zt26b+/fvr5ptvtv5MzJ49W0OGDFGNGjX08ccfa8mSJVq2bJluuummq/pe22OxWGQymazH/uP2n//8p9A+qampxZpH379/fxmGYV0L8+WXXyo4ONjmjX9kZKS2bNmihQsX6rbbbtOqVavUrVu3Yq3b+LsaNGigzp0768Ybb1SDBg2KPbjv06ePzp07p2+//bbQa8ePH1eXLl0UHBys7777ToGBgde62wBA5QC41ubMmaPIyEh98MEHhV6bP3++FixYoKlTp8rX11c1atT4yzu+1KhRQ2vXrlV+fr7dT1qlC1MxJBW6k83Ro0eL3e/t27dr3759mjVrls3i3j/eOea6666TpGLdqWbAgAEaNWqUPv/8c50/f16enp42n+wWpVq1atq7d2+h9ksPmqpWrdpfHqOsqF27tqQLd7Fp2LChtb1atWpasWKFzp07Z1M9sJcXe9zc3NSpUyd16tRJkyZN0quvvqqxY8dq1apV6ty5s77++mtdd911mj9/vs3gsqjb7e7fv79Q2759+6x317GnRo0aMgxD1atXL9bzHgoKCnT8+HHddtttfxlbvXp1tWzZ0jq1aP78+erdu3ehheVeXl7q2bOnevbsKYvFooceekj/+c9/9Nxzz5XK545cujPYpUrdJWfPnlWXLl2Um5urFStWqGLFiq7oHoBygMoBcA2dP39e8+fPV48ePdSnT59C28iRI5WZmamFCxdKku68805t3brV7i0/DcOwxiQnJ+v9998vMqZatWpyd3fXTz/9ZPP6hx9+WOy+X5p/fumYl/7+x9s+VqhQQe3bt9eMGTN07Ngxu/25JCIiQt26ddPs2bM1Z84c3XLLLcX6VPjWW2/VunXrbG53mpWVpWnTpik2NlZ169Yt9nWVdpfu2rRhwwab9ltvvVUFBQWaMmWKtc1sNuu99977y2Pau9PSpbsP5ebmSrL//V67dm2Rt5j95ptvlJCQYP163bp1Wrt2rbp161ZkP+644w65u7vrxRdfLPSzYRiGzp49a9O2a9cu5eTkqHXr1n9ydZf1799fv//+u2bMmKHk5ORCA88/Ht/Nzc06ALuUh/z8fO3Zs8fpTxtOTk4ulBNJ1qlCV65NyMrK0q233qqEhAR99913RU7TAoBrgcoBcA0tXLhQmZmZRX7yecMNN6hChQqaM2eO+vfvr//7v//T119/rb59+2rYsGFq1qyZUlJStHDhQk2dOlWNGjXSoEGD9Omnn2rUqFFat26d2rVrp6ysLC1fvlwPPfSQevXqpeDgYPXt21fvvfeeTCaTatSooUWLFun06dPF7nvt2rVVo0YNPfnkk0pISFBQUJDmzZtXaO2BJL377rtq27atmjZtqvvvv1/Vq1fXkSNHtHjxYm3ZssUmdtCgQdb7zr/88svF6svTTz+tzz//XN26ddOjjz6qsLAwzZo1S4cPH9a8efPsLrB1tX379mn27NmF2qOionTzzTcXud91112n+vXra/ny5TaLWHv27Kk2bdro6aef1pEjR1S3bl3Nnz+/0CfK9rz00kv66aef1L17d1WrVk2nT5/Whx9+qCpVqlgXeffo0UPz58/X7bffru7du+vw4cOaOnWq6tatq3PnzhU6Zs2aNdW2bVuNGDFCubm5eueddxQeHq7Ro0cX2Y8aNWrolVde0ZgxY3TkyBH17t1bgYGBOnz4sBYsWKD777/f5snWy5Ytk5+f35/m60r9+vXTk08+qSeffFJhYWGF1gLcd999SklJ0U033aQqVaro6NGjeu+999S4cWPr+pWEhATVqVNHgwcPtnkeSEmbPXu2pk6dqt69e+u6665TZmamli5dqmXLlqlnz542C43vvvturVu3TsOGDdPu3bttnm0QEBCg3r17O63fAMoBV9wiCfin6tmzp+Hj42NkZWUVGTNkyBDD09PTSE5ONgzDMM6ePWu
MHDnSqFy5suHl5WVUqVLFGDx4sPV1w7hwi9GxY8ca1atXNzw9PY3o6GijT58+xsGDB60xZ86cMe68807Dz8/PCA0NNR544AFjx44ddm9l6u/vb7dvu3btMjp37mwEBAQYERERxvDhw42tW7cWOoZhGMaOHTuM22+/3QgJCTF8fHyMWrVqGc8991yhY+bm5hqhoaFGcHCwcf78+eKk0TAMwzh48KDRp08f6/FbtmxpLFq0qFCcHLyV6V/FXrodZ1G3Aa1WrZpDtzLt0KHDX/Zr0qRJNrfgvOTs2bPGPffcYwQFBRnBwcHGPffcY2zevPkvb2W6YsUKo1evXkalSpUMLy8vo1KlSsbAgQONffv2WWMsFovx6quvGtWqVTO8vb2NJk2aGIsWLTIGDx5sVKtWzRp36Vamb7zxhvHWW28ZMTExhre3t9GuXTtj69atdnP3R/PmzTPatm1r+Pv7G/7+/kbt2rWNhx9+2Ni7d69NXKtWrYx//etff5mvK7Vp08aQZNx3332FXvv666+NLl26GJGRkYaXl5dRtWpV44EHHjBOnTpV6PquvB1pUTp06GDUq1fvT2Mu3cr0q6+++tO49evXG3379jWqVq1qeHt7G/7+/kbTpk2NSZMmGfn5+Tax1apVK/Ln68rvFQBcCybDsFPXBIBrpKCgQJUqVVLPnj318ccfu7o7pVJ6erquu+46vf7669Y7O5U3W7ZsUdOmTbVp06ZCD2ADADhP6avNA/hH+eabb3TmzJliP8G4PAoODtbo0aP1xhtvXLM7BZU1r732mvr06cPAAABcjMoBgBKxdu1abdu2TS+//LIiIiLsPlgLAACULlQOAJSIKVOmaMSIEYqMjNSnn37q6u4AAIBioHIAAAAAQBKVAwAAAAAXMTgAAAAAIInBAQAAAICLGBwAAAAAkMTgAAAAAMBFDA5KADeAQkk6deqUdu3a5epulClms1kSv5uOyM7OVl5enqu7UeacOHFCmzdvdnU3AOCqMTi4RrKyspSZmamMjAyZTCZXd6dMSElJ0Z49e7R//37ehBRTQkKCGjRooGeffVYbNmxwdXfKhC1btqh3797Kzs7md7OYduzYoX79+un3339Xbm6uq7tTZuzcuVOtW7fW7NmzJancPu26uE6cOKEvv/xS8+fP1/bt213dHQAXMTi4Bnbt2qU77rhDHTp0UJ06dTRnzhxJfEr5Z3bs2KHOnTurX79+atCggV5//XXrp7so2v79+5Wenq709HS99957Nk8d5uetsK1bt6p169aqV6+e/Pz8rO3kqmg7d+5Uu3btVKVKFVWvXl3e3t6u7lKZsHXrVrVs2VIeHh767LPPdPr0abm58V9sUbZv3662bdvqjTfe0EMPPaSxY8fq4MGDru4WADE4+Nt27dql9u3bq169enryySc1YMAADR06VFu2bOFTyiLs2rVLHTt2VKdOnTR37lyNHz9e48aN08mTJ13dtVKvYcOGuvXWW9W/f3/t2LFDkyZN0s6dOyXxhvePtm3bpjZt2mjkyJF67bXXrO15eXn8bhYhKytLo0aN0sCBAzV16lTFxMRoz5492rJli44dO+bq7pVaW7duVXx8vB5//HGtW7dO4eHhmj59ugzD4PfSjqNHj6pbt24aOHCgVq9erU8++UTr16/X2bNnXd01AOIJyX9LSkqKBg4cqNq1a2vy5MnW9htvvFENGjTQu+++K8MweCNyheTkZN15551q0qSJ3nnnHUkX3tTeeuutGjdunHx9fRUeHq6YmBjXdrQUMpvNSklJUdu2bbVy5UqtW7dOEyZMUOPGjbVz505VrFhRX3/9tau7WSokJiaqSZMmatSokZYsWSKz2awnn3xS+/fv18GDB/XAAw/olltuUe3atV3d1VIlNzdXnTt31rvvvquGDRuqe/fu1ul/9erV03333ad7773X1d0sVbZt26aWLVvq3//+t8aPHy+LxaL+/fvr6NGjWrdunSTx/8AfTJs2TZ9//rlWrlxpzUv37t3Vq1cv+fj4KCYmRjfeeKOLewmUXx6u7kBZlp+fr7S0NPXp00fShfmlbm5uql69ulJSUiSJ/xD+wGQy6ZZbbrHmTJJeeeUVLV26VImJiUpOTla9evX07LPPqm3bti7saenj5uamChUqqEWLFtqxY4duv/12eXt7a/DgwcrNzdXw4cNd3cVSJT4+XsePH9e3336rqVOnKj8/X40bN1ZsbKzeffdd7dixQ+PGjVPVqlVd3dVSIy0tTXv37lVycrL+7//+T5L00Ucf6eTJk1q5cqWeffZZBQcH2/z+lne5ubkaPXq0XnrpJev/Aa+88opatWqlKVOmaMSIEfw/8AeGYejYsWPasmWLmjRpovHjx+v7779XXl6e0tPTdfToUU2cOFFDhgxxdVeBcolpRX9DVFSUZs+erXbt2km6fEeUypUrF5preu7cOaf3rzQKDw/XyJEjdf3110uS5s6dq+eff15z587VihUrNGfOHKWkpGjFihUu7mnpc+kNhru7u1avXi1Jmj9/vsxms2JiYvTzzz9bP6ks76Kjo/XBBx+obt26GjhwoMxms7744gu9+eabev/99/XKK69o3rx51ilZuCAyMlKdOnXSwoULtX//fj3xxBNq2LChbrnlFj366KPq3LmzVqxYIbPZzHSZi1q0aKGXXnpJ0oUBvGEYio6O1o033qjVq1eTKzu6dOmi6Oho9evXT3369NFzzz2nBQsW6IcfftCiRYs0YMAAzZo1S2fPniV3gAtQOfibLr3JtVgs8vT0lHThU5HTp09bYyZMmCBvb289+uij8vAg5YGBgda/x8fHa8OGDWratKkkqX379oqMjNTGjRtd1b1S69LUhJtuukmHDx/WQw89pO+++04bN27Uli1b9H//93/y8vJSw4YN5ePj4+ruulzFihU1YcIEVa5cWZ07d1Z4eLg1h3fddZeef/55rVq1St26dXN1V0sNk8mkf//73+rYsaOys7N1//33W1+rUqWKoqKitH79erm5ufFpeBFMJpOCg4N1zz33qE+fPnr00UfVpk0bV3erVKlevbpmz56t9evXa9euXTKZTOrVq5ekCwPUSpUq6ccff5S/vz8/Z4AL8E71Grn0idGlf8guVQ7GjRunV155RZs3b2ZgYEe1atVUrVo1SRcGWHl5eQoICFDDhg1d3LPS59LPVvXq1TV06FBFRUVp0aJFql69uqpXry6TyaRGjRoxMLhCpUqV9PTTT1tzYjKZZBiGUlJSVKFCBTVu3Ni1HSyFmjdvru+//14dOnTQtGnTdN1116levXqSLkyljIuLU0FBgfXDENjXo0cP3XzzzZoyZYqaNm0qX19fV3epVLn079ZHH32kDRs2KC8vT15eXpKkpKQkxcbGcgc7wEV4t3oNXRoceHh4KCYmRm+++aZef/11bdiwQY0aNXJ190o9Nzc3vfrqq1qzZo1efvllV3en1IqPj9dHH32k5s2bq2HDhtafu969e7u6a6VSUFCQzdcmk0nvvvuukpOT+US3CO3atdPq1as1cOBADRs2TA0aNFBeXp
4WLlyoX375hYFBMXh5eenGG2/UhAkTlJ6ezuCgCK1bt9aTTz6pyZMnKzo6Wjt27NAnn3yin376Sf7+/q7uHlAuMTi4hi5VCzw9PTV9+nQFBQXpl19+sU6ZQdG++uor/fjjj5o7d66WLVtmna6Fwjw9PTVkyBDrzxtl9+KbO3euVq1apa+++korVqywVq1QWPv27bVy5UrNnj1bv//+u66//nr98ssvql+/vqu7VupdGrA/8MAD+vrrr5WTk+PqLpVadevW1YIFCzR8+HC5ubmpcuXK+vHHH9WgQQNXdw0ot7iVaQnYsGGDWrZsqR07dqhu3bqu7k6ZsHPnTr300kt64YUXVKdOHVd3B/9Q27Zt0zPPPKOJEydap8rgr1160i8P9XKMYRjKzs7mE/BiSElJUX5+vry9vRUSEuLq7gDlGoODEpKVlcV/CA7Kz89nugJK3JVzmwEAgC0GBwAAAAAk8ZwDAAAAABcxOAAAAAAgicEBAAAAgIsYHAAAAACQxOAAAAAAwEUMDkpIbm6uXnjhBeXm5rq6K2UGOXMcOXMcOXMcOXMcOXMcOXMcOUNJ4FamJSQjI0PBwcFKT09XUFCQq7tTJpAzx5Ezx5Ezx5Ezx5Ezx5Ezx5Gzf7affvpJb7zxhjZu3KhTp05pwYIF6t2795/us3r1ao0aNUo7d+5UTEyMnn32WQ0ZMsSh81I5AAAAAEqZrKwsNWrUSB988EGx4g8fPqzu3bvrxhtv1JYtW/T444/rvvvu09KlSx06r8fVdBYAAABAyenWrZu6detW7PipU6eqevXqeuuttyRJderU0S+//KK3335bXbt2LfZxyvTgwGKx6OTJkwoMDJTJZHJ1d2xkZGTY/Im/Rs4cR84cR84cR84cR84cR84cV1pzZhiGMjMzValSJbm5lb5JKjk5OcrLy3PJuQ3DKPSe1dvbW97e3n/72GvWrFHnzp1t2rp27arHH3/coeOU6cHByZMnFRMT4+pu/KnS3r/SiJw5jpw5jpw5jpw5jpw5jpw5rrTm7Pjx46pSpYqru2EjJydH1asFKPG02SXnDwgI0Llz52zann/+eb3wwgt/+9iJiYmKioqyaYuKilJGRobOnz8vX1/fYh2nTA8OAgMDJUltdas85Oni3gAAAKBA+fpF31nfp5UmeXl5Sjxt1tGNsQoKdG5VIyPTomrNjuj48eM2C8ivRdXgWirTg4NLZRkPecrDxOAAAADA5S7eB7O0Tfm+UkCgSQGBzu2fRRfOFxQUVCJ3l4qOjlZSUpJNW1JSkoKCgopdNZC4WxEAAABQ5sXHx2vFihU2bcuWLVN8fLxDxynTlQMAAADAUWbDIrOTn/RlNiwOxZ87d04HDhywfn348GFt2bJFYWFhqlq1qsaMGaOEhAR9+umnkqQHH3xQ77//vkaPHq1hw4Zp5cqV+vLLL7V48WKHzkvlAAAAAChlNmzYoCZNmqhJkyaSpFGjRqlJkyYaN26cJOnUqVM6duyYNb569epavHixli1bpkaNGumtt97SRx995NBtTCUqBwAAAECp07FjRxlG0eWNmTNn2t1n8+bNf+u8DA4AAABQrlhkyCLnzity9vmuFtOKAAAAAEiicgAAAIByxiKLHFsefG3OWRZQOQAAAAAgicEBAAAAgIuYVgQAAIByxWwYMv/JnYBK6pxlAZUDAAAAAJKoHAAAAKCc4VamRaNyAAAAAEASlQMAAACUMxYZMlM5sIvKAQAAAABJDA4AAAAAXMS0IgAAAJQrLEguGpUDAAAAAJKoHAAAAKCc4SFoRaNyAAAAAEASgwMAAAAAFzGtCAAAAOWK5eLm7HOWBVQOAAAAAEiicgAAAIByxuyCJyQ7+3xXi8oBAAAAAElUDgAAAFDOmI0Lm7PPWRZQOQAAAAAgicEBAAAAgIuYVgQAAIByhVuZFo3KAQAAAABJDA6K7baHuuq/hz7Q4uw5enfNq6rVouafxrfvc4M+3vWOFmfP0bStb6lltyaFYga/2F9zE6ZpUdYcTfzhOVWuGV1S3XcJcuY4cuY4cuY4cuY4cuY4cuY4cuY8FplkdvJmkcnVl10sLh0cTJgwQS1atFBgYKAiIyPVu3dv7d2715VdsqtDv9Z64K3Bmv3SVxrR7Ckd2nZUE5aMVUiFILvxdePj9Mxnj2vJjJUa0XS0fv12nV5YMFqx9WKsMf1H91LvR7pp8ohpeuSGMcrJytWEJc/K09vTWZdVosiZ48iZ48iZ48iZ48iZ48iZ48gZSguXDg5+/PFHPfzww/r999+1bNky5efnq0uXLsrKynJltwq584ke+v6jFVo6c7WO7T6hyQ9OU252nroOu8lu/O2Pdtf6JVv01ZsLdWxPgmaN+0IHNh1Sr5G3XI55rLvmjJ+nNQs36PD2Y5o4+H2FVwpVm94tnHVZJYqcOY6cOY6cOY6cOY6cOY6cOY6cobRw6eBgyZIlGjJkiOrVq6dGjRpp5syZOnbsmDZu3OjKbtnw8PRQXLPrtGn5NmubYRjatHyb6t4QZ3efuvFx2rRim03bhh+2qs7F+OjqkQqvGKrNy7dbX8/OyNaetQdUN75WCVyFc5Ezx5Ezx5Ezx5Ezx5Ezx5Ezx5Ez57MYrtnKglK15iA9PV2SFBYWZvf13NxcZWRk2GwlLTgiUO4e7kpNSrdpTz2drtDoELv7hEaHKO2P8UlpCrsYf+nP1KS0QjGhUfaPWZaQM8eRM8eRM8eRM8eRM8eRM8eRM5QmpWZwYLFY9Pjjj6tNmzaqX7++3ZgJEyYoODjYusXExNiNAwAAAIri7MXIl7ayoNQMDh5++GHt2LFDc+fOLTJmzJgxSk9Pt27Hjx8v8X6lJ2fKXGBWaFSwTXtoZLBSE9Ps7pOamKaQP8ZHhSjlYvylP/84cg+NCik0wi+LyJnjyJnjyJnjyJnjyJnjyJnjyBlKk1IxOBg5cqQWLVqkVatWqUqVKkXGeXt7KygoyGYraQX5Bdq38ZCadGpgbTOZTGrSqYF2/b7P7j671uxTk5sa2LQ17dxQuy/GJx4+rbOnUtWk0+UKiV+gr2q3qqlda0rf3ZocRc4cR84cR84cR84cR84cR84cR86cj8pB0Vw6ODAMQyNHjtSCBQu0cuVKVa9e3ZXdKdK8txfp1vs66eZBHVS1dmU9OmW4fPy9tfSTVZKk0TNHatird1njF7y7WC1uaaw+o3ooplYl3fN8X8U1r6Fv319yOWbyYt019k7F92yu2PpVNXrWSJ09mapfv1nv9OsrCeTMceTMceTMceTMceTMceTMceQMpYWHK0/+8MMP67PPPtO3336rwMBAJSYmSpKCg4Pl6+vryq7Z+PHL3xRSIUiDX+yv0OgQHdxyRM90G6+00xcWAkVWjZBxxRL0XWv2acLdkzXk5YEaOv4uJew/pRduf11Hdl6eBvXF69/Kx99Hj//nAQWE+GnHL3s0ptt45efmO/36SgI5cxw5cxw5cxw5cxw5cxw5cxw5Q2lhMgzDZTdWMpnsl1c++eQTDRky5C/3z8jIUHBwsDqqlzxMPNADAADA1QqMfK3Wt0pPT3fKF
HBHXHrv+MuOSgoIdO4EmnOZFrWtf7JU5uVKLq0cuHBcAgAAAOAPXDo4AAAAAJzNFQuEWZAMAAAAoExhcAAAAABAEtOKAAAAUM6Y5Sazkz8jNzv1bFePygEAAAAASVQOAAAAUM4YhkkWw7kLhA0nn+9qUTkAAAAAIInBAQAAAICLmFYEAACAcoXnHBSNygEAAAAASVQOAAAAUM6YDTeZDSffytRw6umuGpUDAAAAAJKoHAAAAKCcscgki5M/I7eobJQOqBwAAAAAkMTgAAAAAMBFTCsCAABAucKtTItG5QAAAACAJCoHAAAAKGdccytTFiQDAAAAKEMYHAAAAACQxLQiAAAAlDMXnnPg3AXCzj7f1aJyAAAAAEASlQMAAACUMxa5ycwTku2icgAAAABAEpUDAAAAlDPcyrRoVA4AAAAASGJwAAAAAOAiphUBAACgXLHITRYWJNtF5QAAAACAJCoHAAAAKGfMhklmw7kPJXP2+a4WlQMAAAAAkhgcAAAAALiIaUUAAAAoV8wueEKymQXJAAAAAMoSKgcAAAAoVyyGmyxOfkKyhSckAwAAAChLqBwAAACgXGHNQdGoHAAAAACQxOAAAAAAwEVMKwIAAEC5YpHzn1hscerZrh6VAwAAAACSqBwAAACgnLHITRYnf0bu7PNdrbLRSwAAAAAljsEBAAAAAElMKwIAAEA5YzbcZHbyE5Kdfb6rVTZ6CQAAAKDEUTkAAABAuWKRSRY5+1amzj3f1aJyAAAAAEASlQMAAACUM6w5KFrZ6CUAAACAEsfgAAAAAIAkphUBAACgnDHLTWYnf0bu7PNdrbLRSwAAAAAljsoBAAAAyhWLYZLFcPKtTJ18vqtF5QAAAACAJAYHAAAAAC5iWhEAAADKFYsLFiRbyshn8mWjlwAAAABKHJUDAAAAlCsWw00WJz+x2Nnnu1plo5cAAAAAShyVAwAAAJQrZplklnNvLers810tKgcAAAAAJDE4AAAAAHAR04oAAABQrrAguWhlo5cAAAAAShyDg2K67aGu+u+hD7Q4e47eXfOqarWo+afx7fvcoI93vaPF2XM0betbatmtSaGYwS/219yEaVqUNUcTf3hOlWtGl1T3XYKcOY6cOY6cOY6cOY6cOY6cOY6cOY9ZlxclO28rG1w6OJgyZYoaNmyooKAgBQUFKT4+Xt9//70ru2RXh36t9cBbgzX7pa80otlTOrTtqCYsGauQCkF24+vGx+mZzx7XkhkrNaLpaP367Tq9sGC0YuvFWGP6j+6l3o900+QR0/TIDWOUk5WrCUuelae3p7Muq0SRM8eRM8eRM8eRM8eRM8eRM8eRM5QWLh0cVKlSRa+99po2btyoDRs26KabblKvXr20c+dOV3arkDuf6KHvP1qhpTNX69juE5r84DTlZuep67Cb7Mbf/mh3rV+yRV+9uVDH9iRo1rgvdGDTIfUaecvlmMe6a874eVqzcIMObz+miYPfV3ilULXp3cJZl1WiyJnjyJnjyJnjyJnjyJnjyJnjyBlKC5cODnr27Klbb71V119/veLi4jR+/HgFBATo999/d2W3bHh4eiiu2XXatHybtc0wDG1avk11b4izu0/d+DhtWrHNpm3DD1tV52J8dPVIhVcM1ebl262vZ2dka8/aA6obX6sErsK5yJnjyJnjyJnjyJnjyJnjyJnjyJnzXVqQ7OytLCg1vTSbzZo7d66ysrIUHx9vNyY3N1cZGRk2W0kLjgiUu4e7UpPSbdpTT6crNDrE7j6h0SFK+2N8UprCLsZf+jM1Ka1QTGiU/WOWJeTMceTMceTMceTMceTMceTMceQMpYnLb2W6fft2xcfHKycnRwEBAVqwYIHq1q1rN3bChAl68cUXndxDAAAA/JOYDTeZnfxJvrPPd7Vc3statWppy5YtWrt2rUaMGKHBgwdr165ddmPHjBmj9PR063b8+PES7196cqbMBWaFRgXbtIdGBis1Mc3uPqmJaQr5Y3xUiFIuxl/6848j99CokEIj/LKInDmOnDmOnDmOnDmOnDmOnDmOnKE0cfngwMvLSzVr1lSzZs00YcIENWrUSJMnT7Yb6+3tbb2z0aWtpBXkF2jfxkNq0qmBtc1kMqlJpwba9fs+u/vsWrNPTW5qYNPWtHND7b4Yn3j4tM6eSlWTTvWtr/sF+qp2q5ratWZvCVyFc5Ezx5Ezx5Ezx5Ezx5Ezx5Ezx5Ez5zNkksXJmyGTqy+7WFw+OPgji8Wi3NxcV3fDxry3F+nW+zrp5kEdVLV2ZT06Zbh8/L219JNVkqTRM0dq2Kt3WeMXvLtYLW5prD6jeiimViXd83xfxTWvoW/fX3I5ZvJi3TX2TsX3bK7Y+lU1etZInT2Zql+/We/06ysJ5Mxx5Mxx5Mxx5Mxx5Mxx5Mxx5AylhUvXHIwZM0bdunVT1apVlZmZqc8++0yrV6/W0qVLXdmtQn788jeFVAjS4Bf7KzQ6RAe3HNEz3cYr7fSFhUCRVSNkWAxr/K41+zTh7ska8vJADR1/lxL2n9ILt7+uIzsvT4P64vVv5ePvo8f/84ACQvy045c9GtNtvPJz851+fSWBnDmOnDmOnDmOnDmOnDmOnDmOnMGeDz74QG+88YYSExPVqFEjvffee2rZsmWR8e+8846mTJmiY8eOKSIiQn369NGECRPk4+NT7HOaDMMw/jqsZNx7771asWKFTp06peDgYDVs2FBPPfWUbr755mLtn5GRoeDgYHVUL3mYeKAHAACAqxUY+Vqtb5Wenu6UKeCOuPTe8f9+6y7vAOe+d8w9l683Wi8udl6++OILDRo0SFOnTlWrVq30zjvv6KuvvtLevXsVGRlZKP6zzz7TsGHDNGPGDLVu3Vr79u3TkCFDNGDAAE2aNKnY/XRp5eDjjz925ekBAACAUmnSpEkaPny4hg4dKkmaOnWqFi9erBkzZujpp58uFP/bb7+pTZs2uuuuC9PPYmNjNXDgQK1du9ah85a6NQcAAABASbIYJpdskgo9s8veWtu8vDxt3LhRnTt3tra5ubmpc+fOWrNmjd1rat26tTZu3Kh169ZJkg4dOqTvvvtOt956q0O5YXAAAAAAOElMTIyCg4Ot24QJEwrFJCcny2w2KyoqyqY9KipKiYmJdo9711136aWXXlLbtm3l6empGjVqqGPHjnrmmWcc6p/LH4IGAAAAlBfHjx+3WXPg7e19TY67evVqvfrqq/rwww/VqlUrHThwQI899phefvllPffcc8U+DoMDAAAAlCtmucns5Ak0l85XnGd1RUREyN3dXUlJSTbtSUlJio6OtrvPc889p3vuuUf33XefJKlBgwbKysrS/fffr7Fjx8rNrXjXy7QiAAAAoBTx8vJSs2bNtGLFCmubxWLRihUrFB8fb3ef7OzsQgMAd3d3SZIjNyelcgAAAIBy5coFws48pyNGjRqlwYMHq3nz5mrZsqXeeecdZWVlWe9eNGjQIFWuXNm6ZqFnz56aNGmSmjRpYp1W9Nxzz6lnz57WQUJxMDgAAAAASpn+/fvrzJkzGjdunBITE9W4cWMtWbLE
ukj52LFjNpWCZ599ViaTSc8++6wSEhJUoUIF9ezZU+PHj3fovC59CNrfxUPQAAAASpey8BC0R3/p5ZKHoL3btnTm5UpUDgAAAFCuWOQmi5OX3jr7fFerbPQSAAAAQImjcgAAAIByxWyYZHbygmRnn+9qUTkAAAAAIInKAQAAAMqZsnArU1ehcgAAAABAEoMDAAAAABcxrQgAAADlimG4yWI49zNyw8nnu1plo5cAAAAAShyVAwAAAJQrZplklpNvZerk810tKgcAAAAAJDE4AAAAAHAR04oAAABQrlgM5z93wGI49XRXjcoBAAAAAElUDgAAAFDOWFxwK1Nnn+9qlY1eAgAAAChxVA4AAABQrlhkksXJtxZ19vmuFpUDAAAAAJIYHAAAAAC4iGlFAAAAKFfMhklmJ9/K1Nnnu1pUDgAAAABIonIAAACAcoZbmRatbPQSAAAAQIljcAAAAABAEtOKAAAAUM5YZJLFyQuEec4BAAAAgDKFygEAAADKFcMFT0g2qBwAAAAAKEuoHAAAAKBcsRguWHPAQ9AAAAAAlCUMDgAAAABIYloRAAAAyhmekFy0stFLAAAAACWOygEAAADKFRYkF43KAQAAAABJDA4AAAAAXMS0IgAAAJQrFhc8IdnZ57taVA4AAAAASKJyAAAAgHKGBclFo3IAAAAAQBKVAwAAAJQzVA6KRuUAAAAAgCQGBwAAAAAuYloRAAAAyhWmFRWNygEAAAAASVQOAAAAUM5QOSgalQMAAAAAkhgcAAAAALiIaUUAAAAoVwxJFjl3mo/h1LNdPSoHAAAAACRROQAAAEA5w4LkolE5AAAAACCJygEAAADKGSoHRaNyAAAAAEASgwMAAAAAFzGtCAAAAOUK04qKRuUAAAAAgCQGB8V220Nd9d9DH2hx9hy9u+ZV1WpR80/j2/e5QR/vekeLs+do2ta31LJbk0Ixg1/sr7kJ07Qoa44m/vCcKteMLqnuuwQ5cxw5cxw5cxw5cxw5cxw5cxw5c55LlQNnb2VBqRkcvPbaazKZTHr88cdd3ZVCOvRrrQfeGqzZL32lEc2e0qFtRzVhyViFVAiyG183Pk7PfPa4lsxYqRFNR+vXb9fphQWjFVsvxhrTf3Qv9X6kmyaPmKZHbhijnKxcTVjyrDy9PZ11WSWKnDmOnDmOnDmOnDmOnDmOnDmOnKG0KBWDg/Xr1+s///mPGjZs6Oqu2HXnEz30/UcrtHTmah3bfUKTH5ym3Ow8dR12k9342x/trvVLtuirNxfq2J4EzRr3hQ5sOqReI2+5HPNYd80ZP09rFm7Q4e3HNHHw+wqvFKo2vVs467JKFDlzHDlzHDlzHDlzHDlzHDlzHDlDaeHywcG5c+d09913a/r06QoNDXV1dwrx8PRQXLPrtGn5NmubYRjatHyb6t4QZ3efuvFx2rRim03bhh+2qs7F+OjqkQqvGKrNy7dbX8/OyNaetQdUN75WCVyFc5Ezx5Ezx5Ezx5Ezx5Ezx5Ezx5Ez5zMMk0u2ssDlg4OHH35Y3bt3V+fOnf8yNjc3VxkZGTZbSQuOCJS7h7tSk9Jt2lNPpys0OsTuPqHRIUr7Y3xSmsIuxl/6MzUprVBMaJT9Y5Yl5Mxx5Mxx5Mxx5Mxx5Mxx5Mxx5AyliUtvZTp37lxt2rRJ69evL1b8hAkT9OKLL5ZwrwAAAPBPZpFJFjn5VqZOPt/Vclnl4Pjx43rsscc0Z84c+fj4FGufMWPGKD093bodP368hHsppSdnylxgVmhUsE17aGSwUhPT7O6TmpimkD/GR4Uo5WL8pT//OHIPjQopNMIvi8iZ48iZ48iZ48iZ48iZ48iZ48gZShOXDQ42btyo06dPq2nTpvLw8JCHh4d+/PFHvfvuu/Lw8JDZbC60j7e3t4KCgmy2klaQX6B9Gw+pSacG1jaTyaQmnRpo1+/77O6za80+NbmpgU1b084NtftifOLh0zp7KlVNOtW3vu4X6KvarWpq15q9JXAVzkXOHEfOHEfOHEfOHEfOHEfOHEfOnI9bmRbNZYODTp06afv27dqyZYt1a968ue6++25t2bJF7u7urupaIfPeXqRb7+ukmwd1UNXalfXolOHy8ffW0k9WSZJGzxypYa/eZY1f8O5itbilsfqM6qGYWpV0z/N9Fde8hr59f8nlmMmLddfYOxXfs7li61fV6FkjdfZkqn79pnhTrEo7cuY4cuY4cuY4cuY4cuY4cuY4cobSwmVrDgIDA1W/fn2bNn9/f4WHhxdqd7Ufv/xNIRWCNPjF/gqNDtHBLUf0TLfxSjt9YSFQZNUIGRbDGr9rzT5NuHuyhrw8UEPH36WE/af0wu2v68jOy9Ogvnj9W/n4++jx/zyggBA/7fhlj8Z0G6/83HynX19JIGeOI2eOI2eOI2eOI2eOI2eOI2coLUyGYRh/HeYcHTt2VOPGjfXOO+8UKz4jI0PBwcHqqF7yMPFADwAAAFcrMPK1Wt8qPT3dKVPAHXHpvWPLBY/Jw9/bqecuyMrVutsnl8q8XMmldyv6o9WrV7u6CwAAAEC5VaoGBwAAAEBJc8UCYRYkAwAAAChTGBwAAAAAkMS0IgAAAJQzhmGS4eRpPs4+39WicgAAAABAEpUDAAAAlDOGCxYkUzkAAAAAUKYwOAAAAAAgiWlFAAAAKGcMSYbh/HOWBVQOAAAAAEiicgAAAIByxiKTTHLyE5KdfL6rReUAAAAAgCQqBwAAAChneAha0agcAAAAAJDE4AAAAADARUwrAgAAQLliMUwyOXmaj7OfyHy1qBwAAAAAkETlAAAAAOWMYbjgIWhl5CloVA4AAAAASGJwAAAAAOAiphUBAACgXOE5B0WjcgAAAABAEpUDAAAAlDNUDopG5QAAAACAJCoHAAAAKGd4CFrRqBwAAAAAkMTgAAAAAMBFTCsCAABAucITkotG5QAAAACAJCoHAAAAKGcuVA6cfStTp57uqlE5AAAAACCJygEAlBqWto1d3YUyxyMjx9VdKJMKgnxc3YUyx+tEiqu7UHZYcqWjru4ErhaDAwAAAJQrPCG5aEwrAgAAACCJygEAAADKGePi5uxzlgVUDgAAAABIonIAAACAcoY1B0WjcgAAAABAEoMDAAAAABcxrQgAAADlCyuSi0TlAAAAAIAkKgcAAAAob1ywIFksSAYAAABQljA4AAAAAEqhDz74QLGxsfLx8VGrVq20bt26P41PS0vTww8/rIoVK8rb21txcXH67rvvHDon04oAAABQrhjGhc3Z53TEF198oVGjRmnq1Klq1aqV3nnnHXXt2lV79+5VZGRkofi8vDzdfPPNioyM1Ndff63KlSvr6NGjCgkJcei8DA4AAACAUmbSpEkaPny4hg4dKkmaOnWqFi9erBkzZujpp58uFD9jxgylpKTot99+k6enpyQpNjbW4fMyrQgAAADlyqUnJDt7k6SMjAybLTc3t1D/8vLytHHjRnXu3Nna5ubmps6dO2vNmjV2r2nhwoWKj4/Xww8/rKioKNWvX1+vvvqqzGazQ7lhcAAAAAA4SUxMjIKDg63bhAkTCsU
kJyfLbDYrKirKpj0qKkqJiYl2j3vo0CF9/fXXMpvN+u677/Tcc8/prbfe0iuvvOJQ/5hWBAAAgPLFMDn/1qIXz3f8+HEFBQVZm729va/J4S0WiyIjIzVt2jS5u7urWbNmSkhI0BtvvKHnn3++2MdhcAAAAAA4SVBQkM3gwJ6IiAi5u7srKSnJpj0pKUnR0dF296lYsaI8PT3l7u5ubatTp44SExOVl5cnLy+vYvWPaUUAAABAKeLl5aVmzZppxYoV1jaLxaIVK1YoPj7e7j5t2rTRgQMHZLFYrG379u1TxYoViz0wkBgcAAAAoJy5dCtTZ2+OGDVqlKZPn65Zs2Zp9+7dGjFihLKysqx3Lxo0aJDGjBljjR8xYoRSUlL02GOPad++fVq8eLFeffVVPfzwww6dl2lFAAAAQCnTv39/nTlzRuPGjVNiYqIaN26sJUuWWBcpHzt2TG5ulz/nj4mJ0dKlS/XEE0+oYcOGqly5sh577DE99dRTDp2XwQEAAADKF+Pi5uxzOmjkyJEaOXKk3ddWr15dqC0+Pl6///674ye6AtOKAAAAAEhicAAAAADgIqYVAQAAoFy58onFzjxnWUDlAAAAAIAkKgcAAAAoj5y9ILmMoHIAAAAAQBKVAwAAAJQzrDkoGpUDAAAAAJIYHAAAAAC4iGlFAAAAKF/KyBOSXYHKAQAAAABJVA6K7baHuqrvk7cpLDpEB7ce1QePztDe9QeKjG/f5wYNfmmAomMrKGF/oj56erbWfb/ZJmbwi/3V7b5OCgjx185f9+jdh6Yr4UBiSV+K05Azx5Ezx5Ezx/W6vZn6DbhBYWEBOngwSe9N/kF7d5+0G1stNkJD7u2guLhoRVcM0Qfv/aD5X623iXFzM2nQ0Pbq3KW+wsL8dTb5nJZ+v02zP/3FGZfjFD37t1SfwW0VFh6gQ/sS9eHExdq7I8FubLUakRo04ibVrFtJ0ZVCNfWN77Rgzpoij91vaDvd+1gXLZjzm6a+8X1JXYLT8XPmuB73tFGf4R0VWiFQh3af1JQXFmjftuNFxrft1lCDRnVTVJVQJRxJ1icTF2n96j3W10MiAjRsdA81bRcn/yBf7Vh3SFNeXKCTR5KdcTmlnOni5uxzln4urRy88MILMplMNlvt2rVd2SW7OvRrrQfeGqzZL32lEc2e0qFtRzVhyViFVAiyG183Pk7PfPa4lsxYqRFNR+vXb9fphQWjFVsvxhrTf3Qv9X6kmyaPmKZHbhijnKxcTVjyrDy9PZ11WSWKnDmOnDmOnDmu40119ODDnfXpzJ/14H0f6+CB05r45gCFhPjZjffx8dSpk6n66D+rdPbsObsxA+6K1229muq9t5dq6D3/0fSpK9X/rht0+53NS/JSnKZDl/q6/9/dNOc/q/TwwCk6tC9R4z8crOBQf7vx3j6eOpWQqhmTl+nsmcw/PXZcvcrq3qeFDu395ww+JX7Orkb77o11/zO3ac67P+iRnm/r8O6TemXW/QoOD7AbX6dprJ6e/C8t/XKtRvaYpDU/7NBzU4eqWly0NWbc1KGKrhqmlx74RCN7TNLphFS9+t8H5O3r5azLQhnk8mlF9erV06lTp6zbL7+Uvk8A7nyih77/aIWWzlytY7tPaPKD05Sbnaeuw26yG3/7o921fskWffXmQh3bk6BZ477QgU2H1GvkLZdjHuuuOePnac3CDTq8/ZgmDn5f4ZVC1aZ3C2ddVokiZ44jZ44jZ47r06+Vvlu0RUu/36ajR5P1zlvfKTenQLd0b2Q3fu+eU5o2ZaVWrdyl/LwCuzH16lfRb7/u09rfDygpMV0//bhHG9YfVu06lUryUpzmjntaa8n8Dfrh2806duiM3n3lf8rNyVfX3k3txu/bmaCP3l6qH5duV36+/ZxJko+vl556tY/eeekbZWaeL6nuuwQ/Z467/d72+v6L37Xs6/U6diBJ7z07T7nn89Wlb0u78b2GtNOGn/Zq3vTVOn7wtP779hId3JmgnoPaSJIqV49Qnaaxev+5edq37bgSDp/R+8/Nk7e3pzr2bOLMS0MZ4/LBgYeHh6Kjo61bRESEq7tkw8PTQ3HNrtOm5dusbYZhaNPybap7Q5zdferGx2nTim02bRt+2Ko6F+Ojq0cqvGKoNi/fbn09OyNbe9YeUN34WiVwFc5FzhxHzhxHzhzn4eGmuLiK2rThsLXNMKRNGw+rbr0qV33cnTtOqEnTWFWpEiZJuq5GpBo0qKJ1aw/+7T67moeHu66vU0mb1h6ythmGoc1rD6puw5g/2fOvjXymh9b9vE+brzj2PwE/Z47z8HTX9fWraMuv+61thmFoy6/7VKdJNbv71GlaTVt+3WfTtvHnvarTJFaS5Ol1YeZ4fu7lwZZhGMrPM6te8+rX+ArKIMNFWxng8jUH+/fvV6VKleTj46P4+HhNmDBBVatWtRubm5ur3Nxc69cZGRkl3r/giEC5e7grNSndpj31dLpiale2u09odIjS/hiflKaw6BBJsv6ZmpRWKCY0KuRadNulyJnjyJnjyJnjgoP95O7hptTULJv21JQsxVQNv+rjfj7nN/n5e+uT2Q/KYrHIzc1NM6av1oplO/9ul10uKNRP7h7uSvvDVJfUs+cUE3v1H2Z16NpANWtX0iN3T/27XSx1+DlzXFCo/4V/z5Jtp6GlJp9TlRqRdvcJjQhUavIffi6TMxVaIVCSdPzgaSUlpGjI/92q98Z+rZzzebp9WHtVqBSisEj7Uy8BycWDg1atWmnmzJmqVauWTp06pRdffFHt2rXTjh07FBgYWCh+woQJevHFF13QUwBAUTreWFedbq6vV1/6RkeOnFGNmlF6+JGbdfZspn5Ysv2vD1DOVIgK0ojRt2rMgzOLnEKDwvg5c4y5wKJXRszS46/101dbXpG5wKzNv+7X+tW7Xd210oFbmRbJpYODbt26Wf/esGFDtWrVStWqVdOXX36pe++9t1D8mDFjNGrUKOvXGRkZion5e2Xdv5KenClzgVmhUcE27aGRwUpNTLO7T2pimkL+GB8VopSL8Zf+vLLt0tcHtx65Rj13HXLmOHLmOHLmuPT0bJkLLAr9w0La0DB/paRkFbHXX7v/oU6aO+c3rVq5S5J0+NAZRUUHa+Ddrcv8m7aM1GyZC8wK+cOi0NDwgEKf2hZXzbqVFRoeoA8+H2Ftc/dwV4Om1XRb/1bq0fJFWSxl5F2EHfycOS4jNevCv2cRth+MhkYEKLWIRe2pyZkKjfjDz2VEoE38gR0nNLLHJPkF+sjT013pKVl6e/6j2r/9xLW/CPxjuHzNwZVCQkIUFxenAwfs34bQ29tbQUFBNltJK8gv0L6Nh9SkUwNrm8lkUpNODbTr931299m1Zp+a3NTApq1p54bafTE+8fBpnT2Vqiad6ltf9wv0Ve1WNbVrzd4SuArnImeOI2eOI2eOKyiwaN++U2rSLNbaZjJJTZrGatfOq3+z4OPtUejNrMVsyM2tbNy2788UFJi1f/dJNWl5nbXNZDKpccvrtOtPbjH5Z7asPaj773xPI/p/aN327jyhld9t04j+H5
bpgYHEz9nVKMg3a/+OE2rc+nprm8lkUuPW12v35qN299m96ahNvCQ1aROn3ZuPFIrNzsxRekqWKsVG6PoGMfp92Y5r2v8yyTC5ZisDXL7m4Ernzp3TwYMHdc8997i6Kzbmvb1Io2c+rH0bDmrvugO6/fHu8vH31tJPVkmSRs8cqeSTKZrxzGeSpAXvLtZbq19Un1E9tHbxJnUc0EZxzWvonQf+Yz3mgsmLddfYO5WwP1GnDp/WkJf66+zJVP36zXq7fShryJnjyJnjyJnjvv5yrZ4ac5v27T2lPbtP6s6+LeXj66ml311YqP3UMz2VnJypj6etlnRhcWm12AoX/u7proiIQNWoGaXz5/N0MiFVkrTmt/26+542Op2UoSNHzqjm9dHq07+llny31SXXeK3N/+9vevLlO7RvV4L27kjQ7XfHy8fXSz98u0mS9H8v36nk0xn65L1lki4sYq5a40LOPD3cFR4ZpOtqRSsnO08nj6fofHaejh48bXOOnPP5ykzPLtReVvFz5rgFH/+kf785QPu3H9fercfUe2h7eft5adnX6yRJ/35zoM4mpWvmG99Jkr6d+bNe//wh3XFvB61btVsdejbW9Q2q6N2xX1mP2bZbQ6WnZOnMyVTF1qqoB8f11pplO7TpF/sfoACSiwcHTz75pHr27Klq1arp5MmTev755+Xu7q6BAwe6sluF/PjlbwqpEKTBL/ZXaHSIDm45ome6jVfa6QsLGyOrRsi44tOMXWv2acLdkzXk5YEaOv4uJew/pRduf11Hdl7+lOmL17+Vj7+PHv/PAwoI8dOOX/ZoTLfxys/Nd/r1lQRy5jhy5jhy5rjVK3crOMRfQ4Z1UGiYvw4eSNLTT861Lh6NjAqWYVzOWXhEoKbNuM/6df+B8eo/MF5bNh/Vvx+bLUl6750fNPS+Dnps1C0KCfXT2eRzWrRws/4782fnXlwJ+fGHHQoO9degEZ0UGhGgQ3tPaexDnyrt4hSZChWDZTEs1vjwyEBN+eJh69d9B7dV38FttXXDYY2+b4bT++8K/Jw57qfFWxQc5q9/PdFVYRFBOrg7Qc8Nma60i9PXIiuF2Px7tnvTEU18fLYG/7ubhjx5qxKOnNHLD36io/suPzMjLDJI94/tpZCIAKWcydCK+Rv1+fvLnH5tKFtMxpW/nU42YMAA/fTTTzp79qwqVKigtm3bavz48apRo0ax9s/IyFBwcLA6qpc8TP+MBxQBKL8sbRu7ugtljkdGjqu7UCYVBPm4ugtljteJFFd3ocwosORq+dEPlJ6e7pQp4I649N6xyvsvys3Xub8HlvM5OjHy+VKZlyu5tHIwd+5cV54eAAAAwBVK1ZoDAAAAoMRxK9Milaq7FQEAAABwHQYHAAAAACQxrQgAAADljSueO1BGnnNA5QAAAACAJCoHAAAAKGdMxoXN2ecsC6gcAAAAAJDE4AAAAADARUwrAgAAQPnCcw6KROUAAAAAgCQqBwAAAChvuJVpkagcAAAAAJBE5QAAAADlDWsOivS3Kwdms1lbtmxRamrqtegPAAAAABdxeHDw+OOP6+OPP5Z0YWDQoUMHNW3aVDExMVq9evW17h8AAAAAJ3F4cPD111+rUaNGkqT//e9/Onz4sPbs2aMnnnhCY8eOveYdBAAAAK4pw0VbGeDw4CA5OVnR0dGSpO+++059+/ZVXFychg0bpu3bt1/zDgIAAABwDocHB1FRUdq1a5fMZrOWLFmim2++WZKUnZ0td3f3a95BAAAA4JqiclAkh+9WNHToUPXr108VK1aUyWRS586dJUlr165V7dq1r3kHAQAAADiHw4ODF154QfXr19fx48fVt29feXt7S5Lc3d319NNPX/MOAgAAAHAOhwcHn376qfr3728dFFwycOBAzZ0795p1DAAAACgRPCG5SA6vORg6dKjS09MLtWdmZmro0KHXpFMAAAAAnM/hyoFhGDKZCo98Tpw4oeDg4GvSKQAAAKCkmIwLm7PPWRYUe3DQpEkTmUwmmUwmderUSR4el3c1m806fPiwbrnllhLpJAAAAICSV+zBQe/evSVJW7ZsUdeuXRUQEGB9zcvLS7GxsbrzzjuveQcBAACAa8oVtxb9p1UOnn/+eUlSbGys+vfvLx8fnxLrFAAAAADnc3jNweDBg0uiHwAAAABcrFiDg7CwMO3bt08REREKDQ21uyD5kpSUlGvWOQAAAADOU6zBwdtvv63AwEBJ0jvvvFOS/QEAAADgIsUaHFw5lYhpRQAAACjLTHLBrUyde7qr5vCagysZhqFVq1bp/Pnzat26tUJDQ69VvwAAAAA4WbEHB2lpaXrssce0adMm3XDDDXrrrbd066236rfffpMkRUZG6ocfflDDhg1LrLMAyg6P2Kqu7kKZs+dO7gLnqG5td7u6CygnYnxYU1lcOefytfwGV/cCV8utuIFPPvmk1qxZowEDBmj79u265ZZbZDabtWbNGq1du1Z16tTR2LFjS7KvAAAAwN9nmFyzlQHFrhx8//33+uyzz9ShQwcNGTJEMTExWrlypVq1aiVJmjhxom677bYS6ygAAACAklXswUFSUpLi4uIkSZUrV5aPj49iYmKsr1etWlVnzpy59j0EAAAAriWekFykYk8rslgscnd3t37t7u5u87yDP3v2AQAAAIDSz6G7FX300UcKCAiQJBUUFGjmzJmKiIiQJGVmZl773gEAAADXGpWDIhV7cFC1alVNnz7d+nV0dLT++9//FooBAAAAUDYVe3Bw5MiREuwGAAAAAFf7Ww9BAwAAAMoak+GCJySXkWlFxV6QDAAAAOCfjcoBAAAAyhcWJBeJygEAAAAASQwOAAAAAFzEtCIAAACUL0wrKlKxBwdubm5/+RRkk8mkgoKCv90pAAAAAM5X7MHBggULinxtzZo1evfdd2WxWK5JpwAAAICSwq1Mi1bswUGvXr0Kte3du1dPP/20/ve//+nuu+/WSy+9dE07BwAAAMB5rmpB8smTJzV8+HA1aNBABQUF2rJli2bNmqVq1apd6/4BAAAA15Zhcs1WBjg0OEhPT9dTTz2lmjVraufOnVqxYoX+97//qX79+iXVPwAAAABOUuxpRa+//romTpyo6Ohoff7553anGQEAAAAou4o9OHj66afl6+urmjVratasWZo1a5bduPnz51+zzgEAAADXHLcyLVKxBweDBg36y1uZAgAAACi7ij04mDlzZgl2AwAAAHAObmVatKu6WxEAAACAfx4GBwAAAAAkOTCtCAAAAPhHYEFykagcAAAAAJBE5QAAAADljQsWJFM5AAAAAFCmUDkAAABA+cKagyJROQAAAAAgicEBAAAAgIuYVgQAAIDyhWlFRaJyAAAAAEASlYNiu+2hrur75G0Kiw7Rwa1H9cGjM7R3/YEi49v3uUGDXxqg6NgKStifqI+enq1132+2iRn8Yn91u6+TAkL8tfPXPXr3oelKOJBY0pfiNOTMceTMcT3uaaM+wzsqtEKgDu0+qSkvLNC+bceLjG/braEGjeqmqCqhSjiSrE8mLtL61Xusr4dEBGjY6
B5q2i5O/kG+2rHukKa8uEAnjyQ743Kc4p6GjXV/s+aq4Oev3cln9MLqldqaZP9n4s469fRml1ts2nILClT7g8k2bU/c0FoD6jdQkLe3Npw8qedWLdeRtLSSugSna1+hs26O6q4gz2CdOH9MXx77VEezD9mNfTxurOIC6xRq35G+RR8eeFOS9GGz2Xb3nX/icy1PWnztOu5C5MxxTcN6qFX4nQrwCNXpnMP6IXGKTp3fV2R8i7BeahLWXUGeFXTenKE9Gb9oddJMmY18SVJ8RD/VCmqtMK8qKjDylJC9W6uSZiglL8FZl1RqmVxwK1On3zr1Krm8cpCQkKB//etfCg8Pl6+vrxo0aKANGza4uls2OvRrrQfeGqzZL32lEc2e0qFtRzVhyViFVAiyG183Pk7PfPa4lsxYqRFNR+vXb9fphQWjFVsvxhrTf3Qv9X6kmyaPmKZHbhijnKxcTVjyrDy9PZ11WSWKnDmOnDmufffGuv+Z2zTn3R/0SM+3dXj3Sb0y634FhwfYja/TNFZPT/6Xln65ViN7TNKaH3boualDVS0u2hozbupQRVcN00sPfKKRPSbpdEKqXv3vA/L29XLWZZWo7tfX0th2HTR57Rr1+Py/2n3mjGb1vlPhvr5F7pORm6sW06dYt7afTLd5/YFmLTSkcRM9u3K5bv/iM53Pz9es3nfKy929pC/HKZqFttKdVe7W4lMLNGH3s0rIPqZHrn9KAR72fzenHXxHT2992Lq9vPMpmQ2zNqWutcZc+frTWx/Wp0emyWJYtDl1nbMuq0SRM8fVCWqvTlHD9cuZzzTj0CNKyjmk/tVelp97sN34usEd1TFqqH4585mmH3hA3yW8ozpB7dUxcog1pqpffW1MWaRPD4/S3CNj5WZy14Bq4+Vp8nbSVaEscungIDU1VW3atJGnp6e+//577dq1S2+99ZZCQ0Nd2a1C7nyih77/aIWWzlytY7tPaPKD05Sbnaeuw26yG3/7o921fskWffXmQh3bk6BZ477QgU2H1Gvk5U/fbn+su+aMn6c1Czfo8PZjmjj4fYVXClWb3i2cdVklipw5jpw57vZ72+v7L37Xsq/X69iBJL337Dzlns9Xl74t7cb3GtJOG37aq3nTV+v4wdP679tLdHBngnoOaiNJqlw9QnWaxur95+Zp37bjSjh8Ru8/N0/e3p7q2LOJMy+txNzXtJm+2LldX+/aqQMpKRq7cpnOF+Srb70Gf7KXoeTsbJvtSsOaNNX769Zq2aGD2pOcrH//8L2i/APUpUbNkr0YJ7kpqpt+TV6l38/+pMSck/r82CfKs+SqdXgHu/HZ5ixlFKRbt9pB9ZVnydOmK97EXvl6RkG6GoU01b7M3Tqbd8ZZl1WiyJnjWobfrq2pS7Q9bZnO5h7XklPvq8CSq4ahXezGV/GtoxPZu7QrfbXS80/rcNZm7Ur/URV946wxXxwbp+1py5Wce0yncw9rUcIkBXtFKtr3emddFsoglw4OJk6cqJiYGH3yySdq2bKlqlevri5duqhGjRqu7JYND08PxTW7TpuWb7O2GYahTcu3qe4NcXb3qRsfp00rttm0bfhhq+pcjI+uHqnwiqHavHy79fXsjGztWXtAdeNrlcBVOBc5cxw5c5yHp7uur19FW37db20zDENbft2nOk2q2d2nTtNq2vKrbYl+4897VadJrCTJ0+vCTMv83AKbY+bnmVWvefVrfAXO5+nmpvqRUfrl2DFrmyHp12PH1DS6YpH7+Xl66Zehw/XrsPs1rUcvXR8Wbn0tJihYkf4B+uXYUWtbZl6etiSeUtPoSiVyHc7kbnJXVb/q2pux09pmyNCezJ2qHlC8wU/riI7amLJGeZZcu68HegSpfnBj/Za8+lp02eXImePcTB6K9q2pw1lbrmg1dCRriyr71ra7z4nzuxXtW9M6GAjxjFaNwOY6eG59kefxcfeXJJ03Z16rruMfyKWDg4ULF6p58+bq27evIiMj1aRJE02fPr3I+NzcXGVkZNhsJS04IlDuHu5KTUq3aU89na7Q6BC7+4RGhyjtj/FJaQq7GH/pz9SktEIxoVH2j1mWkDPHkTPHBYX6X8hZsu1/cqnJ5xRaIdDuPqERgUpNPveH+Exr/PGDp5WUkKIh/3erAoJ85eHprr4P3KgKlUIUFml/OkRZEurrKw83NyVnZ9m0J2dnq4K/v919DqWm6KllSzX8f99o1NLv5GYy6et+AxUdcGHq1qX9/lhN+LNjliUBHoFyN7kro8D2dy0zP11Bnvane1ypmt91quwbo1//5E3sDeHtlGPO0Za00jWl9mqRM8f5uQfJzeSu7IJUm/asgjQFeITZ3WdX+mr9fHq27ol9Q6PrLtSIuBk6lrVda5K/LOIsJnWOfkDHs3YqOfdoETGAiwcHhw4d0pQpU3T99ddr6dKlGjFihB599FHNmjXLbvyECRMUHBxs3WJiYuzGAcDVMBdY9MqIWapcvYK+2vKKvtk5QQ1vqKn1q3fLYrG4unsusTnxlObv2aXdyWe0NuGEHly8UCnns3VX/Yau7lqZ0DqioxKyjxW5EFeS4iM6aH3Kbyq4uIi0vCNnxVPVr4HiI/pp6akP9cnBRzXv2MuqEdBCbSoMtBvfteJDivCupm9PvObknpZShou2MsClgwOLxaKmTZvq1VdfVZMmTXT//fdr+PDhmjp1qt34MWPGKD093bodP170HUmulfTkTJkLzAqNsv20IzQyWKmJaXb3SU1MU8gf46NClHIx/tKff/z0NjQqpNCnvGUROXMcOXNcRmrWhZxF2FYJQiMClHrGfsk8NTlToREBf4gPtIk/sOOERvaYpDsbjdXdN7yo54ZOV2CInxKPp1z7i3Cy1PPnVWCxKMLP9hP9CD8/ncnKKmIvWwUWi3adOa1qIRfWhl3aL8LP76qPWZqdK8iU2TAryMP2dy3QM1gZ+elF7HWBl5u3mofdoN/O/lhkTI2AWor2qfSnn5KXNeTMcdnmDFkMs/w8bNdc+nuE6FyB/X972kfeox3pK7U1banO5B7Rvsw1+vH0LMVH9JVksontEj1CNQNb6rMjTyuz4GxJXQb+IVw6OKhYsaLq1q1r01anTh0du2I+7JW8vb0VFBRks5W0gvwC7dt4SE06XV6sZzKZ1KRTA+363f7txXat2acmN9ku7mvauaF2X4xPPHxaZ0+lqkmn+tbX/QJ9VbtVTe1as7cErsK5yJnjyJnjCvLN2r/jhBq3vrywzmQyqXHr67V7s/2S+e5NR23iJalJmzjt3nykUGx2Zo7SU7JUKTZC1zeI0e/LdlzT/rtCvsWiHaeT1CamqrXNJKl1TFVtSjxVrGO4mUyqFV5Bp7MuTM86npGu01nnbI4Z4OWlxtEVtSnx5DXtvyuYDbOOZR9WraB61jaTTKoVWE+HzxV9m2FJahraUh4mD607+2uRMa3DO+ho1iElnLf//15ZRM4cZzEKlHj+gGL9G13RalI1/8ZKOL/H7j4ebt4y/vBRtGFYLu55eXDQJXqE4oLi9dmRMUrPT7rmfS+rLt3K1NlbWeDSwUGb
[... remainder of the base64-encoded PNG output omitted: heat map of quantized-model top-1 agreement with the FP32 ResNet18, plotted over n_bits and rounding_threshold_bits ...]",
+      "text/plain": [
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "import torch\n", + "from concrete.ml.torch.compile import build_quantized_module, compile_torch_model\n", + "\n", + "def run_experiment(fhe_mode=\"disable\"):\n", + " # Assuming 'images' and 'resnet18' are already defined\n", + " resnet18.eval() # Set the model to inference mode\n", + "\n", + " # Define ranges for n_bits and rounding_threshold_bits\n", + " n_bits_range = range(2, 10) # 2 to 10\n", + " rounding_threshold_bits_range = list(range(2, 9)) + [None] # 2 to 9 and None\n", + "\n", + " # Initialize a dictionary to store accuracies for each combination\n", + " accuracies = {}\n", + "\n", + " # Loop over the ranges of n_bits and rounding_threshold_bits\n", + " for n_bits in n_bits_range:\n", + " for rounding_threshold_bits in rounding_threshold_bits_range:\n", + " compile_method = build_quantized_module if fhe_mode == \"disable\" else compile_torch_model\n", + " q_module = compile_method(\n", + " resnet18,\n", + " torch_inputset=images,\n", + " n_bits=n_bits,\n", + " rounding_threshold_bits=rounding_threshold_bits,\n", + " )\n", + "\n", + " with torch.no_grad():\n", + " outputs_fhe = q_module.forward(images.detach().numpy(), fhe=fhe_mode)\n", + " probabilities_fhe = torch.nn.functional.softmax(torch.from_numpy(outputs_fhe), dim=-1)\n", + " outputs = resnet18(images)\n", + " probabilities = torch.nn.functional.softmax(outputs, dim=-1)\n", + "\n", + " # Calculate and store accuracy\n", + " fhe_accuracy_vs_fp32 = (\n", + " (probabilities_fhe.argmax(-1) == probabilities.argmax(-1)).float().mean().item()\n", + " )\n", + " accuracies[(n_bits, rounding_threshold_bits)] = fhe_accuracy_vs_fp32\n", + "\n", + " # Convert accuracies to a 2D array for plotting\n", + " accuracy_matrix = np.zeros((len(n_bits_range), len(rounding_threshold_bits_range)))\n", + " for i, n_bits in enumerate(n_bits_range):\n", + " for j, rounding_threshold_bits in enumerate(rounding_threshold_bits_range):\n", + " accuracy_matrix[i, j] = accuracies[(n_bits, rounding_threshold_bits)]\n", + "\n", + " # Plotting\n", + " fig, ax = plt.subplots(figsize=(10, 8))\n", + " cax = ax.matshow(accuracy_matrix, cmap=\"viridis\")\n", + " fig.colorbar(cax)\n", + "\n", + " # Set ticks and labels\n", + " ax.set_xticklabels([\"\"] + rounding_threshold_bits_range, rotation=45)\n", + " ax.set_yticklabels([\"\"] + list(n_bits_range))\n", + " ax.set_xlabel(\"Rounding Threshold Bits\")\n", + " ax.set_ylabel(\"N Bits\")\n", + " ax.set_title(f\"Accuracy of FHE ({fhe_mode}) vs. 
FP32\")\n", + "\n", + " # Annotate each cell with the accuracy percentage\n", + " for i in range(len(n_bits_range)):\n", + " for j in range(len(rounding_threshold_bits_range)):\n", + " ax.text(j, i, f\"{accuracy_matrix[i, j]:.2f}\", va=\"center\", ha=\"center\", color=\"white\")\n", + "\n", + " plt.show()\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "run_experiment(fhe_mode=\"disable\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "run_experiment(fhe_mode=\"simulate\")" + ] + } + ], + "metadata": { + "execution": { + "timeout": 10800 + }, + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.18" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} From c992b0ba31d54529653639f68ddcc3a7c241f700 Mon Sep 17 00:00:00 2001 From: jfrery Date: Thu, 13 Jun 2024 08:39:53 +0000 Subject: [PATCH 2/9] chore: add python script with optional fhe execution --- use_case_examples/resnet/run_resnet18_fhe.py | 153 +++++++++++++++++++ 1 file changed, 153 insertions(+) create mode 100644 use_case_examples/resnet/run_resnet18_fhe.py diff --git a/use_case_examples/resnet/run_resnet18_fhe.py b/use_case_examples/resnet/run_resnet18_fhe.py new file mode 100644 index 000000000..2a25ff072 --- /dev/null +++ b/use_case_examples/resnet/run_resnet18_fhe.py @@ -0,0 +1,153 @@ +import argparse +import torch +from resnet import ResNet18_Weights, resnet18_custom +from concrete.ml.torch.compile import compile_torch_model +import requests +from io import BytesIO +from PIL import Image +from torchvision import transforms +import time +from pathlib import Path + +parser = argparse.ArgumentParser(description="Run ResNet18 model with FHE execution.") +parser.add_argument('--run_fhe', action='store_true', help="Run the actual FHE execution.") +args = parser.parse_args() + +BASE_DIR = Path(__file__).resolve().parent + +# Load the ResNet18 model with pretrained weights +resnet18 = resnet18_custom(weights=ResNet18_Weights.IMAGENET1K_V1) + +# Use ImageNet classes file to map class names to indices +imagenet_classes_path = BASE_DIR / "imagenet_classes.txt" +with open(imagenet_classes_path, "r") as f: + class_to_index = {cls: idx for idx, cls in enumerate([line.strip() for line in f.readlines()])} + +# Define image transformation +transform = transforms.Compose([ + transforms.Resize(256), + transforms.CenterCrop(224), + transforms.ToTensor(), + transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]), +]) + +# TODO: have a more automated way to grab N images from the net. 
+# Download an example image from the web +image_urls = [ + "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01443537_goldfish.JPEG", + "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01614925_bald_eagle.JPEG", + "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01697457_African_crocodile.JPEG", + "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01592084_chickadee.JPEG", + "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01601694_water_ouzel.JPEG", + "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01739381_vine_snake.JPEG", + "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01806567_quail.JPEG", + "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01917289_brain_coral.JPEG", + "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02077923_sea_lion.JPEG", + "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02051845_pelican.JPEG", + "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02110185_Siberian_husky.JPEG", + "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02165456_ladybug.JPEG", + "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02325366_wood_rabbit.JPEG", + "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02391049_zebra.JPEG", + "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02481823_chimpanzee.JPEG", + "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02510455_giant_panda.JPEG", + "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02643566_lionfish.JPEG", + "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02787622_banjo.JPEG", + "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02817516_bearskin.JPEG", + "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02871525_bookshop.JPEG", + "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02930766_cab.JPEG", + "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02974003_car_wheel.JPEG", +] + +images, labels = [], [] +for image_url in image_urls: + class_name = '_'.join(image_url.split('/')[-1].split('.')[0].split('_')[1:]).replace('_', ' ') + if class_name in class_to_index: + response = requests.get(image_url) + img = Image.open(BytesIO(response.content)) + images.append(transform(img)) + labels.append(class_to_index[class_name]) + +# Stack images to create a mini batch +images = torch.stack(images) +labels = torch.tensor(labels) + +# Function to compute accuracy +def compute_accuracy(predicted, labels): + correct = (predicted == labels).sum().item() + total = labels.size(0) + return 100 * correct / total + +# Function to compute top-k accuracy +def compute_topk_accuracy(outputs, labels, topk=5): + _, topk_predicted = torch.topk(outputs, topk, dim=1) + correct_topk = sum([labels[i] in topk_predicted[i] for i in range(len(labels))]) + total = labels.size(0) + return 100 * correct_topk / total + +# Forward pass through the model to get the predictions +with torch.no_grad(): + outputs = resnet18(images) + _, predicted = torch.max(outputs, 1) + +# Compute and print accuracy +accuracy = compute_accuracy(predicted, labels) 
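As a quick sanity check of the two metric helpers defined above, here is a tiny worked example with made-up logits and labels (it assumes `compute_accuracy` and `compute_topk_accuracy` from the hunk above are in scope; the numbers are illustrative, not results from the actual run):

```python
import torch

# Two samples over three classes: sample 0's top-2 classes are {1, 2}, sample 1's are {0, 1}.
toy_logits = torch.tensor([[0.1, 0.7, 0.2],
                           [0.6, 0.3, 0.1]])
toy_labels = torch.tensor([1, 2])

_, toy_predicted = torch.max(toy_logits, 1)
print(compute_accuracy(toy_predicted, toy_labels))            # 50.0 -> only sample 0 is correct
print(compute_topk_accuracy(toy_logits, toy_labels, topk=2))  # 50.0 -> class 2 is outside sample 1's top-2
```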
+print(f"Accuracy of the ResNet18 model on the images: {accuracy:.4f}%") + +topk_accuracy = compute_topk_accuracy(outputs, labels, topk=5) +print(f"Top-5 Accuracy of the ResNet18 model on the images: {topk_accuracy:.4f}%") + +# Compile the model +print("Compiling the model...") +q_module = compile_torch_model( + resnet18, + torch_inputset=images, + n_bits={"model_inputs": 8, "op_inputs": 7, "op_weights": 6, "model_outputs": 8}, + rounding_threshold_bits={"n_bits": 7, "method":"APPROXIMATE"}, + p_error=0.005 +) +print("Model compiled successfully.") + +# Forward pass with FHE disabled +with torch.no_grad(): + outputs_disable = q_module.forward(images.detach().numpy(), fhe="disable") + _, predicted_disable = torch.max(torch.from_numpy(outputs_disable), 1) + +# Compute accuracy +fhe_accuracy_vs_fp32 = (predicted_disable == predicted).float().mean().item() +print(f"Quantized Model Fidelity with FP32: {fhe_accuracy_vs_fp32:.4f}%") + +# Compute and print accuracy for quantized model +accuracy = compute_accuracy(predicted_disable, labels) +print(f"Quantized Model Accuracy of the FHEResNet18 on the images: {accuracy:.4f}%") +topk_accuracy = compute_topk_accuracy(torch.from_numpy(outputs_disable), labels, topk=5) +print(f"Quantized Model Top-5 Accuracy of the FHEResNet18 on the images: {topk_accuracy:.4f}%") + +# Forward pass with FHE simulation +with torch.no_grad(): + outputs_simulate = q_module.forward(images.detach().numpy(), fhe="simulate") + _, predicted_simulate = torch.max(torch.from_numpy(outputs_simulate), 1) + +# Compute and print accuracy for FHE simulation +accuracy = compute_accuracy(predicted_simulate, labels) +print(f"FHE Simulation Accuracy of the FHEResNet18 on the images: {accuracy:.4f}%") +topk_accuracy = compute_topk_accuracy(torch.from_numpy(outputs_simulate), labels, topk=5) +print(f"FHE Simulation Top-5 Accuracy of the FHEResNet18 on the images: {topk_accuracy:.4f}%") + +if args.run_fhe: + # Run FHE execution and measure time on a single image + q_module.fhe_circuit.keygen() + single_image = images[0:1].detach().numpy() + + start = time.time() + fhe_output = q_module.forward(single_image, fhe="simulate") + end = time.time() + print(f"Time taken for one FHE execution: {end - start:.4f} seconds") + print(f"FHE execution output: {fhe_output}") + + # Run FHE simulation on the same single image + fhe_sim_output = q_module.forward(single_image, fhe="simulate") + print(f"FHE simulation output: {fhe_sim_output}") + print(f"Actual label: {labels[0].item()}") + +else: + print("FHE execution was not run. 
Use --run_fhe to enable it.") \ No newline at end of file From 76811ff9d3aa7afba777b3c9fc0e94c8ae3a001a Mon Sep 17 00:00:00 2001 From: jfrery Date: Thu, 13 Jun 2024 11:13:17 +0200 Subject: [PATCH 3/9] chore: clean script to launch resnet evaluation closes https://github.com/zama-ai/concrete-ml-internal/issues/4379 --- .../resnet/LOC_synset_mapping.txt | 1000 +++++++++ use_case_examples/resnet/README.md | 69 +- .../resnet/folder_index_class.txt | 1000 --------- use_case_examples/resnet/imagenet_classes.txt | 1000 --------- use_case_examples/resnet/requirements.txt | 1 + use_case_examples/resnet/resnet.py | 37 +- use_case_examples/resnet/resnet_fhe.ipynb | 1989 ----------------- use_case_examples/resnet/run_resnet18_fhe.py | 168 +- use_case_examples/resnet/utils_resnet.py | 111 + 9 files changed, 1267 insertions(+), 4108 deletions(-) create mode 100644 use_case_examples/resnet/LOC_synset_mapping.txt delete mode 100644 use_case_examples/resnet/folder_index_class.txt delete mode 100644 use_case_examples/resnet/imagenet_classes.txt create mode 100644 use_case_examples/resnet/requirements.txt delete mode 100644 use_case_examples/resnet/resnet_fhe.ipynb create mode 100644 use_case_examples/resnet/utils_resnet.py diff --git a/use_case_examples/resnet/LOC_synset_mapping.txt b/use_case_examples/resnet/LOC_synset_mapping.txt new file mode 100644 index 000000000..a9e8c7f50 --- /dev/null +++ b/use_case_examples/resnet/LOC_synset_mapping.txt @@ -0,0 +1,1000 @@ +n01440764 tench, Tinca tinca +n01443537 goldfish, Carassius auratus +n01484850 great white shark, white shark, man-eater, man-eating shark, Carcharodon carcharias +n01491361 tiger shark, Galeocerdo cuvieri +n01494475 hammerhead, hammerhead shark +n01496331 electric ray, crampfish, numbfish, torpedo +n01498041 stingray +n01514668 cock +n01514859 hen +n01518878 ostrich, Struthio camelus +n01530575 brambling, Fringilla montifringilla +n01531178 goldfinch, Carduelis carduelis +n01532829 house finch, linnet, Carpodacus mexicanus +n01534433 junco, snowbird +n01537544 indigo bunting, indigo finch, indigo bird, Passerina cyanea +n01558993 robin, American robin, Turdus migratorius +n01560419 bulbul +n01580077 jay +n01582220 magpie +n01592084 chickadee +n01601694 water ouzel, dipper +n01608432 kite +n01614925 bald eagle, American eagle, Haliaeetus leucocephalus +n01616318 vulture +n01622779 great grey owl, great gray owl, Strix nebulosa +n01629819 European fire salamander, Salamandra salamandra +n01630670 common newt, Triturus vulgaris +n01631663 eft +n01632458 spotted salamander, Ambystoma maculatum +n01632777 axolotl, mud puppy, Ambystoma mexicanum +n01641577 bullfrog, Rana catesbeiana +n01644373 tree frog, tree-frog +n01644900 tailed frog, bell toad, ribbed toad, tailed toad, Ascaphus trui +n01664065 loggerhead, loggerhead turtle, Caretta caretta +n01665541 leatherback turtle, leatherback, leathery turtle, Dermochelys coriacea +n01667114 mud turtle +n01667778 terrapin +n01669191 box turtle, box tortoise +n01675722 banded gecko +n01677366 common iguana, iguana, Iguana iguana +n01682714 American chameleon, anole, Anolis carolinensis +n01685808 whiptail, whiptail lizard +n01687978 agama +n01688243 frilled lizard, Chlamydosaurus kingi +n01689811 alligator lizard +n01692333 Gila monster, Heloderma suspectum +n01693334 green lizard, Lacerta viridis +n01694178 African chameleon, Chamaeleo chamaeleon +n01695060 Komodo dragon, Komodo lizard, dragon lizard, giant lizard, Varanus komodoensis +n01697457 African crocodile, Nile crocodile, Crocodylus niloticus 
+n01698640 American alligator, Alligator mississipiensis +n01704323 triceratops +n01728572 thunder snake, worm snake, Carphophis amoenus +n01728920 ringneck snake, ring-necked snake, ring snake +n01729322 hognose snake, puff adder, sand viper +n01729977 green snake, grass snake +n01734418 king snake, kingsnake +n01735189 garter snake, grass snake +n01737021 water snake +n01739381 vine snake +n01740131 night snake, Hypsiglena torquata +n01742172 boa constrictor, Constrictor constrictor +n01744401 rock python, rock snake, Python sebae +n01748264 Indian cobra, Naja naja +n01749939 green mamba +n01751748 sea snake +n01753488 horned viper, cerastes, sand viper, horned asp, Cerastes cornutus +n01755581 diamondback, diamondback rattlesnake, Crotalus adamanteus +n01756291 sidewinder, horned rattlesnake, Crotalus cerastes +n01768244 trilobite +n01770081 harvestman, daddy longlegs, Phalangium opilio +n01770393 scorpion +n01773157 black and gold garden spider, Argiope aurantia +n01773549 barn spider, Araneus cavaticus +n01773797 garden spider, Aranea diademata +n01774384 black widow, Latrodectus mactans +n01774750 tarantula +n01775062 wolf spider, hunting spider +n01776313 tick +n01784675 centipede +n01795545 black grouse +n01796340 ptarmigan +n01797886 ruffed grouse, partridge, Bonasa umbellus +n01798484 prairie chicken, prairie grouse, prairie fowl +n01806143 peacock +n01806567 quail +n01807496 partridge +n01817953 African grey, African gray, Psittacus erithacus +n01818515 macaw +n01819313 sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita +n01820546 lorikeet +n01824575 coucal +n01828970 bee eater +n01829413 hornbill +n01833805 hummingbird +n01843065 jacamar +n01843383 toucan +n01847000 drake +n01855032 red-breasted merganser, Mergus serrator +n01855672 goose +n01860187 black swan, Cygnus atratus +n01871265 tusker +n01872401 echidna, spiny anteater, anteater +n01873310 platypus, duckbill, duckbilled platypus, duck-billed platypus, Ornithorhynchus anatinus +n01877812 wallaby, brush kangaroo +n01882714 koala, koala bear, kangaroo bear, native bear, Phascolarctos cinereus +n01883070 wombat +n01910747 jellyfish +n01914609 sea anemone, anemone +n01917289 brain coral +n01924916 flatworm, platyhelminth +n01930112 nematode, nematode worm, roundworm +n01943899 conch +n01944390 snail +n01945685 slug +n01950731 sea slug, nudibranch +n01955084 chiton, coat-of-mail shell, sea cradle, polyplacophore +n01968897 chambered nautilus, pearly nautilus, nautilus +n01978287 Dungeness crab, Cancer magister +n01978455 rock crab, Cancer irroratus +n01980166 fiddler crab +n01981276 king crab, Alaska crab, Alaskan king crab, Alaska king crab, Paralithodes camtschatica +n01983481 American lobster, Northern lobster, Maine lobster, Homarus americanus +n01984695 spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish +n01985128 crayfish, crawfish, crawdad, crawdaddy +n01986214 hermit crab +n01990800 isopod +n02002556 white stork, Ciconia ciconia +n02002724 black stork, Ciconia nigra +n02006656 spoonbill +n02007558 flamingo +n02009229 little blue heron, Egretta caerulea +n02009912 American egret, great white heron, Egretta albus +n02011460 bittern +n02012849 crane +n02013706 limpkin, Aramus pictus +n02017213 European gallinule, Porphyrio porphyrio +n02018207 American coot, marsh hen, mud hen, water hen, Fulica americana +n02018795 bustard +n02025239 ruddy turnstone, Arenaria interpres +n02027492 red-backed sandpiper, dunlin, Erolia alpina +n02028035 redshank, Tringa totanus +n02033041 dowitcher 
+n02037110 oystercatcher, oyster catcher +n02051845 pelican +n02056570 king penguin, Aptenodytes patagonica +n02058221 albatross, mollymawk +n02066245 grey whale, gray whale, devilfish, Eschrichtius gibbosus, Eschrichtius robustus +n02071294 killer whale, killer, orca, grampus, sea wolf, Orcinus orca +n02074367 dugong, Dugong dugon +n02077923 sea lion +n02085620 Chihuahua +n02085782 Japanese spaniel +n02085936 Maltese dog, Maltese terrier, Maltese +n02086079 Pekinese, Pekingese, Peke +n02086240 Shih-Tzu +n02086646 Blenheim spaniel +n02086910 papillon +n02087046 toy terrier +n02087394 Rhodesian ridgeback +n02088094 Afghan hound, Afghan +n02088238 basset, basset hound +n02088364 beagle +n02088466 bloodhound, sleuthhound +n02088632 bluetick +n02089078 black-and-tan coonhound +n02089867 Walker hound, Walker foxhound +n02089973 English foxhound +n02090379 redbone +n02090622 borzoi, Russian wolfhound +n02090721 Irish wolfhound +n02091032 Italian greyhound +n02091134 whippet +n02091244 Ibizan hound, Ibizan Podenco +n02091467 Norwegian elkhound, elkhound +n02091635 otterhound, otter hound +n02091831 Saluki, gazelle hound +n02092002 Scottish deerhound, deerhound +n02092339 Weimaraner +n02093256 Staffordshire bullterrier, Staffordshire bull terrier +n02093428 American Staffordshire terrier, Staffordshire terrier, American pit bull terrier, pit bull terrier +n02093647 Bedlington terrier +n02093754 Border terrier +n02093859 Kerry blue terrier +n02093991 Irish terrier +n02094114 Norfolk terrier +n02094258 Norwich terrier +n02094433 Yorkshire terrier +n02095314 wire-haired fox terrier +n02095570 Lakeland terrier +n02095889 Sealyham terrier, Sealyham +n02096051 Airedale, Airedale terrier +n02096177 cairn, cairn terrier +n02096294 Australian terrier +n02096437 Dandie Dinmont, Dandie Dinmont terrier +n02096585 Boston bull, Boston terrier +n02097047 miniature schnauzer +n02097130 giant schnauzer +n02097209 standard schnauzer +n02097298 Scotch terrier, Scottish terrier, Scottie +n02097474 Tibetan terrier, chrysanthemum dog +n02097658 silky terrier, Sydney silky +n02098105 soft-coated wheaten terrier +n02098286 West Highland white terrier +n02098413 Lhasa, Lhasa apso +n02099267 flat-coated retriever +n02099429 curly-coated retriever +n02099601 golden retriever +n02099712 Labrador retriever +n02099849 Chesapeake Bay retriever +n02100236 German short-haired pointer +n02100583 vizsla, Hungarian pointer +n02100735 English setter +n02100877 Irish setter, red setter +n02101006 Gordon setter +n02101388 Brittany spaniel +n02101556 clumber, clumber spaniel +n02102040 English springer, English springer spaniel +n02102177 Welsh springer spaniel +n02102318 cocker spaniel, English cocker spaniel, cocker +n02102480 Sussex spaniel +n02102973 Irish water spaniel +n02104029 kuvasz +n02104365 schipperke +n02105056 groenendael +n02105162 malinois +n02105251 briard +n02105412 kelpie +n02105505 komondor +n02105641 Old English sheepdog, bobtail +n02105855 Shetland sheepdog, Shetland sheep dog, Shetland +n02106030 collie +n02106166 Border collie +n02106382 Bouvier des Flandres, Bouviers des Flandres +n02106550 Rottweiler +n02106662 German shepherd, German shepherd dog, German police dog, alsatian +n02107142 Doberman, Doberman pinscher +n02107312 miniature pinscher +n02107574 Greater Swiss Mountain dog +n02107683 Bernese mountain dog +n02107908 Appenzeller +n02108000 EntleBucher +n02108089 boxer +n02108422 bull mastiff +n02108551 Tibetan mastiff +n02108915 French bulldog +n02109047 Great Dane +n02109525 Saint Bernard, St Bernard 
+n02109961 Eskimo dog, husky +n02110063 malamute, malemute, Alaskan malamute +n02110185 Siberian husky +n02110341 dalmatian, coach dog, carriage dog +n02110627 affenpinscher, monkey pinscher, monkey dog +n02110806 basenji +n02110958 pug, pug-dog +n02111129 Leonberg +n02111277 Newfoundland, Newfoundland dog +n02111500 Great Pyrenees +n02111889 Samoyed, Samoyede +n02112018 Pomeranian +n02112137 chow, chow chow +n02112350 keeshond +n02112706 Brabancon griffon +n02113023 Pembroke, Pembroke Welsh corgi +n02113186 Cardigan, Cardigan Welsh corgi +n02113624 toy poodle +n02113712 miniature poodle +n02113799 standard poodle +n02113978 Mexican hairless +n02114367 timber wolf, grey wolf, gray wolf, Canis lupus +n02114548 white wolf, Arctic wolf, Canis lupus tundrarum +n02114712 red wolf, maned wolf, Canis rufus, Canis niger +n02114855 coyote, prairie wolf, brush wolf, Canis latrans +n02115641 dingo, warrigal, warragal, Canis dingo +n02115913 dhole, Cuon alpinus +n02116738 African hunting dog, hyena dog, Cape hunting dog, Lycaon pictus +n02117135 hyena, hyaena +n02119022 red fox, Vulpes vulpes +n02119789 kit fox, Vulpes macrotis +n02120079 Arctic fox, white fox, Alopex lagopus +n02120505 grey fox, gray fox, Urocyon cinereoargenteus +n02123045 tabby, tabby cat +n02123159 tiger cat +n02123394 Persian cat +n02123597 Siamese cat, Siamese +n02124075 Egyptian cat +n02125311 cougar, puma, catamount, mountain lion, painter, panther, Felis concolor +n02127052 lynx, catamount +n02128385 leopard, Panthera pardus +n02128757 snow leopard, ounce, Panthera uncia +n02128925 jaguar, panther, Panthera onca, Felis onca +n02129165 lion, king of beasts, Panthera leo +n02129604 tiger, Panthera tigris +n02130308 cheetah, chetah, Acinonyx jubatus +n02132136 brown bear, bruin, Ursus arctos +n02133161 American black bear, black bear, Ursus americanus, Euarctos americanus +n02134084 ice bear, polar bear, Ursus Maritimus, Thalarctos maritimus +n02134418 sloth bear, Melursus ursinus, Ursus ursinus +n02137549 mongoose +n02138441 meerkat, mierkat +n02165105 tiger beetle +n02165456 ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle +n02167151 ground beetle, carabid beetle +n02168699 long-horned beetle, longicorn, longicorn beetle +n02169497 leaf beetle, chrysomelid +n02172182 dung beetle +n02174001 rhinoceros beetle +n02177972 weevil +n02190166 fly +n02206856 bee +n02219486 ant, emmet, pismire +n02226429 grasshopper, hopper +n02229544 cricket +n02231487 walking stick, walkingstick, stick insect +n02233338 cockroach, roach +n02236044 mantis, mantid +n02256656 cicada, cicala +n02259212 leafhopper +n02264363 lacewing, lacewing fly +n02268443 dragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk +n02268853 damselfly +n02276258 admiral +n02277742 ringlet, ringlet butterfly +n02279972 monarch, monarch butterfly, milkweed butterfly, Danaus plexippus +n02280649 cabbage butterfly +n02281406 sulphur butterfly, sulfur butterfly +n02281787 lycaenid, lycaenid butterfly +n02317335 starfish, sea star +n02319095 sea urchin +n02321529 sea cucumber, holothurian +n02325366 wood rabbit, cottontail, cottontail rabbit +n02326432 hare +n02328150 Angora, Angora rabbit +n02342885 hamster +n02346627 porcupine, hedgehog +n02356798 fox squirrel, eastern fox squirrel, Sciurus niger +n02361337 marmot +n02363005 beaver +n02364673 guinea pig, Cavia cobaya +n02389026 sorrel +n02391049 zebra +n02395406 hog, pig, grunter, squealer, Sus scrofa +n02396427 wild boar, boar, Sus scrofa +n02397096 
warthog +n02398521 hippopotamus, hippo, river horse, Hippopotamus amphibius +n02403003 ox +n02408429 water buffalo, water ox, Asiatic buffalo, Bubalus bubalis +n02410509 bison +n02412080 ram, tup +n02415577 bighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, Rocky Mountain sheep, Ovis canadensis +n02417914 ibex, Capra ibex +n02422106 hartebeest +n02422699 impala, Aepyceros melampus +n02423022 gazelle +n02437312 Arabian camel, dromedary, Camelus dromedarius +n02437616 llama +n02441942 weasel +n02442845 mink +n02443114 polecat, fitch, foulmart, foumart, Mustela putorius +n02443484 black-footed ferret, ferret, Mustela nigripes +n02444819 otter +n02445715 skunk, polecat, wood pussy +n02447366 badger +n02454379 armadillo +n02457408 three-toed sloth, ai, Bradypus tridactylus +n02480495 orangutan, orang, orangutang, Pongo pygmaeus +n02480855 gorilla, Gorilla gorilla +n02481823 chimpanzee, chimp, Pan troglodytes +n02483362 gibbon, Hylobates lar +n02483708 siamang, Hylobates syndactylus, Symphalangus syndactylus +n02484975 guenon, guenon monkey +n02486261 patas, hussar monkey, Erythrocebus patas +n02486410 baboon +n02487347 macaque +n02488291 langur +n02488702 colobus, colobus monkey +n02489166 proboscis monkey, Nasalis larvatus +n02490219 marmoset +n02492035 capuchin, ringtail, Cebus capucinus +n02492660 howler monkey, howler +n02493509 titi, titi monkey +n02493793 spider monkey, Ateles geoffroyi +n02494079 squirrel monkey, Saimiri sciureus +n02497673 Madagascar cat, ring-tailed lemur, Lemur catta +n02500267 indri, indris, Indri indri, Indri brevicaudatus +n02504013 Indian elephant, Elephas maximus +n02504458 African elephant, Loxodonta africana +n02509815 lesser panda, red panda, panda, bear cat, cat bear, Ailurus fulgens +n02510455 giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca +n02514041 barracouta, snoek +n02526121 eel +n02536864 coho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus kisutch +n02606052 rock beauty, Holocanthus tricolor +n02607072 anemone fish +n02640242 sturgeon +n02641379 gar, garfish, garpike, billfish, Lepisosteus osseus +n02643566 lionfish +n02655020 puffer, pufferfish, blowfish, globefish +n02666196 abacus +n02667093 abaya +n02669723 academic gown, academic robe, judge's robe +n02672831 accordion, piano accordion, squeeze box +n02676566 acoustic guitar +n02687172 aircraft carrier, carrier, flattop, attack aircraft carrier +n02690373 airliner +n02692877 airship, dirigible +n02699494 altar +n02701002 ambulance +n02704792 amphibian, amphibious vehicle +n02708093 analog clock +n02727426 apiary, bee house +n02730930 apron +n02747177 ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin +n02749479 assault rifle, assault gun +n02769748 backpack, back pack, knapsack, packsack, rucksack, haversack +n02776631 bakery, bakeshop, bakehouse +n02777292 balance beam, beam +n02782093 balloon +n02783161 ballpoint, ballpoint pen, ballpen, Biro +n02786058 Band Aid +n02787622 banjo +n02788148 bannister, banister, balustrade, balusters, handrail +n02790996 barbell +n02791124 barber chair +n02791270 barbershop +n02793495 barn +n02794156 barometer +n02795169 barrel, cask +n02797295 barrow, garden cart, lawn cart, wheelbarrow +n02799071 baseball +n02802426 basketball +n02804414 bassinet +n02804610 bassoon +n02807133 bathing cap, swimming cap +n02808304 bath towel +n02808440 bathtub, bathing tub, bath, tub +n02814533 beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon +n02814860 
beacon, lighthouse, beacon light, pharos +n02815834 beaker +n02817516 bearskin, busby, shako +n02823428 beer bottle +n02823750 beer glass +n02825657 bell cote, bell cot +n02834397 bib +n02835271 bicycle-built-for-two, tandem bicycle, tandem +n02837789 bikini, two-piece +n02840245 binder, ring-binder +n02841315 binoculars, field glasses, opera glasses +n02843684 birdhouse +n02859443 boathouse +n02860847 bobsled, bobsleigh, bob +n02865351 bolo tie, bolo, bola tie, bola +n02869837 bonnet, poke bonnet +n02870880 bookcase +n02871525 bookshop, bookstore, bookstall +n02877765 bottlecap +n02879718 bow +n02883205 bow tie, bow-tie, bowtie +n02892201 brass, memorial tablet, plaque +n02892767 brassiere, bra, bandeau +n02894605 breakwater, groin, groyne, mole, bulwark, seawall, jetty +n02895154 breastplate, aegis, egis +n02906734 broom +n02909870 bucket, pail +n02910353 buckle +n02916936 bulletproof vest +n02917067 bullet train, bullet +n02927161 butcher shop, meat market +n02930766 cab, hack, taxi, taxicab +n02939185 caldron, cauldron +n02948072 candle, taper, wax light +n02950826 cannon +n02951358 canoe +n02951585 can opener, tin opener +n02963159 cardigan +n02965783 car mirror +n02966193 carousel, carrousel, merry-go-round, roundabout, whirligig +n02966687 carpenter's kit, tool kit +n02971356 carton +n02974003 car wheel +n02977058 cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM +n02978881 cassette +n02979186 cassette player +n02980441 castle +n02981792 catamaran +n02988304 CD player +n02992211 cello, violoncello +n02992529 cellular telephone, cellular phone, cellphone, cell, mobile phone +n02999410 chain +n03000134 chainlink fence +n03000247 chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour +n03000684 chain saw, chainsaw +n03014705 chest +n03016953 chiffonier, commode +n03017168 chime, bell, gong +n03018349 china cabinet, china closet +n03026506 Christmas stocking +n03028079 church, church building +n03032252 cinema, movie theater, movie theatre, movie house, picture palace +n03041632 cleaver, meat cleaver, chopper +n03042490 cliff dwelling +n03045698 cloak +n03047690 clog, geta, patten, sabot +n03062245 cocktail shaker +n03063599 coffee mug +n03063689 coffeepot +n03065424 coil, spiral, volute, whorl, helix +n03075370 combination lock +n03085013 computer keyboard, keypad +n03089624 confectionery, confectionary, candy store +n03095699 container ship, containership, container vessel +n03100240 convertible +n03109150 corkscrew, bottle screw +n03110669 cornet, horn, trumpet, trump +n03124043 cowboy boot +n03124170 cowboy hat, ten-gallon hat +n03125729 cradle +n03126707 crane +n03127747 crash helmet +n03127925 crate +n03131574 crib, cot +n03133878 Crock Pot +n03134739 croquet ball +n03141823 crutch +n03146219 cuirass +n03160309 dam, dike, dyke +n03179701 desk +n03180011 desktop computer +n03187595 dial telephone, dial phone +n03188531 diaper, nappy, napkin +n03196217 digital clock +n03197337 digital watch +n03201208 dining table, board +n03207743 dishrag, dishcloth +n03207941 dishwasher, dish washer, dishwashing machine +n03208938 disk brake, disc brake +n03216828 dock, dockage, docking facility +n03218198 dogsled, dog sled, dog sleigh +n03220513 dome +n03223299 doormat, welcome mat +n03240683 drilling platform, offshore rig +n03249569 drum, membranophone, tympan +n03250847 drumstick +n03255030 dumbbell +n03259280 Dutch oven +n03271574 electric fan, blower +n03272010 electric guitar +n03272562 electric 
locomotive +n03290653 entertainment center +n03291819 envelope +n03297495 espresso maker +n03314780 face powder +n03325584 feather boa, boa +n03337140 file, file cabinet, filing cabinet +n03344393 fireboat +n03345487 fire engine, fire truck +n03347037 fire screen, fireguard +n03355925 flagpole, flagstaff +n03372029 flute, transverse flute +n03376595 folding chair +n03379051 football helmet +n03384352 forklift +n03388043 fountain +n03388183 fountain pen +n03388549 four-poster +n03393912 freight car +n03394916 French horn, horn +n03400231 frying pan, frypan, skillet +n03404251 fur coat +n03417042 garbage truck, dustcart +n03424325 gasmask, respirator, gas helmet +n03425413 gas pump, gasoline pump, petrol pump, island dispenser +n03443371 goblet +n03444034 go-kart +n03445777 golf ball +n03445924 golfcart, golf cart +n03447447 gondola +n03447721 gong, tam-tam +n03450230 gown +n03452741 grand piano, grand +n03457902 greenhouse, nursery, glasshouse +n03459775 grille, radiator grille +n03461385 grocery store, grocery, food market, market +n03467068 guillotine +n03476684 hair slide +n03476991 hair spray +n03478589 half track +n03481172 hammer +n03482405 hamper +n03483316 hand blower, blow dryer, blow drier, hair dryer, hair drier +n03485407 hand-held computer, hand-held microcomputer +n03485794 handkerchief, hankie, hanky, hankey +n03492542 hard disc, hard disk, fixed disk +n03494278 harmonica, mouth organ, harp, mouth harp +n03495258 harp +n03496892 harvester, reaper +n03498962 hatchet +n03527444 holster +n03529860 home theater, home theatre +n03530642 honeycomb +n03532672 hook, claw +n03534580 hoopskirt, crinoline +n03535780 horizontal bar, high bar +n03538406 horse cart, horse-cart +n03544143 hourglass +n03584254 iPod +n03584829 iron, smoothing iron +n03590841 jack-o'-lantern +n03594734 jean, blue jean, denim +n03594945 jeep, landrover +n03595614 jersey, T-shirt, tee shirt +n03598930 jigsaw puzzle +n03599486 jinrikisha, ricksha, rickshaw +n03602883 joystick +n03617480 kimono +n03623198 knee pad +n03627232 knot +n03630383 lab coat, laboratory coat +n03633091 ladle +n03637318 lampshade, lamp shade +n03642806 laptop, laptop computer +n03649909 lawn mower, mower +n03657121 lens cap, lens cover +n03658185 letter opener, paper knife, paperknife +n03661043 library +n03662601 lifeboat +n03666591 lighter, light, igniter, ignitor +n03670208 limousine, limo +n03673027 liner, ocean liner +n03676483 lipstick, lip rouge +n03680355 Loafer +n03690938 lotion +n03691459 loudspeaker, speaker, speaker unit, loudspeaker system, speaker system +n03692522 loupe, jeweler's loupe +n03697007 lumbermill, sawmill +n03706229 magnetic compass +n03709823 mailbag, postbag +n03710193 mailbox, letter box +n03710637 maillot +n03710721 maillot, tank suit +n03717622 manhole cover +n03720891 maraca +n03721384 marimba, xylophone +n03724870 mask +n03729826 matchstick +n03733131 maypole +n03733281 maze, labyrinth +n03733805 measuring cup +n03742115 medicine chest, medicine cabinet +n03743016 megalith, megalithic structure +n03759954 microphone, mike +n03761084 microwave, microwave oven +n03763968 military uniform +n03764736 milk can +n03769881 minibus +n03770439 miniskirt, mini +n03770679 minivan +n03773504 missile +n03775071 mitten +n03775546 mixing bowl +n03776460 mobile home, manufactured home +n03777568 Model T +n03777754 modem +n03781244 monastery +n03782006 monitor +n03785016 moped +n03786901 mortar +n03787032 mortarboard +n03788195 mosque +n03788365 mosquito net +n03791053 motor scooter, scooter +n03792782 mountain bike, 
all-terrain bike, off-roader +n03792972 mountain tent +n03793489 mouse, computer mouse +n03794056 mousetrap +n03796401 moving van +n03803284 muzzle +n03804744 nail +n03814639 neck brace +n03814906 necklace +n03825788 nipple +n03832673 notebook, notebook computer +n03837869 obelisk +n03838899 oboe, hautboy, hautbois +n03840681 ocarina, sweet potato +n03841143 odometer, hodometer, mileometer, milometer +n03843555 oil filter +n03854065 organ, pipe organ +n03857828 oscilloscope, scope, cathode-ray oscilloscope, CRO +n03866082 overskirt +n03868242 oxcart +n03868863 oxygen mask +n03871628 packet +n03873416 paddle, boat paddle +n03874293 paddlewheel, paddle wheel +n03874599 padlock +n03876231 paintbrush +n03877472 pajama, pyjama, pj's, jammies +n03877845 palace +n03884397 panpipe, pandean pipe, syrinx +n03887697 paper towel +n03888257 parachute, chute +n03888605 parallel bars, bars +n03891251 park bench +n03891332 parking meter +n03895866 passenger car, coach, carriage +n03899768 patio, terrace +n03902125 pay-phone, pay-station +n03903868 pedestal, plinth, footstall +n03908618 pencil box, pencil case +n03908714 pencil sharpener +n03916031 perfume, essence +n03920288 Petri dish +n03924679 photocopier +n03929660 pick, plectrum, plectron +n03929855 pickelhaube +n03930313 picket fence, paling +n03930630 pickup, pickup truck +n03933933 pier +n03935335 piggy bank, penny bank +n03937543 pill bottle +n03938244 pillow +n03942813 ping-pong ball +n03944341 pinwheel +n03947888 pirate, pirate ship +n03950228 pitcher, ewer +n03954731 plane, carpenter's plane, woodworking plane +n03956157 planetarium +n03958227 plastic bag +n03961711 plate rack +n03967562 plow, plough +n03970156 plunger, plumber's helper +n03976467 Polaroid camera, Polaroid Land camera +n03976657 pole +n03977966 police van, police wagon, paddy wagon, patrol wagon, wagon, black Maria +n03980874 poncho +n03982430 pool table, billiard table, snooker table +n03983396 pop bottle, soda bottle +n03991062 pot, flowerpot +n03992509 potter's wheel +n03995372 power drill +n03998194 prayer rug, prayer mat +n04004767 printer +n04005630 prison, prison house +n04008634 projectile, missile +n04009552 projector +n04019541 puck, hockey puck +n04023962 punching bag, punch bag, punching ball, punchball +n04026417 purse +n04033901 quill, quill pen +n04033995 quilt, comforter, comfort, puff +n04037443 racer, race car, racing car +n04039381 racket, racquet +n04040759 radiator +n04041544 radio, wireless +n04044716 radio telescope, radio reflector +n04049303 rain barrel +n04065272 recreational vehicle, RV, R.V. 
+n04067472 reel +n04069434 reflex camera +n04070727 refrigerator, icebox +n04074963 remote control, remote +n04081281 restaurant, eating house, eating place, eatery +n04086273 revolver, six-gun, six-shooter +n04090263 rifle +n04099969 rocking chair, rocker +n04111531 rotisserie +n04116512 rubber eraser, rubber, pencil eraser +n04118538 rugby ball +n04118776 rule, ruler +n04120489 running shoe +n04125021 safe +n04127249 safety pin +n04131690 saltshaker, salt shaker +n04133789 sandal +n04136333 sarong +n04141076 sax, saxophone +n04141327 scabbard +n04141975 scale, weighing machine +n04146614 school bus +n04147183 schooner +n04149813 scoreboard +n04152593 screen, CRT screen +n04153751 screw +n04154565 screwdriver +n04162706 seat belt, seatbelt +n04179913 sewing machine +n04192698 shield, buckler +n04200800 shoe shop, shoe-shop, shoe store +n04201297 shoji +n04204238 shopping basket +n04204347 shopping cart +n04208210 shovel +n04209133 shower cap +n04209239 shower curtain +n04228054 ski +n04229816 ski mask +n04235860 sleeping bag +n04238763 slide rule, slipstick +n04239074 sliding door +n04243546 slot, one-armed bandit +n04251144 snorkel +n04252077 snowmobile +n04252225 snowplow, snowplough +n04254120 soap dispenser +n04254680 soccer ball +n04254777 sock +n04258138 solar dish, solar collector, solar furnace +n04259630 sombrero +n04263257 soup bowl +n04264628 space bar +n04265275 space heater +n04266014 space shuttle +n04270147 spatula +n04273569 speedboat +n04275548 spider web, spider's web +n04277352 spindle +n04285008 sports car, sport car +n04286575 spotlight, spot +n04296562 stage +n04310018 steam locomotive +n04311004 steel arch bridge +n04311174 steel drum +n04317175 stethoscope +n04325704 stole +n04326547 stone wall +n04328186 stopwatch, stop watch +n04330267 stove +n04332243 strainer +n04335435 streetcar, tram, tramcar, trolley, trolley car +n04336792 stretcher +n04344873 studio couch, day bed +n04346328 stupa, tope +n04347754 submarine, pigboat, sub, U-boat +n04350905 suit, suit of clothes +n04355338 sundial +n04355933 sunglass +n04356056 sunglasses, dark glasses, shades +n04357314 sunscreen, sunblock, sun blocker +n04366367 suspension bridge +n04367480 swab, swob, mop +n04370456 sweatshirt +n04371430 swimming trunks, bathing trunks +n04371774 swing +n04372370 switch, electric switch, electrical switch +n04376876 syringe +n04380533 table lamp +n04389033 tank, army tank, armored combat vehicle, armoured combat vehicle +n04392985 tape player +n04398044 teapot +n04399382 teddy, teddy bear +n04404412 television, television system +n04409515 tennis ball +n04417672 thatch, thatched roof +n04418357 theater curtain, theatre curtain +n04423845 thimble +n04428191 thresher, thrasher, threshing machine +n04429376 throne +n04435653 tile roof +n04442312 toaster +n04443257 tobacco shop, tobacconist shop, tobacconist +n04447861 toilet seat +n04456115 torch +n04458633 totem pole +n04461696 tow truck, tow car, wrecker +n04462240 toyshop +n04465501 tractor +n04467665 trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi +n04476259 tray +n04479046 trench coat +n04482393 tricycle, trike, velocipede +n04483307 trimaran +n04485082 tripod +n04486054 triumphal arch +n04487081 trolleybus, trolley coach, trackless trolley +n04487394 trombone +n04493381 tub, vat +n04501370 turnstile +n04505470 typewriter keyboard +n04507155 umbrella +n04509417 unicycle, monocycle +n04515003 upright, upright piano +n04517823 vacuum, vacuum cleaner +n04522168 vase +n04523525 vault +n04525038 velvet +n04525305 
vending machine +n04532106 vestment +n04532670 viaduct +n04536866 violin, fiddle +n04540053 volleyball +n04542943 waffle iron +n04548280 wall clock +n04548362 wallet, billfold, notecase, pocketbook +n04550184 wardrobe, closet, press +n04552348 warplane, military plane +n04553703 washbasin, handbasin, washbowl, lavabo, wash-hand basin +n04554684 washer, automatic washer, washing machine +n04557648 water bottle +n04560804 water jug +n04562935 water tower +n04579145 whiskey jug +n04579432 whistle +n04584207 wig +n04589890 window screen +n04590129 window shade +n04591157 Windsor tie +n04591713 wine bottle +n04592741 wing +n04596742 wok +n04597913 wooden spoon +n04599235 wool, woolen, woollen +n04604644 worm fence, snake fence, snake-rail fence, Virginia fence +n04606251 wreck +n04612504 yawl +n04613696 yurt +n06359193 web site, website, internet site, site +n06596364 comic book +n06785654 crossword puzzle, crossword +n06794110 street sign +n06874185 traffic light, traffic signal, stoplight +n07248320 book jacket, dust cover, dust jacket, dust wrapper +n07565083 menu +n07579787 plate +n07583066 guacamole +n07584110 consomme +n07590611 hot pot, hotpot +n07613480 trifle +n07614500 ice cream, icecream +n07615774 ice lolly, lolly, lollipop, popsicle +n07684084 French loaf +n07693725 bagel, beigel +n07695742 pretzel +n07697313 cheeseburger +n07697537 hotdog, hot dog, red hot +n07711569 mashed potato +n07714571 head cabbage +n07714990 broccoli +n07715103 cauliflower +n07716358 zucchini, courgette +n07716906 spaghetti squash +n07717410 acorn squash +n07717556 butternut squash +n07718472 cucumber, cuke +n07718747 artichoke, globe artichoke +n07720875 bell pepper +n07730033 cardoon +n07734744 mushroom +n07742313 Granny Smith +n07745940 strawberry +n07747607 orange +n07749582 lemon +n07753113 fig +n07753275 pineapple, ananas +n07753592 banana +n07754684 jackfruit, jak, jack +n07760859 custard apple +n07768694 pomegranate +n07802026 hay +n07831146 carbonara +n07836838 chocolate sauce, chocolate syrup +n07860988 dough +n07871810 meat loaf, meatloaf +n07873807 pizza, pizza pie +n07875152 potpie +n07880968 burrito +n07892512 red wine +n07920052 espresso +n07930864 cup +n07932039 eggnog +n09193705 alp +n09229709 bubble +n09246464 cliff, drop, drop-off +n09256479 coral reef +n09288635 geyser +n09332890 lakeside, lakeshore +n09399592 promontory, headland, head, foreland +n09421951 sandbar, sand bar +n09428293 seashore, coast, seacoast, sea-coast +n09468604 valley, vale +n09472597 volcano +n09835506 ballplayer, baseball player +n10148035 groom, bridegroom +n10565667 scuba diver +n11879895 rapeseed +n11939491 daisy +n12057211 yellow lady's slipper, yellow lady-slipper, Cypripedium calceolus, Cypripedium parviflorum +n12144580 corn +n12267677 acorn +n12620546 hip, rose hip, rosehip +n12768682 buckeye, horse chestnut, conker +n12985857 coral fungus +n12998815 agaric +n13037406 gyromitra +n13040303 stinkhorn, carrion fungus +n13044778 earthstar +n13052670 hen-of-the-woods, hen of the woods, Polyporus frondosus, Grifola frondosa +n13054560 bolete +n13133613 ear, spike, capitulum +n15075141 toilet tissue, toilet paper, bathroom tissue diff --git a/use_case_examples/resnet/README.md b/use_case_examples/resnet/README.md index c5719e1ec..572102756 100644 --- a/use_case_examples/resnet/README.md +++ b/use_case_examples/resnet/README.md @@ -1,17 +1,76 @@ -# ResNet in FHE +# ResNet18 with Fully Homomorphic Encryption ## Overview -`resnet.py` is taken from torchvision 
https://github.com/pytorch/vision/blob/main/torchvision/models/resnet.py.
+This project executes the ResNet18 image classification model using Fully Homomorphic Encryption (FHE) with Concrete ML. The model is adapted for FHE compatibility and tested on a small subset of up-sampled Tiny ImageNet images.
+
+## ResNet18
+
+The ResNet18 model is adapted from the original torchvision implementation (https://github.com/pytorch/vision/blob/main/torchvision/models/resnet.py): the adaptive average pooling layer `AdaptiveAvgPool2d`, which is not yet supported by Concrete ML, is replaced with a standard `AvgPool2d` layer as follows:
-The main modification is the replacement of the adaptive average pooling layer with a standard average pooling layer.

```diff
- self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
+ self.avgpool = nn.AvgPool2d(kernel_size=7, stride=1, padding=0)
```

-Concrete ML does not support `AdaptiveAvgPool2d` yet.
+The rest of the model is left unchanged.
+
+## Evaluation dataset
+
+The model is evaluated on images from [the Tiny ImageNet dataset](https://huggingface.co/datasets/zh-plus/tiny-imagenet).
+
+The `TinyImageNetProcessor` class in `utils_resnet.py` preprocesses the Tiny ImageNet dataset and aligns it with the ImageNet labels for model evaluation.
+
+## Usage
+
+1. Create a Python virtual environment and activate it:
+
+```bash
+python -m venv venv
+source venv/bin/activate
+```
+
+2. Install Concrete ML:
+
+```bash
+pip install concrete-ml
+```
+
+3. Install the remaining dependencies:
+
+```bash
+pip install -r requirements.txt
+```
+
+4. Run the script:
+
+```bash
+python resnet_fhe.py [--run_fhe] [--export_statistics]
+```
+
+Example output when running the script:
+
+```bash
+python resnet_fhe.py --run_fhe
+```
+
+```
+Accuracy of the ResNet18 model on the images: 56.00%
+Top-5 Accuracy of the ResNet18 model on the images: 82.00%
+Compiling the model...
+Model compiled successfully.
+Quantized Model Accuracy of the FHEResNet18 on the images: 54.00%
+Quantized Model Top-5 Accuracy of the FHEResNet18 on the images: 77.00%
+FHE Simulation Accuracy of the FHEResNet18 on the images: 53.00%
+FHE Simulation Top-5 Accuracy of the FHEResNet18 on the images: 75.00%
+Time taken for one FHE execution: 5482.5433 seconds
+```
-`resnet_fhe.ipynb` is a notebook that demonstrates how to use Concrete ML to compile and run a ResNet model in FHE along with some figures to show the accuracy vs different bit-width. 
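+## Compilation sketch
+
+For reference, the compilation performed by `resnet_fhe.py` roughly follows the pattern below. This is a minimal, illustrative sketch only: the quantization bit-width and the calibration data are placeholder assumptions, not the exact settings used by the script.
+
+```python
+import torch
+
+from concrete.ml.torch.compile import compile_torch_model
+from resnet import resnet18_custom
+
+# Load the FHE-friendly ResNet18 defined in resnet.py
+# (AvgPool2d replaces AdaptiveAvgPool2d, the rest is the standard torchvision ResNet18)
+model = resnet18_custom(pretrained=True).eval()
+
+# Small calibration set of 224x224 images (random tensors here, for illustration only)
+calibration_set = torch.randn(10, 3, 224, 224)
+
+# Quantize the model and compile it into an FHE circuit
+quantized_module = compile_torch_model(
+    model,
+    calibration_set,
+    n_bits=6,  # illustrative quantization bit-width
+)
+
+# Run inference with FHE simulation (use fhe="execute" for real encrypted execution)
+predictions = quantized_module.forward(calibration_set.numpy(), fhe="simulate")
+```
+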
+## Timing +| Device | Time (seconds) | +|--------|----------------| +| CPU | 5482.5433 | +| GPU | TBD | diff --git a/use_case_examples/resnet/folder_index_class.txt b/use_case_examples/resnet/folder_index_class.txt deleted file mode 100644 index 1efe1b5a5..000000000 --- a/use_case_examples/resnet/folder_index_class.txt +++ /dev/null @@ -1,1000 +0,0 @@ -n02119789 1 kit_fox -n02100735 2 English_setter -n02110185 3 Siberian_husky -n02096294 4 Australian_terrier -n02102040 5 English_springer -n02066245 6 grey_whale -n02509815 7 lesser_panda -n02124075 8 Egyptian_cat -n02417914 9 ibex -n02123394 10 Persian_cat -n02125311 11 cougar -n02423022 12 gazelle -n02346627 13 porcupine -n02077923 14 sea_lion -n02110063 15 malamute -n02447366 16 badger -n02109047 17 Great_Dane -n02089867 18 Walker_hound -n02102177 19 Welsh_springer_spaniel -n02091134 20 whippet -n02092002 21 Scottish_deerhound -n02071294 22 killer_whale -n02442845 23 mink -n02504458 24 African_elephant -n02092339 25 Weimaraner -n02098105 26 soft-coated_wheaten_terrier -n02096437 27 Dandie_Dinmont -n02114712 28 red_wolf -n02105641 29 Old_English_sheepdog -n02128925 30 jaguar -n02091635 31 otterhound -n02088466 32 bloodhound -n02096051 33 Airedale -n02117135 34 hyena -n02138441 35 meerkat -n02097130 36 giant_schnauzer -n02493509 37 titi -n02457408 38 three-toed_sloth -n02389026 39 sorrel -n02443484 40 black-footed_ferret -n02110341 41 dalmatian -n02089078 42 black-and-tan_coonhound -n02086910 43 papillon -n02445715 44 skunk -n02093256 45 Staffordshire_bullterrier -n02113978 46 Mexican_hairless -n02106382 47 Bouvier_des_Flandres -n02441942 48 weasel -n02113712 49 miniature_poodle -n02113186 50 Cardigan -n02105162 51 malinois -n02415577 52 bighorn -n02356798 53 fox_squirrel -n02488702 54 colobus -n02123159 55 tiger_cat -n02098413 56 Lhasa -n02422699 57 impala -n02114855 58 coyote -n02094433 59 Yorkshire_terrier -n02111277 60 Newfoundland -n02132136 61 brown_bear -n02119022 62 red_fox -n02091467 63 Norwegian_elkhound -n02106550 64 Rottweiler -n02422106 65 hartebeest -n02091831 66 Saluki -n02120505 67 grey_fox -n02104365 68 schipperke -n02086079 69 Pekinese -n02112706 70 Brabancon_griffon -n02098286 71 West_Highland_white_terrier -n02095889 72 Sealyham_terrier -n02484975 73 guenon -n02137549 74 mongoose -n02500267 75 indri -n02129604 76 tiger -n02090721 77 Irish_wolfhound -n02396427 78 wild_boar -n02108000 79 EntleBucher -n02391049 80 zebra -n02412080 81 ram -n02108915 82 French_bulldog -n02480495 83 orangutan -n02110806 84 basenji -n02128385 85 leopard -n02107683 86 Bernese_mountain_dog -n02085936 87 Maltese_dog -n02094114 88 Norfolk_terrier -n02087046 89 toy_terrier -n02100583 90 vizsla -n02096177 91 cairn -n02494079 92 squirrel_monkey -n02105056 93 groenendael -n02101556 94 clumber -n02123597 95 Siamese_cat -n02481823 96 chimpanzee -n02105505 97 komondor -n02088094 98 Afghan_hound -n02085782 99 Japanese_spaniel -n02489166 100 proboscis_monkey -n02364673 101 guinea_pig -n02114548 102 white_wolf -n02134084 103 ice_bear -n02480855 104 gorilla -n02090622 105 borzoi -n02113624 106 toy_poodle -n02093859 107 Kerry_blue_terrier -n02403003 108 ox -n02097298 109 Scotch_terrier -n02108551 110 Tibetan_mastiff -n02493793 111 spider_monkey -n02107142 112 Doberman -n02096585 113 Boston_bull -n02107574 114 Greater_Swiss_Mountain_dog -n02107908 115 Appenzeller -n02086240 116 Shih-Tzu -n02102973 117 Irish_water_spaniel -n02112018 118 Pomeranian -n02093647 119 Bedlington_terrier -n02397096 120 warthog -n02437312 121 Arabian_camel -n02483708 122 siamang 
-n02097047 123 miniature_schnauzer -n02106030 124 collie -n02099601 125 golden_retriever -n02093991 126 Irish_terrier -n02110627 127 affenpinscher -n02106166 128 Border_collie -n02326432 129 hare -n02108089 130 boxer -n02097658 131 silky_terrier -n02088364 132 beagle -n02111129 133 Leonberg -n02100236 134 German_short-haired_pointer -n02486261 135 patas -n02115913 136 dhole -n02486410 137 baboon -n02487347 138 macaque -n02099849 139 Chesapeake_Bay_retriever -n02108422 140 bull_mastiff -n02104029 141 kuvasz -n02492035 142 capuchin -n02110958 143 pug -n02099429 144 curly-coated_retriever -n02094258 145 Norwich_terrier -n02099267 146 flat-coated_retriever -n02395406 147 hog -n02112350 148 keeshond -n02109961 149 Eskimo_dog -n02101388 150 Brittany_spaniel -n02113799 151 standard_poodle -n02095570 152 Lakeland_terrier -n02128757 153 snow_leopard -n02101006 154 Gordon_setter -n02115641 155 dingo -n02097209 156 standard_schnauzer -n02342885 157 hamster -n02097474 158 Tibetan_terrier -n02120079 159 Arctic_fox -n02095314 160 wire-haired_fox_terrier -n02088238 161 basset -n02408429 162 water_buffalo -n02133161 163 American_black_bear -n02328150 164 Angora -n02410509 165 bison -n02492660 166 howler_monkey -n02398521 167 hippopotamus -n02112137 168 chow -n02510455 169 giant_panda -n02093428 170 American_Staffordshire_terrier -n02105855 171 Shetland_sheepdog -n02111500 172 Great_Pyrenees -n02085620 173 Chihuahua -n02123045 174 tabby -n02490219 175 marmoset -n02099712 176 Labrador_retriever -n02109525 177 Saint_Bernard -n02454379 178 armadillo -n02111889 179 Samoyed -n02088632 180 bluetick -n02090379 181 redbone -n02443114 182 polecat -n02361337 183 marmot -n02105412 184 kelpie -n02483362 185 gibbon -n02437616 186 llama -n02107312 187 miniature_pinscher -n02325366 188 wood_rabbit -n02091032 189 Italian_greyhound -n02129165 190 lion -n02102318 191 cocker_spaniel -n02100877 192 Irish_setter -n02074367 193 dugong -n02504013 194 Indian_elephant -n02363005 195 beaver -n02102480 196 Sussex_spaniel -n02113023 197 Pembroke -n02086646 198 Blenheim_spaniel -n02497673 199 Madagascar_cat -n02087394 200 Rhodesian_ridgeback -n02127052 201 lynx -n02116738 202 African_hunting_dog -n02488291 203 langur -n02091244 204 Ibizan_hound -n02114367 205 timber_wolf -n02130308 206 cheetah -n02089973 207 English_foxhound -n02105251 208 briard -n02134418 209 sloth_bear -n02093754 210 Border_terrier -n02106662 211 German_shepherd -n02444819 212 otter -n01882714 213 koala -n01871265 214 tusker -n01872401 215 echidna -n01877812 216 wallaby -n01873310 217 platypus -n01883070 218 wombat -n04086273 219 revolver -n04507155 220 umbrella -n04147183 221 schooner -n04254680 222 soccer_ball -n02672831 223 accordion -n02219486 224 ant -n02317335 225 starfish -n01968897 226 chambered_nautilus -n03452741 227 grand_piano -n03642806 228 laptop -n07745940 229 strawberry -n02690373 230 airliner -n04552348 231 warplane -n02692877 232 airship -n02782093 233 balloon -n04266014 234 space_shuttle -n03344393 235 fireboat -n03447447 236 gondola -n04273569 237 speedboat -n03662601 238 lifeboat -n02951358 239 canoe -n04612504 240 yawl -n02981792 241 catamaran -n04483307 242 trimaran -n03095699 243 container_ship -n03673027 244 liner -n03947888 245 pirate -n02687172 246 aircraft_carrier -n04347754 247 submarine -n04606251 248 wreck -n03478589 249 half_track -n04389033 250 tank -n03773504 251 missile -n02860847 252 bobsled -n03218198 253 dogsled -n02835271 254 bicycle-built-for-two -n03792782 255 mountain_bike -n03393912 256 freight_car -n03895866 257 
passenger_car -n02797295 258 barrow -n04204347 259 shopping_cart -n03791053 260 motor_scooter -n03384352 261 forklift -n03272562 262 electric_locomotive -n04310018 263 steam_locomotive -n02704792 264 amphibian -n02701002 265 ambulance -n02814533 266 beach_wagon -n02930766 267 cab -n03100240 268 convertible -n03594945 269 jeep -n03670208 270 limousine -n03770679 271 minivan -n03777568 272 Model_T -n04037443 273 racer -n04285008 274 sports_car -n03444034 275 go-kart -n03445924 276 golfcart -n03785016 277 moped -n04252225 278 snowplow -n03345487 279 fire_engine -n03417042 280 garbage_truck -n03930630 281 pickup -n04461696 282 tow_truck -n04467665 283 trailer_truck -n03796401 284 moving_van -n03977966 285 police_van -n04065272 286 recreational_vehicle -n04335435 287 streetcar -n04252077 288 snowmobile -n04465501 289 tractor -n03776460 290 mobile_home -n04482393 291 tricycle -n04509417 292 unicycle -n03538406 293 horse_cart -n03599486 294 jinrikisha -n03868242 295 oxcart -n02804414 296 bassinet -n03125729 297 cradle -n03131574 298 crib -n03388549 299 four-poster -n02870880 300 bookcase -n03018349 301 china_cabinet -n03742115 302 medicine_chest -n03016953 303 chiffonier -n04380533 304 table_lamp -n03337140 305 file -n03891251 306 park_bench -n02791124 307 barber_chair -n04429376 308 throne -n03376595 309 folding_chair -n04099969 310 rocking_chair -n04344873 311 studio_couch -n04447861 312 toilet_seat -n03179701 313 desk -n03982430 314 pool_table -n03201208 315 dining_table -n03290653 316 entertainment_center -n04550184 317 wardrobe -n07742313 318 Granny_Smith -n07747607 319 orange -n07749582 320 lemon -n07753113 321 fig -n07753275 322 pineapple -n07753592 323 banana -n07754684 324 jackfruit -n07760859 325 custard_apple -n07768694 326 pomegranate -n12267677 327 acorn -n12620546 328 hip -n13133613 329 ear -n11879895 330 rapeseed -n12144580 331 corn -n12768682 332 buckeye -n03854065 333 organ -n04515003 334 upright -n03017168 335 chime -n03249569 336 drum -n03447721 337 gong -n03720891 338 maraca -n03721384 339 marimba -n04311174 340 steel_drum -n02787622 341 banjo -n02992211 342 cello -n04536866 343 violin -n03495258 344 harp -n02676566 345 acoustic_guitar -n03272010 346 electric_guitar -n03110669 347 cornet -n03394916 348 French_horn -n04487394 349 trombone -n03494278 350 harmonica -n03840681 351 ocarina -n03884397 352 panpipe -n02804610 353 bassoon -n03838899 354 oboe -n04141076 355 sax -n03372029 356 flute -n11939491 357 daisy -n12057211 358 yellow_lady's_slipper -n09246464 359 cliff -n09468604 360 valley -n09193705 361 alp -n09472597 362 volcano -n09399592 363 promontory -n09421951 364 sandbar -n09256479 365 coral_reef -n09332890 366 lakeside -n09428293 367 seashore -n09288635 368 geyser -n03498962 369 hatchet -n03041632 370 cleaver -n03658185 371 letter_opener -n03954731 372 plane -n03995372 373 power_drill -n03649909 374 lawn_mower -n03481172 375 hammer -n03109150 376 corkscrew -n02951585 377 can_opener -n03970156 378 plunger -n04154565 379 screwdriver -n04208210 380 shovel -n03967562 381 plow -n03000684 382 chain_saw -n01514668 383 cock -n01514859 384 hen -n01518878 385 ostrich -n01530575 386 brambling -n01531178 387 goldfinch -n01532829 388 house_finch -n01534433 389 junco -n01537544 390 indigo_bunting -n01558993 391 robin -n01560419 392 bulbul -n01580077 393 jay -n01582220 394 magpie -n01592084 395 chickadee -n01601694 396 water_ouzel -n01608432 397 kite -n01614925 398 bald_eagle -n01616318 399 vulture -n01622779 400 great_grey_owl -n01795545 401 black_grouse -n01796340 402 ptarmigan 
-n01797886 403 ruffed_grouse -n01798484 404 prairie_chicken -n01806143 405 peacock -n01806567 406 quail -n01807496 407 partridge -n01817953 408 African_grey -n01818515 409 macaw -n01819313 410 sulphur-crested_cockatoo -n01820546 411 lorikeet -n01824575 412 coucal -n01828970 413 bee_eater -n01829413 414 hornbill -n01833805 415 hummingbird -n01843065 416 jacamar -n01843383 417 toucan -n01847000 418 drake -n01855032 419 red-breasted_merganser -n01855672 420 goose -n01860187 421 black_swan -n02002556 422 white_stork -n02002724 423 black_stork -n02006656 424 spoonbill -n02007558 425 flamingo -n02009912 426 American_egret -n02009229 427 little_blue_heron -n02011460 428 bittern -n02012849 429 crane -n02013706 430 limpkin -n02018207 431 American_coot -n02018795 432 bustard -n02025239 433 ruddy_turnstone -n02027492 434 red-backed_sandpiper -n02028035 435 redshank -n02033041 436 dowitcher -n02037110 437 oystercatcher -n02017213 438 European_gallinule -n02051845 439 pelican -n02056570 440 king_penguin -n02058221 441 albatross -n01484850 442 great_white_shark -n01491361 443 tiger_shark -n01494475 444 hammerhead -n01496331 445 electric_ray -n01498041 446 stingray -n02514041 447 barracouta -n02536864 448 coho -n01440764 449 tench -n01443537 450 goldfish -n02526121 451 eel -n02606052 452 rock_beauty -n02607072 453 anemone_fish -n02643566 454 lionfish -n02655020 455 puffer -n02640242 456 sturgeon -n02641379 457 gar -n01664065 458 loggerhead -n01665541 459 leatherback_turtle -n01667114 460 mud_turtle -n01667778 461 terrapin -n01669191 462 box_turtle -n01675722 463 banded_gecko -n01677366 464 common_iguana -n01682714 465 American_chameleon -n01685808 466 whiptail -n01687978 467 agama -n01688243 468 frilled_lizard -n01689811 469 alligator_lizard -n01692333 470 Gila_monster -n01693334 471 green_lizard -n01694178 472 African_chameleon -n01695060 473 Komodo_dragon -n01704323 474 triceratops -n01697457 475 African_crocodile -n01698640 476 American_alligator -n01728572 477 thunder_snake -n01728920 478 ringneck_snake -n01729322 479 hognose_snake -n01729977 480 green_snake -n01734418 481 king_snake -n01735189 482 garter_snake -n01737021 483 water_snake -n01739381 484 vine_snake -n01740131 485 night_snake -n01742172 486 boa_constrictor -n01744401 487 rock_python -n01748264 488 Indian_cobra -n01749939 489 green_mamba -n01751748 490 sea_snake -n01753488 491 horned_viper -n01755581 492 diamondback -n01756291 493 sidewinder -n01629819 494 European_fire_salamander -n01630670 495 common_newt -n01631663 496 eft -n01632458 497 spotted_salamander -n01632777 498 axolotl -n01641577 499 bullfrog -n01644373 500 tree_frog -n01644900 501 tailed_frog -n04579432 502 whistle -n04592741 503 wing -n03876231 504 paintbrush -n03483316 505 hand_blower -n03868863 506 oxygen_mask -n04251144 507 snorkel -n03691459 508 loudspeaker -n03759954 509 microphone -n04152593 510 screen -n03793489 511 mouse -n03271574 512 electric_fan -n03843555 513 oil_filter -n04332243 514 strainer -n04265275 515 space_heater -n04330267 516 stove -n03467068 517 guillotine -n02794156 518 barometer -n04118776 519 rule -n03841143 520 odometer -n04141975 521 scale -n02708093 522 analog_clock -n03196217 523 digital_clock -n04548280 524 wall_clock -n03544143 525 hourglass -n04355338 526 sundial -n03891332 527 parking_meter -n04328186 528 stopwatch -n03197337 529 digital_watch -n04317175 530 stethoscope -n04376876 531 syringe -n03706229 532 magnetic_compass -n02841315 533 binoculars -n04009552 534 projector -n04356056 535 sunglasses -n03692522 536 loupe -n04044716 537 
radio_telescope -n02879718 538 bow -n02950826 539 cannon -n02749479 540 assault_rifle -n04090263 541 rifle -n04008634 542 projectile -n03085013 543 computer_keyboard -n04505470 544 typewriter_keyboard -n03126707 545 crane -n03666591 546 lighter -n02666196 547 abacus -n02977058 548 cash_machine -n04238763 549 slide_rule -n03180011 550 desktop_computer -n03485407 551 hand-held_computer -n03832673 552 notebook -n06359193 553 web_site -n03496892 554 harvester -n04428191 555 thresher -n04004767 556 printer -n04243546 557 slot -n04525305 558 vending_machine -n04179913 559 sewing_machine -n03602883 560 joystick -n04372370 561 switch -n03532672 562 hook -n02974003 563 car_wheel -n03874293 564 paddlewheel -n03944341 565 pinwheel -n03992509 566 potter's_wheel -n03425413 567 gas_pump -n02966193 568 carousel -n04371774 569 swing -n04067472 570 reel -n04040759 571 radiator -n04019541 572 puck -n03492542 573 hard_disc -n04355933 574 sunglass -n03929660 575 pick -n02965783 576 car_mirror -n04258138 577 solar_dish -n04074963 578 remote_control -n03208938 579 disk_brake -n02910353 580 buckle -n03476684 581 hair_slide -n03627232 582 knot -n03075370 583 combination_lock -n03874599 584 padlock -n03804744 585 nail -n04127249 586 safety_pin -n04153751 587 screw -n03803284 588 muzzle -n04162706 589 seat_belt -n04228054 590 ski -n02948072 591 candle -n03590841 592 jack-o'-lantern -n04286575 593 spotlight -n04456115 594 torch -n03814639 595 neck_brace -n03933933 596 pier -n04485082 597 tripod -n03733131 598 maypole -n03794056 599 mousetrap -n04275548 600 spider_web -n01768244 601 trilobite -n01770081 602 harvestman -n01770393 603 scorpion -n01773157 604 black_and_gold_garden_spider -n01773549 605 barn_spider -n01773797 606 garden_spider -n01774384 607 black_widow -n01774750 608 tarantula -n01775062 609 wolf_spider -n01776313 610 tick -n01784675 611 centipede -n01990800 612 isopod -n01978287 613 Dungeness_crab -n01978455 614 rock_crab -n01980166 615 fiddler_crab -n01981276 616 king_crab -n01983481 617 American_lobster -n01984695 618 spiny_lobster -n01985128 619 crayfish -n01986214 620 hermit_crab -n02165105 621 tiger_beetle -n02165456 622 ladybug -n02167151 623 ground_beetle -n02168699 624 long-horned_beetle -n02169497 625 leaf_beetle -n02172182 626 dung_beetle -n02174001 627 rhinoceros_beetle -n02177972 628 weevil -n02190166 629 fly -n02206856 630 bee -n02226429 631 grasshopper -n02229544 632 cricket -n02231487 633 walking_stick -n02233338 634 cockroach -n02236044 635 mantis -n02256656 636 cicada -n02259212 637 leafhopper -n02264363 638 lacewing -n02268443 639 dragonfly -n02268853 640 damselfly -n02276258 641 admiral -n02277742 642 ringlet -n02279972 643 monarch -n02280649 644 cabbage_butterfly -n02281406 645 sulphur_butterfly -n02281787 646 lycaenid -n01910747 647 jellyfish -n01914609 648 sea_anemone -n01917289 649 brain_coral -n01924916 650 flatworm -n01930112 651 nematode -n01943899 652 conch -n01944390 653 snail -n01945685 654 slug -n01950731 655 sea_slug -n01955084 656 chiton -n02319095 657 sea_urchin -n02321529 658 sea_cucumber -n03584829 659 iron -n03297495 660 espresso_maker -n03761084 661 microwave -n03259280 662 Dutch_oven -n04111531 663 rotisserie -n04442312 664 toaster -n04542943 665 waffle_iron -n04517823 666 vacuum -n03207941 667 dishwasher -n04070727 668 refrigerator -n04554684 669 washer -n03133878 670 Crock_Pot -n03400231 671 frying_pan -n04596742 672 wok -n02939185 673 caldron -n03063689 674 coffeepot -n04398044 675 teapot -n04270147 676 spatula -n02699494 677 altar -n04486054 678 triumphal_arch 
-n03899768 679 patio -n04311004 680 steel_arch_bridge -n04366367 681 suspension_bridge -n04532670 682 viaduct -n02793495 683 barn -n03457902 684 greenhouse -n03877845 685 palace -n03781244 686 monastery -n03661043 687 library -n02727426 688 apiary -n02859443 689 boathouse -n03028079 690 church -n03788195 691 mosque -n04346328 692 stupa -n03956157 693 planetarium -n04081281 694 restaurant -n03032252 695 cinema -n03529860 696 home_theater -n03697007 697 lumbermill -n03065424 698 coil -n03837869 699 obelisk -n04458633 700 totem_pole -n02980441 701 castle -n04005630 702 prison -n03461385 703 grocery_store -n02776631 704 bakery -n02791270 705 barbershop -n02871525 706 bookshop -n02927161 707 butcher_shop -n03089624 708 confectionery -n04200800 709 shoe_shop -n04443257 710 tobacco_shop -n04462240 711 toyshop -n03388043 712 fountain -n03042490 713 cliff_dwelling -n04613696 714 yurt -n03216828 715 dock -n02892201 716 brass -n03743016 717 megalith -n02788148 718 bannister -n02894605 719 breakwater -n03160309 720 dam -n03000134 721 chainlink_fence -n03930313 722 picket_fence -n04604644 723 worm_fence -n04326547 724 stone_wall -n03459775 725 grille -n04239074 726 sliding_door -n04501370 727 turnstile -n03792972 728 mountain_tent -n04149813 729 scoreboard -n03530642 730 honeycomb -n03961711 731 plate_rack -n03903868 732 pedestal -n02814860 733 beacon -n07711569 734 mashed_potato -n07720875 735 bell_pepper -n07714571 736 head_cabbage -n07714990 737 broccoli -n07715103 738 cauliflower -n07716358 739 zucchini -n07716906 740 spaghetti_squash -n07717410 741 acorn_squash -n07717556 742 butternut_squash -n07718472 743 cucumber -n07718747 744 artichoke -n07730033 745 cardoon -n07734744 746 mushroom -n04209239 747 shower_curtain -n03594734 748 jean -n02971356 749 carton -n03485794 750 handkerchief -n04133789 751 sandal -n02747177 752 ashcan -n04125021 753 safe -n07579787 754 plate -n03814906 755 necklace -n03134739 756 croquet_ball -n03404251 757 fur_coat -n04423845 758 thimble -n03877472 759 pajama -n04120489 760 running_shoe -n03062245 761 cocktail_shaker -n03014705 762 chest -n03717622 763 manhole_cover -n03777754 764 modem -n04493381 765 tub -n04476259 766 tray -n02777292 767 balance_beam -n07693725 768 bagel -n03998194 769 prayer_rug -n03617480 770 kimono -n07590611 771 hot_pot -n04579145 772 whiskey_jug -n03623198 773 knee_pad -n07248320 774 book_jacket -n04277352 775 spindle -n04229816 776 ski_mask -n02823428 777 beer_bottle -n03127747 778 crash_helmet -n02877765 779 bottlecap -n04435653 780 tile_roof -n03724870 781 mask -n03710637 782 maillot -n03920288 783 Petri_dish -n03379051 784 football_helmet -n02807133 785 bathing_cap -n04399382 786 teddy -n03527444 787 holster -n03983396 788 pop_bottle -n03924679 789 photocopier -n04532106 790 vestment -n06785654 791 crossword_puzzle -n03445777 792 golf_ball -n07613480 793 trifle -n04350905 794 suit -n04562935 795 water_tower -n03325584 796 feather_boa -n03045698 797 cloak -n07892512 798 red_wine -n03250847 799 drumstick -n04192698 800 shield -n03026506 801 Christmas_stocking -n03534580 802 hoopskirt -n07565083 803 menu -n04296562 804 stage -n02869837 805 bonnet -n07871810 806 meat_loaf -n02799071 807 baseball -n03314780 808 face_powder -n04141327 809 scabbard -n04357314 810 sunscreen -n02823750 811 beer_glass -n13052670 812 hen-of-the-woods -n07583066 813 guacamole -n03637318 814 lampshade -n04599235 815 wool -n07802026 816 hay -n02883205 817 bow_tie -n03709823 818 mailbag -n04560804 819 water_jug -n02909870 820 bucket -n03207743 821 dishrag -n04263257 822 
soup_bowl -n07932039 823 eggnog -n03786901 824 mortar -n04479046 825 trench_coat -n03873416 826 paddle -n02999410 827 chain -n04367480 828 swab -n03775546 829 mixing_bowl -n07875152 830 potpie -n04591713 831 wine_bottle -n04201297 832 shoji -n02916936 833 bulletproof_vest -n03240683 834 drilling_platform -n02840245 835 binder -n02963159 836 cardigan -n04370456 837 sweatshirt -n03991062 838 pot -n02843684 839 birdhouse -n03482405 840 hamper -n03942813 841 ping-pong_ball -n03908618 842 pencil_box -n03902125 843 pay-phone -n07584110 844 consomme -n02730930 845 apron -n04023962 846 punching_bag -n02769748 847 backpack -n10148035 848 groom -n02817516 849 bearskin -n03908714 850 pencil_sharpener -n02906734 851 broom -n03788365 852 mosquito_net -n02667093 853 abaya -n03787032 854 mortarboard -n03980874 855 poncho -n03141823 856 crutch -n03976467 857 Polaroid_camera -n04264628 858 space_bar -n07930864 859 cup -n04039381 860 racket -n06874185 861 traffic_light -n04033901 862 quill -n04041544 863 radio -n07860988 864 dough -n03146219 865 cuirass -n03763968 866 military_uniform -n03676483 867 lipstick -n04209133 868 shower_cap -n03782006 869 monitor -n03857828 870 oscilloscope -n03775071 871 mitten -n02892767 872 brassiere -n07684084 873 French_loaf -n04522168 874 vase -n03764736 875 milk_can -n04118538 876 rugby_ball -n03887697 877 paper_towel -n13044778 878 earthstar -n03291819 879 envelope -n03770439 880 miniskirt -n03124170 881 cowboy_hat -n04487081 882 trolleybus -n03916031 883 perfume -n02808440 884 bathtub -n07697537 885 hotdog -n12985857 886 coral_fungus -n02917067 887 bullet_train -n03938244 888 pillow -n15075141 889 toilet_tissue -n02978881 890 cassette -n02966687 891 carpenter's_kit -n03633091 892 ladle -n13040303 893 stinkhorn -n03690938 894 lotion -n03476991 895 hair_spray -n02669723 896 academic_gown -n03220513 897 dome -n03127925 898 crate -n04584207 899 wig -n07880968 900 burrito -n03937543 901 pill_bottle -n03000247 902 chain_mail -n04418357 903 theater_curtain -n04590129 904 window_shade -n02795169 905 barrel -n04553703 906 washbasin -n02783161 907 ballpoint -n02802426 908 basketball -n02808304 909 bath_towel -n03124043 910 cowboy_boot -n03450230 911 gown -n04589890 912 window_screen -n12998815 913 agaric -n02992529 914 cellular_telephone -n03825788 915 nipple -n02790996 916 barbell -n03710193 917 mailbox -n03630383 918 lab_coat -n03347037 919 fire_screen -n03769881 920 minibus -n03871628 921 packet -n03733281 922 maze -n03976657 923 pole -n03535780 924 horizontal_bar -n04259630 925 sombrero -n03929855 926 pickelhaube -n04049303 927 rain_barrel -n04548362 928 wallet -n02979186 929 cassette_player -n06596364 930 comic_book -n03935335 931 piggy_bank -n06794110 932 street_sign -n02825657 933 bell_cote -n03388183 934 fountain_pen -n04591157 935 Windsor_tie -n04540053 936 volleyball -n03866082 937 overskirt -n04136333 938 sarong -n04026417 939 purse -n02865351 940 bolo_tie -n02834397 941 bib -n03888257 942 parachute -n04235860 943 sleeping_bag -n04404412 944 television -n04371430 945 swimming_trunks -n03733805 946 measuring_cup -n07920052 947 espresso -n07873807 948 pizza -n02895154 949 breastplate -n04204238 950 shopping_basket -n04597913 951 wooden_spoon -n04131690 952 saltshaker -n07836838 953 chocolate_sauce -n09835506 954 ballplayer -n03443371 955 goblet -n13037406 956 gyromitra -n04336792 957 stretcher -n04557648 958 water_bottle -n03187595 959 dial_telephone -n04254120 960 soap_dispenser -n03595614 961 jersey -n04146614 962 school_bus -n03598930 963 jigsaw_puzzle -n03958227 964 
plastic_bag -n04069434 965 reflex_camera -n03188531 966 diaper -n02786058 967 Band_Aid -n07615774 968 ice_lolly -n04525038 969 velvet -n04409515 970 tennis_ball -n03424325 971 gasmask -n03223299 972 doormat -n03680355 973 Loafer -n07614500 974 ice_cream -n07695742 975 pretzel -n04033995 976 quilt -n03710721 977 maillot -n04392985 978 tape_player -n03047690 979 clog -n03584254 980 iPod -n13054560 981 bolete -n10565667 982 scuba_diver -n03950228 983 pitcher -n03729826 984 matchstick -n02837789 985 bikini -n04254777 986 sock -n02988304 987 CD_player -n03657121 988 lens_cap -n04417672 989 thatch -n04523525 990 vault -n02815834 991 beaker -n09229709 992 bubble -n07697313 993 cheeseburger -n03888605 994 parallel_bars -n03355925 995 flagpole -n03063599 996 coffee_mug -n04116512 997 rubber_eraser -n04325704 998 stole -n07831146 999 carbonara -n03255030 1000 dumbbell \ No newline at end of file diff --git a/use_case_examples/resnet/imagenet_classes.txt b/use_case_examples/resnet/imagenet_classes.txt deleted file mode 100644 index 888d6f51d..000000000 --- a/use_case_examples/resnet/imagenet_classes.txt +++ /dev/null @@ -1,1000 +0,0 @@ -tench -goldfish -great white shark -tiger shark -hammerhead -electric ray -stingray -cock -hen -ostrich -brambling -goldfinch -house finch -junco -indigo bunting -robin -bulbul -jay -magpie -chickadee -water ouzel -kite -bald eagle -vulture -great grey owl -European fire salamander -common newt -eft -spotted salamander -axolotl -bullfrog -tree frog -tailed frog -loggerhead -leatherback turtle -mud turtle -terrapin -box turtle -banded gecko -common iguana -American chameleon -whiptail -agama -frilled lizard -alligator lizard -Gila monster -green lizard -African chameleon -Komodo dragon -African crocodile -American alligator -triceratops -thunder snake -ringneck snake -hognose snake -green snake -king snake -garter snake -water snake -vine snake -night snake -boa constrictor -rock python -Indian cobra -green mamba -sea snake -horned viper -diamondback -sidewinder -trilobite -harvestman -scorpion -black and gold garden spider -barn spider -garden spider -black widow -tarantula -wolf spider -tick -centipede -black grouse -ptarmigan -ruffed grouse -prairie chicken -peacock -quail -partridge -African grey -macaw -sulphur-crested cockatoo -lorikeet -coucal -bee eater -hornbill -hummingbird -jacamar -toucan -drake -red-breasted merganser -goose -black swan -tusker -echidna -platypus -wallaby -koala -wombat -jellyfish -sea anemone -brain coral -flatworm -nematode -conch -snail -slug -sea slug -chiton -chambered nautilus -Dungeness crab -rock crab -fiddler crab -king crab -American lobster -spiny lobster -crayfish -hermit crab -isopod -white stork -black stork -spoonbill -flamingo -little blue heron -American egret -bittern -crane -limpkin -European gallinule -American coot -bustard -ruddy turnstone -red-backed sandpiper -redshank -dowitcher -oystercatcher -pelican -king penguin -albatross -grey whale -killer whale -dugong -sea lion -Chihuahua -Japanese spaniel -Maltese dog -Pekinese -Shih-Tzu -Blenheim spaniel -papillon -toy terrier -Rhodesian ridgeback -Afghan hound -basset -beagle -bloodhound -bluetick -black-and-tan coonhound -Walker hound -English foxhound -redbone -borzoi -Irish wolfhound -Italian greyhound -whippet -Ibizan hound -Norwegian elkhound -otterhound -Saluki -Scottish deerhound -Weimaraner -Staffordshire bullterrier -American Staffordshire terrier -Bedlington terrier -Border terrier -Kerry blue terrier -Irish terrier -Norfolk terrier -Norwich terrier -Yorkshire 
terrier -wire-haired fox terrier -Lakeland terrier -Sealyham terrier -Airedale -cairn -Australian terrier -Dandie Dinmont -Boston bull -miniature schnauzer -giant schnauzer -standard schnauzer -Scotch terrier -Tibetan terrier -silky terrier -soft-coated wheaten terrier -West Highland white terrier -Lhasa -flat-coated retriever -curly-coated retriever -golden retriever -Labrador retriever -Chesapeake Bay retriever -German short-haired pointer -vizsla -English setter -Irish setter -Gordon setter -Brittany spaniel -clumber -English springer -Welsh springer spaniel -cocker spaniel -Sussex spaniel -Irish water spaniel -kuvasz -schipperke -groenendael -malinois -briard -kelpie -komondor -Old English sheepdog -Shetland sheepdog -collie -Border collie -Bouvier des Flandres -Rottweiler -German shepherd -Doberman -miniature pinscher -Greater Swiss Mountain dog -Bernese mountain dog -Appenzeller -EntleBucher -boxer -bull mastiff -Tibetan mastiff -French bulldog -Great Dane -Saint Bernard -Eskimo dog -malamute -Siberian husky -dalmatian -affenpinscher -basenji -pug -Leonberg -Newfoundland -Great Pyrenees -Samoyed -Pomeranian -chow -keeshond -Brabancon griffon -Pembroke -Cardigan -toy poodle -miniature poodle -standard poodle -Mexican hairless -timber wolf -white wolf -red wolf -coyote -dingo -dhole -African hunting dog -hyena -red fox -kit fox -Arctic fox -grey fox -tabby -tiger cat -Persian cat -Siamese cat -Egyptian cat -cougar -lynx -leopard -snow leopard -jaguar -lion -tiger -cheetah -brown bear -American black bear -ice bear -sloth bear -mongoose -meerkat -tiger beetle -ladybug -ground beetle -long-horned beetle -leaf beetle -dung beetle -rhinoceros beetle -weevil -fly -bee -ant -grasshopper -cricket -walking stick -cockroach -mantis -cicada -leafhopper -lacewing -dragonfly -damselfly -admiral -ringlet -monarch -cabbage butterfly -sulphur butterfly -lycaenid -starfish -sea urchin -sea cucumber -wood rabbit -hare -Angora -hamster -porcupine -fox squirrel -marmot -beaver -guinea pig -sorrel -zebra -hog -wild boar -warthog -hippopotamus -ox -water buffalo -bison -ram -bighorn -ibex -hartebeest -impala -gazelle -Arabian camel -llama -weasel -mink -polecat -black-footed ferret -otter -skunk -badger -armadillo -three-toed sloth -orangutan -gorilla -chimpanzee -gibbon -siamang -guenon -patas -baboon -macaque -langur -colobus -proboscis monkey -marmoset -capuchin -howler monkey -titi -spider monkey -squirrel monkey -Madagascar cat -indri -Indian elephant -African elephant -lesser panda -giant panda -barracouta -eel -coho -rock beauty -anemone fish -sturgeon -gar -lionfish -puffer -abacus -abaya -academic gown -accordion -acoustic guitar -aircraft carrier -airliner -airship -altar -ambulance -amphibian -analog clock -apiary -apron -ashcan -assault rifle -backpack -bakery -balance beam -balloon -ballpoint -Band Aid -banjo -bannister -barbell -barber chair -barbershop -barn -barometer -barrel -barrow -baseball -basketball -bassinet -bassoon -bathing cap -bath towel -bathtub -beach wagon -beacon -beaker -bearskin -beer bottle -beer glass -bell cote -bib -bicycle-built-for-two -bikini -binder -binoculars -birdhouse -boathouse -bobsled -bolo tie -bonnet -bookcase -bookshop -bottlecap -bow -bow tie -brass -brassiere -breakwater -breastplate -broom -bucket -buckle -bulletproof vest -bullet train -butcher shop -cab -caldron -candle -cannon -canoe -can opener -cardigan -car mirror -carousel -carpenter's kit -carton -car wheel -cash machine -cassette -cassette player -castle -catamaran -CD player -cello -cellular 
telephone -chain -chainlink fence -chain mail -chain saw -chest -chiffonier -chime -china cabinet -Christmas stocking -church -cinema -cleaver -cliff dwelling -cloak -clog -cocktail shaker -coffee mug -coffeepot -coil -combination lock -computer keyboard -confectionery -container ship -convertible -corkscrew -cornet -cowboy boot -cowboy hat -cradle -crane -crash helmet -crate -crib -Crock Pot -croquet ball -crutch -cuirass -dam -desk -desktop computer -dial telephone -diaper -digital clock -digital watch -dining table -dishrag -dishwasher -disk brake -dock -dogsled -dome -doormat -drilling platform -drum -drumstick -dumbbell -Dutch oven -electric fan -electric guitar -electric locomotive -entertainment center -envelope -espresso maker -face powder -feather boa -file -fireboat -fire engine -fire screen -flagpole -flute -folding chair -football helmet -forklift -fountain -fountain pen -four-poster -freight car -French horn -frying pan -fur coat -garbage truck -gasmask -gas pump -goblet -go-kart -golf ball -golfcart -gondola -gong -gown -grand piano -greenhouse -grille -grocery store -guillotine -hair slide -hair spray -half track -hammer -hamper -hand blower -hand-held computer -handkerchief -hard disc -harmonica -harp -harvester -hatchet -holster -home theater -honeycomb -hook -hoopskirt -horizontal bar -horse cart -hourglass -iPod -iron -jack-o'-lantern -jean -jeep -jersey -jigsaw puzzle -jinrikisha -joystick -kimono -knee pad -knot -lab coat -ladle -lampshade -laptop -lawn mower -lens cap -letter opener -library -lifeboat -lighter -limousine -liner -lipstick -Loafer -lotion -loudspeaker -loupe -lumbermill -magnetic compass -mailbag -mailbox -maillot -maillot -manhole cover -maraca -marimba -mask -matchstick -maypole -maze -measuring cup -medicine chest -megalith -microphone -microwave -military uniform -milk can -minibus -miniskirt -minivan -missile -mitten -mixing bowl -mobile home -Model T -modem -monastery -monitor -moped -mortar -mortarboard -mosque -mosquito net -motor scooter -mountain bike -mountain tent -mouse -mousetrap -moving van -muzzle -nail -neck brace -necklace -nipple -notebook -obelisk -oboe -ocarina -odometer -oil filter -organ -oscilloscope -overskirt -oxcart -oxygen mask -packet -paddle -paddlewheel -padlock -paintbrush -pajama -palace -panpipe -paper towel -parachute -parallel bars -park bench -parking meter -passenger car -patio -pay-phone -pedestal -pencil box -pencil sharpener -perfume -Petri dish -photocopier -pick -pickelhaube -picket fence -pickup -pier -piggy bank -pill bottle -pillow -ping-pong ball -pinwheel -pirate -pitcher -plane -planetarium -plastic bag -plate rack -plow -plunger -Polaroid camera -pole -police van -poncho -pool table -pop bottle -pot -potter's wheel -power drill -prayer rug -printer -prison -projectile -projector -puck -punching bag -purse -quill -quilt -racer -racket -radiator -radio -radio telescope -rain barrel -recreational vehicle -reel -reflex camera -refrigerator -remote control -restaurant -revolver -rifle -rocking chair -rotisserie -rubber eraser -rugby ball -rule -running shoe -safe -safety pin -saltshaker -sandal -sarong -sax -scabbard -scale -school bus -schooner -scoreboard -screen -screw -screwdriver -seat belt -sewing machine -shield -shoe shop -shoji -shopping basket -shopping cart -shovel -shower cap -shower curtain -ski -ski mask -sleeping bag -slide rule -sliding door -slot -snorkel -snowmobile -snowplow -soap dispenser -soccer ball -sock -solar dish -sombrero -soup bowl -space bar -space heater -space shuttle -spatula 
-speedboat -spider web -spindle -sports car -spotlight -stage -steam locomotive -steel arch bridge -steel drum -stethoscope -stole -stone wall -stopwatch -stove -strainer -streetcar -stretcher -studio couch -stupa -submarine -suit -sundial -sunglass -sunglasses -sunscreen -suspension bridge -swab -sweatshirt -swimming trunks -swing -switch -syringe -table lamp -tank -tape player -teapot -teddy -television -tennis ball -thatch -theater curtain -thimble -thresher -throne -tile roof -toaster -tobacco shop -toilet seat -torch -totem pole -tow truck -toyshop -tractor -trailer truck -tray -trench coat -tricycle -trimaran -tripod -triumphal arch -trolleybus -trombone -tub -turnstile -typewriter keyboard -umbrella -unicycle -upright -vacuum -vase -vault -velvet -vending machine -vestment -viaduct -violin -volleyball -waffle iron -wall clock -wallet -wardrobe -warplane -washbasin -washer -water bottle -water jug -water tower -whiskey jug -whistle -wig -window screen -window shade -Windsor tie -wine bottle -wing -wok -wooden spoon -wool -worm fence -wreck -yawl -yurt -web site -comic book -crossword puzzle -street sign -traffic light -book jacket -menu -plate -guacamole -consomme -hot pot -trifle -ice cream -ice lolly -French loaf -bagel -pretzel -cheeseburger -hotdog -mashed potato -head cabbage -broccoli -cauliflower -zucchini -spaghetti squash -acorn squash -butternut squash -cucumber -artichoke -bell pepper -cardoon -mushroom -Granny Smith -strawberry -orange -lemon -fig -pineapple -banana -jackfruit -custard apple -pomegranate -hay -carbonara -chocolate sauce -dough -meat loaf -pizza -potpie -burrito -red wine -espresso -cup -eggnog -alp -bubble -cliff -coral reef -geyser -lakeside -promontory -sandbar -seashore -valley -volcano -ballplayer -groom -scuba diver -rapeseed -daisy -yellow lady's slipper -corn -acorn -hip -buckeye -coral fungus -agaric -gyromitra -stinkhorn -earthstar -hen-of-the-woods -bolete -ear -toilet tissue \ No newline at end of file diff --git a/use_case_examples/resnet/requirements.txt b/use_case_examples/resnet/requirements.txt new file mode 100644 index 000000000..f6acab469 --- /dev/null +++ b/use_case_examples/resnet/requirements.txt @@ -0,0 +1 @@ +datasets==2.20.0 \ No newline at end of file diff --git a/use_case_examples/resnet/resnet.py b/use_case_examples/resnet/resnet.py index 0410c5a31..69ee83dab 100644 --- a/use_case_examples/resnet/resnet.py +++ b/use_case_examples/resnet/resnet.py @@ -4,13 +4,11 @@ import torch import torch.nn as nn from torch import Tensor - -from torchvision.transforms._presets import ImageClassification -from torchvision.utils import _log_api_usage_once -from torchvision.models._api import register_model, Weights, WeightsEnum +from torchvision.models._api import Weights, WeightsEnum, register_model from torchvision.models._meta import _IMAGENET_CATEGORIES from torchvision.models._utils import _ovewrite_named_param, handle_legacy_interface - +from torchvision.transforms._presets import ImageClassification +from torchvision.utils import _log_api_usage_once __all__ = [ "ResNet", @@ -37,7 +35,9 @@ ] -def conv3x3(in_planes: int, out_planes: int, stride: int = 1, groups: int = 1, dilation: int = 1) -> nn.Conv2d: +def conv3x3( + in_planes: int, out_planes: int, stride: int = 1, groups: int = 1, dilation: int = 1 +) -> nn.Conv2d: """3x3 convolution with padding""" return nn.Conv2d( in_planes, @@ -199,9 +199,15 @@ def __init__( self.relu = nn.ReLU(inplace=True) self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.layer1 = 
self._make_layer(block, 64, layers[0]) - self.layer2 = self._make_layer(block, 128, layers[1], stride=2, dilate=replace_stride_with_dilation[0]) - self.layer3 = self._make_layer(block, 256, layers[2], stride=2, dilate=replace_stride_with_dilation[1]) - self.layer4 = self._make_layer(block, 512, layers[3], stride=2, dilate=replace_stride_with_dilation[2]) + self.layer2 = self._make_layer( + block, 128, layers[1], stride=2, dilate=replace_stride_with_dilation[0] + ) + self.layer3 = self._make_layer( + block, 256, layers[2], stride=2, dilate=replace_stride_with_dilation[1] + ) + self.layer4 = self._make_layer( + block, 512, layers[3], stride=2, dilate=replace_stride_with_dilation[2] + ) # self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) # FIXME self.avgpool = nn.AvgPool2d(kernel_size=7, stride=1, padding=0) self.fc = nn.Linear(512 * block.expansion, num_classes) @@ -246,7 +252,14 @@ def _make_layer( layers = [] layers.append( block( - self.inplanes, planes, stride, downsample, self.groups, self.base_width, previous_dilation, norm_layer + self.inplanes, + planes, + stride, + downsample, + self.groups, + self.base_width, + previous_dilation, + norm_layer, ) ) self.inplanes = planes * block.expansion @@ -682,7 +695,9 @@ class Wide_ResNet101_2_Weights(WeightsEnum): @register_model() @handle_legacy_interface(weights=("pretrained", ResNet18_Weights.IMAGENET1K_V1)) -def resnet18_custom(*, weights: Optional[ResNet18_Weights] = None, progress: bool = True, **kwargs: Any) -> ResNet: +def resnet18_custom( + *, weights: Optional[ResNet18_Weights] = None, progress: bool = True, **kwargs: Any +) -> ResNet: """ResNet-18 from `Deep Residual Learning for Image Recognition `__. Args: diff --git a/use_case_examples/resnet/resnet_fhe.ipynb b/use_case_examples/resnet/resnet_fhe.ipynb deleted file mode 100644 index 1fb361afa..000000000 --- a/use_case_examples/resnet/resnet_fhe.ipynb +++ /dev/null @@ -1,1989 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/home/ubuntu/venv/lib/python3.9/site-packages/torchvision/models/_utils.py:208: UserWarning: The parameter 'pretrained' is deprecated since 0.13 and may be removed in the future, please use 'weights' instead.\n", - " warnings.warn(\n", - "/home/ubuntu/venv/lib/python3.9/site-packages/torchvision/models/_utils.py:223: UserWarning: Arguments other than a weight enum or `None` for 'weights' are deprecated since 0.13 and may be removed in the future. The current behavior is equivalent to passing `weights=ResNet18_Weights.IMAGENET1K_V1`. 
You can also use `weights=ResNet18_Weights.DEFAULT` to get the most up-to-date weights.\n", - " warnings.warn(msg)\n" - ] - } - ], - "source": [ - "import torch\n", - "from resnet import resnet18_custom\n", - "# from torchvision.models.quantization import resnet18 as load_resnet18\n", - "# from torchvision.models.quantization import ResNet18_QuantizedWeights\n", - "from concrete.ml.torch.compile import compile_torch_model\n", - "\n", - "\n", - "# Load the ResNet18 model without pretrained weights\n", - "resnet18 = resnet18_custom(pretrained=True)\n", - "\n", - "# # Step 1: Initialize model with the best available weights\n", - "# weights = ResNet18_QuantizedWeights.DEFAULT\n", - "# resnet18 = load_resnet18(weights=weights, quantize=True)\n", - "# resnet18.eval()\n" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'resnet18.onnx'" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Define a dummy input tensor with the size of 1x3x224x224 (batch size x channels x height x width)\n", - "dummy_input = torch.randn(10, 3, 224, 224)\n", - "\n", - "# Set the model to evaluation mode\n", - "resnet18.eval()\n", - "\n", - "# Path to save the ONNX model\n", - "onnx_model_path = \"resnet18.onnx\"\n", - "\n", - "# Export the model to ONNX format\n", - "torch.onnx.export(resnet18, dummy_input, onnx_model_path, verbose=False)\n", - "\n", - "onnx_model_path" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "ResNet(\n", - " (conv1): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)\n", - " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", - " (relu): ReLU(inplace=True)\n", - " (maxpool): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)\n", - " (layer1): Sequential(\n", - " (0): BasicBlock(\n", - " (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", - " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", - " (relu): ReLU(inplace=True)\n", - " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", - " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", - " )\n", - " (1): BasicBlock(\n", - " (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", - " (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", - " (relu): ReLU(inplace=True)\n", - " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", - " (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", - " )\n", - " )\n", - " (layer2): Sequential(\n", - " (0): BasicBlock(\n", - " (conv1): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", - " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", - " (relu): ReLU(inplace=True)\n", - " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", - " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", - " (downsample): Sequential(\n", - " (0): Conv2d(64, 128, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", - " (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, 
track_running_stats=True)\n", - " )\n", - " )\n", - " (1): BasicBlock(\n", - " (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", - " (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", - " (relu): ReLU(inplace=True)\n", - " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", - " (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", - " )\n", - " )\n", - " (layer3): Sequential(\n", - " (0): BasicBlock(\n", - " (conv1): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", - " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", - " (relu): ReLU(inplace=True)\n", - " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", - " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", - " (downsample): Sequential(\n", - " (0): Conv2d(128, 256, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", - " (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", - " )\n", - " )\n", - " (1): BasicBlock(\n", - " (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", - " (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", - " (relu): ReLU(inplace=True)\n", - " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", - " (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", - " )\n", - " )\n", - " (layer4): Sequential(\n", - " (0): BasicBlock(\n", - " (conv1): Conv2d(256, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", - " (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", - " (relu): ReLU(inplace=True)\n", - " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", - " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", - " (downsample): Sequential(\n", - " (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", - " (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", - " )\n", - " )\n", - " (1): BasicBlock(\n", - " (conv1): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", - " (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", - " (relu): ReLU(inplace=True)\n", - " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", - " (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", - " )\n", - " )\n", - " (avgpool): AvgPool2d(kernel_size=7, stride=1, padding=0)\n", - " (fc): Linear(in_features=512, out_features=1000, bias=True)\n", - ")" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "resnet18" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Accuracy of the ResNet18 model on the images: 90.90909091%\n", - "Top-5 Accuracy of the ResNet18 model on the images: 95.45454545%\n" - ] - } - ], - "source": [ - "from io import BytesIO\n", - "\n", - "import requests\n", - "import torch\n", - "from PIL import Image\n", - "from 
torchvision import models, transforms\n", - "\n", - "\n", - "# Read the ImageNet classes file and create a list\n", - "with open(\"./imagenet_classes.txt\", \"r\") as f:\n", - " imagenet_classes = [line.strip() for line in f.readlines()]\n", - "\n", - "\n", - "# Create a dictionary to map class names to indices\n", - "class_to_index = {cls: idx for idx, cls in enumerate(imagenet_classes)}\n", - "\n", - "\n", - "transform = transforms.Compose(\n", - " [\n", - " transforms.Resize(256),\n", - " transforms.CenterCrop(224),\n", - " transforms.ToTensor(),\n", - " transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),\n", - " ]\n", - ")\n", - "\n", - "# Download an example image from the web\n", - "image_urls = [\n", - " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01443537_goldfish.JPEG\",\n", - " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01614925_bald_eagle.JPEG\",\n", - " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01697457_African_crocodile.JPEG\",\n", - " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01592084_chickadee.JPEG\",\n", - " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01601694_water_ouzel.JPEG\",\n", - " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01739381_vine_snake.JPEG\",\n", - " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01806567_quail.JPEG\",\n", - " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01917289_brain_coral.JPEG\",\n", - " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02077923_sea_lion.JPEG\",\n", - " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02051845_pelican.JPEG\",\n", - " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02110185_Siberian_husky.JPEG\",\n", - " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02165456_ladybug.JPEG\",\n", - " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02325366_wood_rabbit.JPEG\",\n", - " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02391049_zebra.JPEG\",\n", - " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02481823_chimpanzee.JPEG\",\n", - " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02510455_giant_panda.JPEG\",\n", - " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02643566_lionfish.JPEG\",\n", - " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02787622_banjo.JPEG\",\n", - " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02817516_bearskin.JPEG\",\n", - " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02871525_bookshop.JPEG\",\n", - " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02930766_cab.JPEG\",\n", - " \"https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02974003_car_wheel.JPEG\",\n", - "]\n", - "\n", - "# Download and transform images, and collect labels\n", - "images = []\n", - "labels = []\n", - "for image_url in image_urls:\n", - " class_name = '_'.join(image_url.split('/')[-1].split('.')[0].split('_')[1:]).replace('_', ' ')\n", - " if class_name in class_to_index:\n", - " response 
= requests.get(image_url)\n", - " img = Image.open(BytesIO(response.content))\n", - " img = transform(img)\n", - " images.append(img)\n", - " labels.append(class_to_index[class_name])\n", - "\n", - "# Stack images to create a mini-batch\n", - "images = torch.stack(images)\n", - "labels = torch.tensor(labels)\n", - "\n", - "# Forward pass through the model to get the predictions\n", - "with torch.no_grad():\n", - " outputs = resnet18(images)\n", - " _, predicted = torch.max(outputs, 1)\n", - "\n", - "\n", - "def calculate_accuracy(predicted, labels):\n", - " correct = (predicted == labels).sum().item()\n", - " total = labels.size(0)\n", - " accuracy = 100 * correct / total\n", - " return accuracy\n", - "\n", - "def calculate_topk_accuracy(outputs, labels, topk=5):\n", - " _, topk_predicted = torch.topk(outputs, topk, dim=1)\n", - " correct_topk = sum([labels[i] in topk_predicted[i] for i in range(len(labels))])\n", - " total = labels.size(0)\n", - " topk_accuracy = 100 * correct_topk / total\n", - " return topk_accuracy\n", - "\n", - "with torch.no_grad():\n", - " outputs = resnet18(images)\n", - " _, predicted = torch.max(outputs, 1)\n", - "\n", - "accuracy = calculate_accuracy(predicted, labels)\n", - "print(f\"Accuracy of the ResNet18 model on the images: {accuracy:.8f}%\")\n", - "\n", - "topk_accuracy = calculate_topk_accuracy(outputs, labels, topk=5)\n", - "print(f\"Top-5 Accuracy of the ResNet18 model on the images: {topk_accuracy:.8f}%\")" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "0.9090909361839294" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from concrete.ml.torch.compile import build_quantized_module, compile_torch_model\n", - "\n", - "# q_module = compile_torch_model(\n", - "# resnet18,\n", - "# torch_inputset=images,\n", - "# n_bits={\"model_inputs\": 8, \"op_inputs\": 6, \"op_weights\": 6, \"model_outputs\": 8},\n", - "# # rounding_threshold_bits={\"n_bits\": 6, \"method\":\"APPROXIMATE\"},\n", - "# )\n", - "from concrete.fhe import Configuration, ParameterSelectionStrategy\n", - "\n", - "# config = Configuration(parameter_selection_strategy = ParameterSelectionStrategy.MONO, single_precision = True)\n", - "q_module = compile_torch_model(\n", - " resnet18,\n", - " torch_inputset=images,\n", - " n_bits={\"model_inputs\": 8, \"op_inputs\": 6, \"op_weights\": 6, \"model_outputs\": 8},\n", - " rounding_threshold_bits={\"n_bits\": 7, \"method\":\"APPROXIMATE\"},\n", - " # configuration=config,\n", - " # p_error = 0.9\n", - ")\n", - "\n", - "with torch.no_grad():\n", - " outputs_fhe = q_module.forward(images.detach().numpy(), fhe=\"disable\")\n", - " probabilities_fhe = torch.nn.functional.softmax(torch.from_numpy(outputs_fhe), dim=-1)\n", - " outputs = resnet18(images)\n", - " probabilities = torch.nn.functional.softmax(outputs, dim=-1)\n", - "\n", - "# Calculate and store accuracy\n", - "fhe_accuracy_vs_fp32 = (\n", - " (probabilities_fhe.argmax(-1) == probabilities.argmax(-1)).float().mean().item()\n", - ")\n", - "fhe_accuracy_vs_fp32" - ] - }, - { - "cell_type": "code", - "execution_count": 32, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - " %0 = _x # EncryptedTensor ∈ [-32, 31]\n", - " %1 = ones() # EncryptedTensor ∈ [1, 1]\n", - " %2 = -4 # ClearScalar ∈ [-4, -4]\n", - " %3 = multiply(%1, %2) # EncryptedTensor ∈ [-4, -4]\n", - " %4 = (%3[:, :, 3:227, 3:227] = %0) # 
EncryptedTensor ∈ [-32, 31]\n", - " %5 = [[[[ 0 ... -1 -1]]]] # ClearTensor ∈ [-24, 31] @ /conv1/Conv.conv\n", - " %6 = conv2d(%4, %5, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(2, 2), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-5898, 5936] @ /conv1/Conv.conv\n", - " %7 = round_bit_pattern(%6, lsbs_to_remove=7, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-5888, 5888] @ /conv1/Conv.conv_rounding\n", - " %8 = subgraph(%7) # EncryptedTensor ∈ [0, 63]\n", - " %9 = ones() # EncryptedTensor ∈ [1, 1]\n", - " %10 = 0 # ClearScalar ∈ [0, 0]\n", - " %11 = multiply(%9, %10) # EncryptedTensor ∈ [0, 0]\n", - " %12 = (%11[:, :, 1:113, 1:113] = %8) # EncryptedTensor ∈ [0, 63]\n", - " %13 = maxpool2d(%12, kernel_shape=(3, 3), strides=(2, 2), pads=(0, 0, 0, 0), dilations=(1, 1), ceil_mode=False) # EncryptedTensor ∈ [0, 63]\n", - " %14 = subgraph(%13) # EncryptedTensor ∈ [0, 63]\n", - " %15 = ones() # EncryptedTensor ∈ [1, 1]\n", - " %16 = 0 # ClearScalar ∈ [0, 0]\n", - " %17 = multiply(%15, %16) # EncryptedTensor ∈ [0, 0]\n", - " %18 = (%17[:, :, 1:57, 1:57] = %14) # EncryptedTensor ∈ [0, 63]\n", - " %19 = [[[[ 2 - ... 0 -3]]]] # ClearTensor ∈ [-31, 28] @ /layer1/layer1.0/conv1/Conv.conv\n", - " %20 = conv2d(%18, %19, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-7704, 2874] @ /layer1/layer1.0/conv1/Conv.conv\n", - " %21 = round_bit_pattern(%20, lsbs_to_remove=7, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-7680, 2816] @ /layer1/layer1.0/conv1/Conv.conv_rounding\n", - " %22 = subgraph(%21) # EncryptedTensor ∈ [0, 62]\n", - " %23 = ones() # EncryptedTensor ∈ [1, 1]\n", - " %24 = 0 # ClearScalar ∈ [0, 0]\n", - " %25 = multiply(%23, %24) # EncryptedTensor ∈ [0, 0]\n", - " %26 = (%25[:, :, 1:57, 1:57] = %22) # EncryptedTensor ∈ [0, 62]\n", - " %27 = [[[[ 1 - ... -3 0]]]] # ClearTensor ∈ [-31, 21] @ /layer1/layer1.0/conv2/Conv.conv\n", - " %28 = conv2d(%26, %27, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-4267, 3767] @ /layer1/layer1.0/conv2/Conv.conv\n", - " %29 = round_bit_pattern(%28, lsbs_to_remove=7, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-4224, 3712] @ /layer1/layer1.0/conv2/Conv.conv_rounding\n", - " %30 = subgraph(%29) # EncryptedTensor ∈ [-32, 31]\n", - " %31 = subgraph(%13) # EncryptedTensor ∈ [-32, 31]\n", - " %32 = subgraph(%30) # EncryptedTensor ∈ [-105, 90]\n", - " %33 = subgraph(%31) # EncryptedTensor ∈ [0, 142]\n", - " %34 = 1 # ClearScalar ∈ [1, 1]\n", - " %35 = multiply(%34, %33) # EncryptedTensor ∈ [0, 142]\n", - " %36 = add(%32, %35) # EncryptedTensor ∈ [-103, 150]\n", - " %37 = subgraph(%36) # EncryptedTensor ∈ [0, 63]\n", - " %38 = ones() # EncryptedTensor ∈ [1, 1]\n", - " %39 = 0 # ClearScalar ∈ [0, 0]\n", - " %40 = multiply(%38, %39) # EncryptedTensor ∈ [0, 0]\n", - " %41 = (%40[:, :, 1:57, 1:57] = %37) # EncryptedTensor ∈ [0, 63]\n", - " %42 = [[[[ 1 0 ... 1 -5]]]] # ClearTensor ∈ [-29, 31] @ /layer1/layer1.1/conv1/Conv.conv\n", - " %43 = conv2d(%41, %42, [0 0 0 0 0 ... 
0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-5571, 4822] @ /layer1/layer1.1/conv1/Conv.conv\n", - " %44 = round_bit_pattern(%43, lsbs_to_remove=7, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-5632, 4864] @ /layer1/layer1.1/conv1/Conv.conv_rounding\n", - " %45 = subgraph(%44) # EncryptedTensor ∈ [0, 63]\n", - " %46 = ones() # EncryptedTensor ∈ [1, 1]\n", - " %47 = 0 # ClearScalar ∈ [0, 0]\n", - " %48 = multiply(%46, %47) # EncryptedTensor ∈ [0, 0]\n", - " %49 = (%48[:, :, 1:57, 1:57] = %45) # EncryptedTensor ∈ [0, 63]\n", - " %50 = [[[[ -1 ... 10 3]]]] # ClearTensor ∈ [-31, 23] @ /layer1/layer1.1/conv2/Conv.conv\n", - " %51 = conv2d(%49, %50, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-5422, 3434] @ /layer1/layer1.1/conv2/Conv.conv\n", - " %52 = round_bit_pattern(%51, lsbs_to_remove=7, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-5376, 3456] @ /layer1/layer1.1/conv2/Conv.conv_rounding\n", - " %53 = subgraph(%52) # EncryptedTensor ∈ [-32, 31]\n", - " %54 = subgraph(%36) # EncryptedTensor ∈ [-32, 31]\n", - " %55 = subgraph(%53) # EncryptedTensor ∈ [-135, 89]\n", - " %56 = subgraph(%54) # EncryptedTensor ∈ [0, 92]\n", - " %57 = 1 # ClearScalar ∈ [1, 1]\n", - " %58 = multiply(%57, %56) # EncryptedTensor ∈ [0, 92]\n", - " %59 = add(%55, %58) # EncryptedTensor ∈ [-125, 130]\n", - " %60 = subgraph(%59) # EncryptedTensor ∈ [0, 63]\n", - " %61 = ones() # EncryptedTensor ∈ [1, 1]\n", - " %62 = 0 # ClearScalar ∈ [0, 0]\n", - " %63 = multiply(%61, %62) # EncryptedTensor ∈ [0, 0]\n", - " %64 = (%63[:, :, 1:57, 1:57] = %60) # EncryptedTensor ∈ [0, 63]\n", - " %65 = [[[[-4 -7 ... -3 3]]]] # ClearTensor ∈ [-21, 31] @ /layer2/layer2.0/conv1/Conv.conv\n", - " %66 = conv2d(%64, %65, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(2, 2), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-3130, 2989] @ /layer2/layer2.0/conv1/Conv.conv\n", - " %67 = round_bit_pattern(%66, lsbs_to_remove=6, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-3136, 3008] @ /layer2/layer2.0/conv1/Conv.conv_rounding\n", - " %68 = subgraph(%67) # EncryptedTensor ∈ [0, 63]\n", - " %69 = ones() # EncryptedTensor ∈ [1, 1]\n", - " %70 = 0 # ClearScalar ∈ [0, 0]\n", - " %71 = multiply(%69, %70) # EncryptedTensor ∈ [0, 0]\n", - " %72 = (%71[:, :, 1:29, 1:29] = %68) # EncryptedTensor ∈ [0, 63]\n", - " %73 = [[[[ 0 ... -1 0]]]] # ClearTensor ∈ [-21, 31] @ /layer2/layer2.0/conv2/Conv.conv\n", - " %74 = conv2d(%72, %73, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-4567, 6827] @ /layer2/layer2.0/conv2/Conv.conv\n", - " %75 = round_bit_pattern(%74, lsbs_to_remove=7, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-4608, 6784] @ /layer2/layer2.0/conv2/Conv.conv_rounding\n", - " %76 = subgraph(%59) # EncryptedTensor ∈ [0, 63]\n", - " %77 = [[[[ 1]] ... [[ 1]]]] # ClearTensor ∈ [-25, 31] @ /layer2/layer2.0/downsample/downsample.0/Conv.conv\n", - " %78 = conv2d(%76, %77, [0 0 0 0 0 ... 
0 0 0 0 0], pads=[0, 0, 0, 0], strides=(2, 2), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-1242, 985] @ /layer2/layer2.0/downsample/downsample.0/Conv.conv\n", - " %79 = round_bit_pattern(%78, lsbs_to_remove=5, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-1248, 992] @ /layer2/layer2.0/downsample/downsample.0/Conv.conv_rounding\n", - " %80 = subgraph(%75) # EncryptedTensor ∈ [-32, 29]\n", - " %81 = subgraph(%79) # EncryptedTensor ∈ [-32, 31]\n", - " %82 = subgraph(%80) # EncryptedTensor ∈ [-71, 126]\n", - " %83 = subgraph(%81) # EncryptedTensor ∈ [-78, 66]\n", - " %84 = 1 # ClearScalar ∈ [1, 1]\n", - " %85 = multiply(%84, %83) # EncryptedTensor ∈ [-78, 66]\n", - " %86 = add(%82, %85) # EncryptedTensor ∈ [-105, 138]\n", - " %87 = subgraph(%86) # EncryptedTensor ∈ [0, 59]\n", - " %88 = ones() # EncryptedTensor ∈ [1, 1]\n", - " %89 = 0 # ClearScalar ∈ [0, 0]\n", - " %90 = multiply(%88, %89) # EncryptedTensor ∈ [0, 0]\n", - " %91 = (%90[:, :, 1:29, 1:29] = %87) # EncryptedTensor ∈ [0, 59]\n", - " %92 = [[[[ 0 ... 0 -1]]]] # ClearTensor ∈ [-26, 31] @ /layer2/layer2.1/conv1/Conv.conv\n", - " %93 = conv2d(%91, %92, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-3652, 3308] @ /layer2/layer2.1/conv1/Conv.conv\n", - " %94 = round_bit_pattern(%93, lsbs_to_remove=6, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-3648, 3328] @ /layer2/layer2.1/conv1/Conv.conv_rounding\n", - " %95 = subgraph(%94) # EncryptedTensor ∈ [0, 63]\n", - " %96 = ones() # EncryptedTensor ∈ [1, 1]\n", - " %97 = 0 # ClearScalar ∈ [0, 0]\n", - " %98 = multiply(%96, %97) # EncryptedTensor ∈ [0, 0]\n", - " %99 = (%98[:, :, 1:29, 1:29] = %95) # EncryptedTensor ∈ [0, 63]\n", - "%100 = [[[[-1 0 ... -1 -3]]]] # ClearTensor ∈ [-31, 22] @ /layer2/layer2.1/conv2/Conv.conv\n", - "%101 = conv2d(%99, %100, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-3240, 2807] @ /layer2/layer2.1/conv2/Conv.conv\n", - "%102 = round_bit_pattern(%101, lsbs_to_remove=6, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-3264, 2816] @ /layer2/layer2.1/conv2/Conv.conv_rounding\n", - "%103 = subgraph(%102) # EncryptedTensor ∈ [-30, 30]\n", - "%104 = subgraph(%86) # EncryptedTensor ∈ [-32, 27]\n", - "%105 = subgraph(%103) # EncryptedTensor ∈ [-103, 96]\n", - "%106 = subgraph(%104) # EncryptedTensor ∈ [0, 153]\n", - "%107 = 1 # ClearScalar ∈ [1, 1]\n", - "%108 = multiply(%107, %106) # EncryptedTensor ∈ [0, 153]\n", - "%109 = add(%105, %108) # EncryptedTensor ∈ [-103, 142]\n", - "%110 = subgraph(%109) # EncryptedTensor ∈ [0, 61]\n", - "%111 = ones() # EncryptedTensor ∈ [1, 1]\n", - "%112 = 0 # ClearScalar ∈ [0, 0]\n", - "%113 = multiply(%111, %112) # EncryptedTensor ∈ [0, 0]\n", - "%114 = (%113[:, :, 1:29, 1:29] = %110) # EncryptedTensor ∈ [0, 61]\n", - "%115 = [[[[-1 -1 ... 0 -1]]]] # ClearTensor ∈ [-24, 31] @ /layer3/layer3.0/conv1/Conv.conv\n", - "%116 = conv2d(%114, %115, [0 0 0 0 0 ... 
0 0 0 0 0], pads=[0, 0, 0, 0], strides=(2, 2), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-4397, 3425] @ /layer3/layer3.0/conv1/Conv.conv\n", - "%117 = round_bit_pattern(%116, lsbs_to_remove=7, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-4352, 3456] @ /layer3/layer3.0/conv1/Conv.conv_rounding\n", - "%118 = subgraph(%117) # EncryptedTensor ∈ [0, 63]\n", - "%119 = ones() # EncryptedTensor ∈ [1, 1]\n", - "%120 = 0 # ClearScalar ∈ [0, 0]\n", - "%121 = multiply(%119, %120) # EncryptedTensor ∈ [0, 0]\n", - "%122 = (%121[:, :, 1:15, 1:15] = %118) # EncryptedTensor ∈ [0, 63]\n", - "%123 = [[[[ 0 -2 ... -1 -2]]]] # ClearTensor ∈ [-21, 31] @ /layer3/layer3.0/conv2/Conv.conv\n", - "%124 = conv2d(%122, %123, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-4434, 5529] @ /layer3/layer3.0/conv2/Conv.conv\n", - "%125 = round_bit_pattern(%124, lsbs_to_remove=7, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-4480, 5504] @ /layer3/layer3.0/conv2/Conv.conv_rounding\n", - "%126 = subgraph(%109) # EncryptedTensor ∈ [0, 61]\n", - "%127 = [[[[ 0]] ... [[ 1]]]] # ClearTensor ∈ [-31, 24] @ /layer3/layer3.0/downsample/downsample.0/Conv.conv\n", - "%128 = conv2d(%126, %127, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(2, 2), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-1238, 850] @ /layer3/layer3.0/downsample/downsample.0/Conv.conv\n", - "%129 = round_bit_pattern(%128, lsbs_to_remove=5, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-1248, 864] @ /layer3/layer3.0/downsample/downsample.0/Conv.conv_rounding\n", - "%130 = subgraph(%125) # EncryptedTensor ∈ [-31, 31]\n", - "%131 = subgraph(%129) # EncryptedTensor ∈ [-32, 31]\n", - "%132 = subgraph(%130) # EncryptedTensor ∈ [-99, 137]\n", - "%133 = subgraph(%131) # EncryptedTensor ∈ [-58, 36]\n", - "%134 = 1 # ClearScalar ∈ [1, 1]\n", - "%135 = multiply(%134, %133) # EncryptedTensor ∈ [-58, 36]\n", - "%136 = add(%132, %135) # EncryptedTensor ∈ [-113, 149]\n", - "%137 = subgraph(%136) # EncryptedTensor ∈ [0, 63]\n", - "%138 = ones() # EncryptedTensor ∈ [1, 1]\n", - "%139 = 0 # ClearScalar ∈ [0, 0]\n", - "%140 = multiply(%138, %139) # EncryptedTensor ∈ [0, 0]\n", - "%141 = (%140[:, :, 1:15, 1:15] = %137) # EncryptedTensor ∈ [0, 63]\n", - "%142 = [[[[ 3 3 ... 0 2]]]] # ClearTensor ∈ [-26, 31] @ /layer3/layer3.1/conv1/Conv.conv\n", - "%143 = conv2d(%141, %142, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-4838, 3516] @ /layer3/layer3.1/conv1/Conv.conv\n", - "%144 = round_bit_pattern(%143, lsbs_to_remove=7, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-4864, 3456] @ /layer3/layer3.1/conv1/Conv.conv_rounding\n", - "%145 = subgraph(%144) # EncryptedTensor ∈ [0, 61]\n", - "%146 = ones() # EncryptedTensor ∈ [1, 1]\n", - "%147 = 0 # ClearScalar ∈ [0, 0]\n", - "%148 = multiply(%146, %147) # EncryptedTensor ∈ [0, 0]\n", - "%149 = (%148[:, :, 1:15, 1:15] = %145) # EncryptedTensor ∈ [0, 61]\n", - "%150 = [[[[-2 -1 ... -4 -2]]]] # ClearTensor ∈ [-31, 23] @ /layer3/layer3.1/conv2/Conv.conv\n", - "%151 = conv2d(%149, %150, [0 0 0 0 0 ... 
0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-3753, 2990] @ /layer3/layer3.1/conv2/Conv.conv\n", - "%152 = round_bit_pattern(%151, lsbs_to_remove=6, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-3776, 3008] @ /layer3/layer3.1/conv2/Conv.conv_rounding\n", - "%153 = subgraph(%152) # EncryptedTensor ∈ [-31, 29]\n", - "%154 = subgraph(%136) # EncryptedTensor ∈ [-32, 31]\n", - "%155 = subgraph(%153) # EncryptedTensor ∈ [-124, 102]\n", - "%156 = subgraph(%154) # EncryptedTensor ∈ [0, 130]\n", - "%157 = 1 # ClearScalar ∈ [1, 1]\n", - "%158 = multiply(%157, %156) # EncryptedTensor ∈ [0, 130]\n", - "%159 = add(%155, %158) # EncryptedTensor ∈ [-124, 137]\n", - "%160 = subgraph(%159) # EncryptedTensor ∈ [0, 63]\n", - "%161 = ones() # EncryptedTensor ∈ [1, 1]\n", - "%162 = 0 # ClearScalar ∈ [0, 0]\n", - "%163 = multiply(%161, %162) # EncryptedTensor ∈ [0, 0]\n", - "%164 = (%163[:, :, 1:15, 1:15] = %160) # EncryptedTensor ∈ [0, 63]\n", - "%165 = [[[[-1 -1 ... 1 0]]]] # ClearTensor ∈ [-16, 31] @ /layer4/layer4.0/conv1/Conv.conv\n", - "%166 = conv2d(%164, %165, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(2, 2), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-3987, 2570] @ /layer4/layer4.0/conv1/Conv.conv\n", - "%167 = round_bit_pattern(%166, lsbs_to_remove=7, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-3968, 2560] @ /layer4/layer4.0/conv1/Conv.conv_rounding\n", - "%168 = subgraph(%167) # EncryptedTensor ∈ [0, 56]\n", - "%169 = ones() # EncryptedTensor ∈ [1, 1]\n", - "%170 = 0 # ClearScalar ∈ [0, 0]\n", - "%171 = multiply(%169, %170) # EncryptedTensor ∈ [0, 0]\n", - "%172 = (%171[:, :, 1:8, 1:8] = %168) # EncryptedTensor ∈ [0, 56]\n", - "%173 = [[[[ 0 -1 ... 0 1]]]] # ClearTensor ∈ [-19, 31] @ /layer4/layer4.0/conv2/Conv.conv\n", - "%174 = conv2d(%172, %173, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-3351, 2455] @ /layer4/layer4.0/conv2/Conv.conv\n", - "%175 = round_bit_pattern(%174, lsbs_to_remove=6, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-3328, 2432] @ /layer4/layer4.0/conv2/Conv.conv_rounding\n", - "%176 = subgraph(%159) # EncryptedTensor ∈ [0, 63]\n", - "%177 = [[[[ 0]] ... [[ 0]]]] # ClearTensor ∈ [-26, 31] @ /layer4/layer4.0/downsample/downsample.0/Conv.conv\n", - "%178 = conv2d(%176, %177, [0 0 0 0 0 ... 
0 0 0 0 0], pads=[0, 0, 0, 0], strides=(2, 2), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-954, 1167] @ /layer4/layer4.0/downsample/downsample.0/Conv.conv\n", - "%179 = round_bit_pattern(%178, lsbs_to_remove=5, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-960, 1152] @ /layer4/layer4.0/downsample/downsample.0/Conv.conv_rounding\n", - "%180 = subgraph(%175) # EncryptedTensor ∈ [-31, 26]\n", - "%181 = subgraph(%179) # EncryptedTensor ∈ [-31, 31]\n", - "%182 = subgraph(%180) # EncryptedTensor ∈ [-101, 98]\n", - "%183 = subgraph(%181) # EncryptedTensor ∈ [-62, 58]\n", - "%184 = 1 # ClearScalar ∈ [1, 1]\n", - "%185 = multiply(%184, %183) # EncryptedTensor ∈ [-62, 58]\n", - "%186 = add(%182, %185) # EncryptedTensor ∈ [-119, 124]\n", - "%187 = subgraph(%186) # EncryptedTensor ∈ [0, 57]\n", - "%188 = ones() # EncryptedTensor ∈ [1, 1]\n", - "%189 = 0 # ClearScalar ∈ [0, 0]\n", - "%190 = multiply(%188, %189) # EncryptedTensor ∈ [0, 0]\n", - "%191 = (%190[:, :, 1:8, 1:8] = %187) # EncryptedTensor ∈ [0, 57]\n", - "%192 = [[[[-1 0 ... 0 0]]]] # ClearTensor ∈ [-19, 31] @ /layer4/layer4.1/conv1/Conv.conv\n", - "%193 = conv2d(%191, %192, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-4430, 2208] @ /layer4/layer4.1/conv1/Conv.conv\n", - "%194 = round_bit_pattern(%193, lsbs_to_remove=7, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-4480, 2176] @ /layer4/layer4.1/conv1/Conv.conv_rounding\n", - "%195 = subgraph(%194) # EncryptedTensor ∈ [0, 63]\n", - "%196 = ones() # EncryptedTensor ∈ [1, 1]\n", - "%197 = 0 # ClearScalar ∈ [0, 0]\n", - "%198 = multiply(%196, %197) # EncryptedTensor ∈ [0, 0]\n", - "%199 = (%198[:, :, 1:8, 1:8] = %195) # EncryptedTensor ∈ [0, 63]\n", - "%200 = [[[[ 0 1 ... 1 1]]]] # ClearTensor ∈ [-20, 31] @ /layer4/layer4.1/conv2/Conv.conv\n", - "%201 = conv2d(%199, %200, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [-4275, 8105] @ /layer4/layer4.1/conv2/Conv.conv\n", - "%202 = round_bit_pattern(%201, lsbs_to_remove=7, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [-4224, 8064] @ /layer4/layer4.1/conv2/Conv.conv_rounding\n", - "%203 = subgraph(%202) # EncryptedTensor ∈ [-32, 31]\n", - "%204 = subgraph(%186) # EncryptedTensor ∈ [-32, 25]\n", - "%205 = subgraph(%203) # EncryptedTensor ∈ [-79, 170]\n", - "%206 = subgraph(%204) # EncryptedTensor ∈ [0, 31]\n", - "%207 = 1 # ClearScalar ∈ [1, 1]\n", - "%208 = multiply(%207, %206) # EncryptedTensor ∈ [0, 31]\n", - "%209 = add(%205, %208) # EncryptedTensor ∈ [-79, 173]\n", - "%210 = subgraph(%209) # EncryptedTensor ∈ [0, 62]\n", - "%211 = [[[[1 1 1 ... 1 1 1]]]] # ClearTensor ∈ [0, 1] @ /avgpool/AveragePool.avgpool\n", - "%212 = conv2d(%210, %211, [0 0 0 0 0 ... 0 0 0 0 0], pads=[0, 0, 0, 0], strides=(1, 1), dilations=(1, 1), group=1) # EncryptedTensor ∈ [0, 1066] @ /avgpool/AveragePool.avgpool\n", - "%213 = round_bit_pattern(%212, lsbs_to_remove=4, overflow_protection=False, exactness=Exactness.APPROXIMATE) # EncryptedTensor ∈ [0, 1072] @ /avgpool/AveragePool.avgpool_rounding\n", - "%214 = subgraph(%213) # EncryptedTensor ∈ [0, 252]\n", - "%215 = reshape(%214, newshape=(1, 512)) # EncryptedTensor ∈ [0, 252]\n", - "%216 = subgraph(%215) # EncryptedTensor ∈ [0, 62]\n", - "%217 = [[-1 -4 -1 ... 
1 -2 3]] # ClearTensor ∈ [-12, 31] @ /fc/Gemm.matmul\n", - "%218 = matmul(%216, %217) # EncryptedTensor ∈ [-2482, 6604] @ /fc/Gemm.matmul\n", - "return %218\n", - "\n", - "Subgraphs:\n", - "\n", - " %8 = subgraph(%7):\n", - "\n", - " %0 = input # EncryptedTensor @ /conv1/Conv.conv_rounding\n", - " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", - " %2 = 0 # ClearScalar\n", - " %3 = add(%1, %2) # EncryptedTensor\n", - " %4 = [[[[ -12]] ... [[ 12]]]] # ClearTensor\n", - " %5 = subtract(%3, %4) # EncryptedTensor\n", - " %6 = 0.0009595604707281525 # ClearScalar\n", - " %7 = multiply(%6, %5) # EncryptedTensor\n", - " %8 = [[[[ 2.300 ... 19e-01]]]] # ClearTensor\n", - " %9 = add(%7, %8) # EncryptedTensor\n", - " %10 = 0 # ClearScalar\n", - " %11 = maximum(%9, %10) # EncryptedTensor\n", - " %12 = 0.09438851724215917 # ClearScalar\n", - " %13 = divide(%11, %12) # EncryptedTensor\n", - " %14 = 0 # ClearScalar\n", - " %15 = add(%13, %14) # EncryptedTensor\n", - " %16 = rint(%15) # EncryptedTensor\n", - " %17 = 0 # ClearScalar\n", - " %18 = 63 # ClearScalar\n", - " %19 = clip(%16, %17, %18) # EncryptedTensor\n", - " %20 = astype(%19, dtype=int_) # EncryptedTensor\n", - " return %20\n", - "\n", - " %14 = subgraph(%13):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", - " %2 = 0 # ClearScalar\n", - " %3 = subtract(%1, %2) # EncryptedTensor\n", - " %4 = 0.09438851724215917 # ClearScalar\n", - " %5 = multiply(%3, %4) # EncryptedTensor\n", - " %6 = 0.09438851523020911 # ClearScalar\n", - " %7 = divide(%5, %6) # EncryptedTensor\n", - " %8 = 0 # ClearScalar\n", - " %9 = add(%7, %8) # EncryptedTensor\n", - " %10 = rint(%9) # EncryptedTensor\n", - " %11 = 0 # ClearScalar\n", - " %12 = 63 # ClearScalar\n", - " %13 = clip(%10, %11, %12) # EncryptedTensor\n", - " %14 = astype(%13, dtype=int_) # EncryptedTensor\n", - " return %14\n", - "\n", - " %22 = subgraph(%21):\n", - "\n", - " %0 = input # EncryptedTensor @ /layer1/layer1.0/conv1/Conv.conv_rounding\n", - " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", - " %2 = 0 # ClearScalar\n", - " %3 = add(%1, %2) # EncryptedTensor\n", - " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", - " %5 = subtract(%3, %4) # EncryptedTensor\n", - " %6 = 0.0011402867925128443 # ClearScalar\n", - " %7 = multiply(%6, %5) # EncryptedTensor\n", - " %8 = [[[[ 3.686 ... 25e-01]]]] # ClearTensor\n", - " %9 = add(%7, %8) # EncryptedTensor\n", - " %10 = 0 # ClearScalar\n", - " %11 = maximum(%9, %10) # EncryptedTensor\n", - " %12 = 0.04016368453965985 # ClearScalar\n", - " %13 = divide(%11, %12) # EncryptedTensor\n", - " %14 = 0 # ClearScalar\n", - " %15 = add(%13, %14) # EncryptedTensor\n", - " %16 = rint(%15) # EncryptedTensor\n", - " %17 = 0 # ClearScalar\n", - " %18 = 63 # ClearScalar\n", - " %19 = clip(%16, %17, %18) # EncryptedTensor\n", - " %20 = astype(%19, dtype=int_) # EncryptedTensor\n", - " return %20\n", - "\n", - " %30 = subgraph(%29):\n", - "\n", - " %0 = input # EncryptedTensor @ /layer1/layer1.0/conv2/Conv.conv_rounding\n", - " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", - " %2 = 0 # ClearScalar\n", - " %3 = add(%1, %2) # EncryptedTensor\n", - " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", - " %5 = subtract(%3, %4) # EncryptedTensor\n", - " %6 = 0.0009986261721062137 # ClearScalar\n", - " %7 = multiply(%6, %5) # EncryptedTensor\n", - " %8 = [[[[ 1.030 ... 
10e-01]]]] # ClearTensor\n", - " %9 = add(%7, %8) # EncryptedTensor\n", - " %10 = 0.13032058768807805 # ClearScalar\n", - " %11 = divide(%9, %10) # EncryptedTensor\n", - " %12 = 2 # ClearScalar\n", - " %13 = add(%11, %12) # EncryptedTensor\n", - " %14 = rint(%13) # EncryptedTensor\n", - " %15 = -32 # ClearScalar\n", - " %16 = 31 # ClearScalar\n", - " %17 = clip(%14, %15, %16) # EncryptedTensor\n", - " %18 = astype(%17, dtype=int_) # EncryptedTensor\n", - " return %18\n", - "\n", - " %31 = subgraph(%13):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", - " %2 = 0 # ClearScalar\n", - " %3 = subtract(%1, %2) # EncryptedTensor\n", - " %4 = 0.09438851724215917 # ClearScalar\n", - " %5 = multiply(%3, %4) # EncryptedTensor\n", - " %6 = 0.09438851523020911 # ClearScalar\n", - " %7 = divide(%5, %6) # EncryptedTensor\n", - " %8 = -32 # ClearScalar\n", - " %9 = add(%7, %8) # EncryptedTensor\n", - " %10 = rint(%9) # EncryptedTensor\n", - " %11 = -32 # ClearScalar\n", - " %12 = 31 # ClearScalar\n", - " %13 = clip(%10, %11, %12) # EncryptedTensor\n", - " %14 = astype(%13, dtype=int_) # EncryptedTensor\n", - " return %14\n", - "\n", - " %32 = subgraph(%30):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = 2.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.13032058768807805 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0.042009954818961026 # ClearScalar\n", - " %6 = divide(%4, %5) # EncryptedTensor\n", - " %7 = 0 # ClearScalar\n", - " %8 = add(%6, %7) # EncryptedTensor\n", - " %9 = rint(%8) # EncryptedTensor\n", - " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", - " return %10\n", - "\n", - " %33 = subgraph(%31):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = -32.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.09438851523020911 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0.042009954818961026 # ClearScalar\n", - " %6 = divide(%4, %5) # EncryptedTensor\n", - " %7 = 0 # ClearScalar\n", - " %8 = add(%6, %7) # EncryptedTensor\n", - " %9 = rint(%8) # EncryptedTensor\n", - " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", - " return %10\n", - "\n", - " %37 = subgraph(%36):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = 0.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.042009954818961026 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0 # ClearScalar\n", - " %6 = maximum(%4, %5) # EncryptedTensor\n", - " %7 = 0.09935687727024116 # ClearScalar\n", - " %8 = divide(%6, %7) # EncryptedTensor\n", - " %9 = 0 # ClearScalar\n", - " %10 = add(%8, %9) # EncryptedTensor\n", - " %11 = rint(%10) # EncryptedTensor\n", - " %12 = 0 # ClearScalar\n", - " %13 = 63 # ClearScalar\n", - " %14 = clip(%11, %12, %13) # EncryptedTensor\n", - " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", - " return %15\n", - "\n", - " %45 = subgraph(%44):\n", - "\n", - " %0 = input # EncryptedTensor @ /layer1/layer1.1/conv1/Conv.conv_rounding\n", - " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", - " %2 = 0 # ClearScalar\n", - " %3 = add(%1, %2) # EncryptedTensor\n", - " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", - " %5 = subtract(%3, %4) # EncryptedTensor\n", - " %6 = 0.0008972804642336202 # ClearScalar\n", - " %7 = multiply(%6, %5) # EncryptedTensor\n", - " %8 = [[[[ 0.203 ... 
52077 ]]]] # ClearTensor\n", - " %9 = add(%7, %8) # EncryptedTensor\n", - " %10 = 0 # ClearScalar\n", - " %11 = maximum(%9, %10) # EncryptedTensor\n", - " %12 = 0.05179850536952156 # ClearScalar\n", - " %13 = divide(%11, %12) # EncryptedTensor\n", - " %14 = 0 # ClearScalar\n", - " %15 = add(%13, %14) # EncryptedTensor\n", - " %16 = rint(%15) # EncryptedTensor\n", - " %17 = 0 # ClearScalar\n", - " %18 = 63 # ClearScalar\n", - " %19 = clip(%16, %17, %18) # EncryptedTensor\n", - " %20 = astype(%19, dtype=int_) # EncryptedTensor\n", - " return %20\n", - "\n", - " %53 = subgraph(%52):\n", - "\n", - " %0 = input # EncryptedTensor @ /layer1/layer1.1/conv2/Conv.conv_rounding\n", - " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", - " %2 = 0 # ClearScalar\n", - " %3 = add(%1, %2) # EncryptedTensor\n", - " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", - " %5 = subtract(%3, %4) # EncryptedTensor\n", - " %6 = 0.0017501061706694404 # ClearScalar\n", - " %7 = multiply(%6, %5) # EncryptedTensor\n", - " %8 = [[[[-0.113 ... 587016]]]] # ClearTensor\n", - " %9 = add(%7, %8) # EncryptedTensor\n", - " %10 = 0.24257694819947365 # ClearScalar\n", - " %11 = divide(%9, %10) # EncryptedTensor\n", - " %12 = 6 # ClearScalar\n", - " %13 = add(%11, %12) # EncryptedTensor\n", - " %14 = rint(%13) # EncryptedTensor\n", - " %15 = -32 # ClearScalar\n", - " %16 = 31 # ClearScalar\n", - " %17 = clip(%14, %15, %16) # EncryptedTensor\n", - " %18 = astype(%17, dtype=int_) # EncryptedTensor\n", - " return %18\n", - "\n", - " %54 = subgraph(%36):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = 0.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.042009954818961026 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0 # ClearScalar\n", - " %6 = maximum(%4, %5) # EncryptedTensor\n", - " %7 = 0.09935687727024116 # ClearScalar\n", - " %8 = divide(%6, %7) # EncryptedTensor\n", - " %9 = -32 # ClearScalar\n", - " %10 = add(%8, %9) # EncryptedTensor\n", - " %11 = rint(%10) # EncryptedTensor\n", - " %12 = -32 # ClearScalar\n", - " %13 = 31 # ClearScalar\n", - " %14 = clip(%11, %12, %13) # EncryptedTensor\n", - " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", - " return %15\n", - "\n", - " %55 = subgraph(%53):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = 6.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.24257694819947365 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0.06837324171233662 # ClearScalar\n", - " %6 = divide(%4, %5) # EncryptedTensor\n", - " %7 = 0 # ClearScalar\n", - " %8 = add(%6, %7) # EncryptedTensor\n", - " %9 = rint(%8) # EncryptedTensor\n", - " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", - " return %10\n", - "\n", - " %56 = subgraph(%54):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = -32.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.09935687727024116 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0.06837324171233662 # ClearScalar\n", - " %6 = divide(%4, %5) # EncryptedTensor\n", - " %7 = 0 # ClearScalar\n", - " %8 = add(%6, %7) # EncryptedTensor\n", - " %9 = rint(%8) # EncryptedTensor\n", - " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", - " return %10\n", - "\n", - " %60 = subgraph(%59):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = 0.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.06837324171233662 # ClearScalar\n", - " %4 = multiply(%3, %2) # 
EncryptedTensor\n", - " %5 = 0 # ClearScalar\n", - " %6 = maximum(%4, %5) # EncryptedTensor\n", - " %7 = 0.14000235207764164 # ClearScalar\n", - " %8 = divide(%6, %7) # EncryptedTensor\n", - " %9 = 0 # ClearScalar\n", - " %10 = add(%8, %9) # EncryptedTensor\n", - " %11 = rint(%10) # EncryptedTensor\n", - " %12 = 0 # ClearScalar\n", - " %13 = 63 # ClearScalar\n", - " %14 = clip(%11, %12, %13) # EncryptedTensor\n", - " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", - " return %15\n", - "\n", - " %68 = subgraph(%67):\n", - "\n", - " %0 = input # EncryptedTensor @ /layer2/layer2.0/conv1/Conv.conv_rounding\n", - " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", - " %2 = 0 # ClearScalar\n", - " %3 = add(%1, %2) # EncryptedTensor\n", - " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", - " %5 = subtract(%3, %4) # EncryptedTensor\n", - " %6 = 0.0009605777806920079 # ClearScalar\n", - " %7 = multiply(%6, %5) # EncryptedTensor\n", - " %8 = [[[[-1.222 ... 35e-02]]]] # ClearTensor\n", - " %9 = add(%7, %8) # EncryptedTensor\n", - " %10 = 0 # ClearScalar\n", - " %11 = maximum(%9, %10) # EncryptedTensor\n", - " %12 = 0.04240615494644075 # ClearScalar\n", - " %13 = divide(%11, %12) # EncryptedTensor\n", - " %14 = 0 # ClearScalar\n", - " %15 = add(%13, %14) # EncryptedTensor\n", - " %16 = rint(%15) # EncryptedTensor\n", - " %17 = 0 # ClearScalar\n", - " %18 = 63 # ClearScalar\n", - " %19 = clip(%16, %17, %18) # EncryptedTensor\n", - " %20 = astype(%19, dtype=int_) # EncryptedTensor\n", - " return %20\n", - "\n", - " %76 = subgraph(%59):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = 0.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.06837324171233662 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0 # ClearScalar\n", - " %6 = maximum(%4, %5) # EncryptedTensor\n", - " %7 = 0.14000235207764164 # ClearScalar\n", - " %8 = divide(%6, %7) # EncryptedTensor\n", - " %9 = 0 # ClearScalar\n", - " %10 = add(%8, %9) # EncryptedTensor\n", - " %11 = rint(%10) # EncryptedTensor\n", - " %12 = 0 # ClearScalar\n", - " %13 = 63 # ClearScalar\n", - " %14 = clip(%11, %12, %13) # EncryptedTensor\n", - " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", - " return %15\n", - "\n", - " %80 = subgraph(%75):\n", - "\n", - " %0 = input # EncryptedTensor @ /layer2/layer2.0/conv2/Conv.conv_rounding\n", - " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", - " %2 = 0 # ClearScalar\n", - " %3 = add(%1, %2) # EncryptedTensor\n", - " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", - " %5 = subtract(%3, %4) # EncryptedTensor\n", - " %6 = 0.0009905944290260569 # ClearScalar\n", - " %7 = multiply(%6, %5) # EncryptedTensor\n", - " %8 = [[[[ 3.249 ... 09e-02]]]] # ClearTensor\n", - " %9 = add(%7, %8) # EncryptedTensor\n", - " %10 = 0.1534375406464771 # ClearScalar\n", - " %11 = divide(%9, %10) # EncryptedTensor\n", - " %12 = -10 # ClearScalar\n", - " %13 = add(%11, %12) # EncryptedTensor\n", - " %14 = rint(%13) # EncryptedTensor\n", - " %15 = -32 # ClearScalar\n", - " %16 = 31 # ClearScalar\n", - " %17 = clip(%14, %15, %16) # EncryptedTensor\n", - " %18 = astype(%17, dtype=int_) # EncryptedTensor\n", - " return %18\n", - "\n", - " %81 = subgraph(%79):\n", - "\n", - " %0 = input # EncryptedTensor @ /layer2/layer2.0/downsample/downsample.0/Conv.conv_rounding\n", - " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", - " %2 = 0 # ClearScalar\n", - " %3 = add(%1, %2) # EncryptedTensor\n", - " %4 = [[[[0]] [ ... 
] [[0]]]] # ClearTensor\n", - " %5 = subtract(%3, %4) # EncryptedTensor\n", - " %6 = 0.003126346405717952 # ClearScalar\n", - " %7 = multiply(%6, %5) # EncryptedTensor\n", - " %8 = [[[[ 0.184 ... 109817]]]] # ClearTensor\n", - " %9 = add(%7, %8) # EncryptedTensor\n", - " %10 = 0.1084907392607087 # ClearScalar\n", - " %11 = divide(%9, %10) # EncryptedTensor\n", - " %12 = 2 # ClearScalar\n", - " %13 = add(%11, %12) # EncryptedTensor\n", - " %14 = rint(%13) # EncryptedTensor\n", - " %15 = -32 # ClearScalar\n", - " %16 = 31 # ClearScalar\n", - " %17 = clip(%14, %15, %16) # EncryptedTensor\n", - " %18 = astype(%17, dtype=int_) # EncryptedTensor\n", - " return %18\n", - "\n", - " %82 = subgraph(%80):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = -10.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.1534375406464771 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0.04733662649264896 # ClearScalar\n", - " %6 = divide(%4, %5) # EncryptedTensor\n", - " %7 = 0 # ClearScalar\n", - " %8 = add(%6, %7) # EncryptedTensor\n", - " %9 = rint(%8) # EncryptedTensor\n", - " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", - " return %10\n", - "\n", - " %83 = subgraph(%81):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = 2.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.1084907392607087 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0.04733662649264896 # ClearScalar\n", - " %6 = divide(%4, %5) # EncryptedTensor\n", - " %7 = 0 # ClearScalar\n", - " %8 = add(%6, %7) # EncryptedTensor\n", - " %9 = rint(%8) # EncryptedTensor\n", - " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", - " return %10\n", - "\n", - " %87 = subgraph(%86):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = 0.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.04733662649264896 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0 # ClearScalar\n", - " %6 = maximum(%4, %5) # EncryptedTensor\n", - " %7 = 0.11045212848284759 # ClearScalar\n", - " %8 = divide(%6, %7) # EncryptedTensor\n", - " %9 = 0 # ClearScalar\n", - " %10 = add(%8, %9) # EncryptedTensor\n", - " %11 = rint(%10) # EncryptedTensor\n", - " %12 = 0 # ClearScalar\n", - " %13 = 63 # ClearScalar\n", - " %14 = clip(%11, %12, %13) # EncryptedTensor\n", - " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", - " return %15\n", - "\n", - " %95 = subgraph(%94):\n", - "\n", - " %0 = input # EncryptedTensor @ /layer2/layer2.1/conv1/Conv.conv_rounding\n", - " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", - " %2 = 0 # ClearScalar\n", - " %3 = add(%1, %2) # EncryptedTensor\n", - " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", - " %5 = subtract(%3, %4) # EncryptedTensor\n", - " %6 = 0.0011082646273757157 # ClearScalar\n", - " %7 = multiply(%6, %5) # EncryptedTensor\n", - " %8 = [[[[ 5.801 ... 
23e-01]]]] # ClearTensor\n", - " %9 = add(%7, %8) # EncryptedTensor\n", - " %10 = 0 # ClearScalar\n", - " %11 = maximum(%9, %10) # EncryptedTensor\n", - " %12 = 0.05601460751206594 # ClearScalar\n", - " %13 = divide(%11, %12) # EncryptedTensor\n", - " %14 = 0 # ClearScalar\n", - " %15 = add(%13, %14) # EncryptedTensor\n", - " %16 = rint(%15) # EncryptedTensor\n", - " %17 = 0 # ClearScalar\n", - " %18 = 63 # ClearScalar\n", - " %19 = clip(%16, %17, %18) # EncryptedTensor\n", - " %20 = astype(%19, dtype=int_) # EncryptedTensor\n", - " return %20\n", - "\n", - " %103 = subgraph(%102):\n", - "\n", - " %0 = input # EncryptedTensor @ /layer2/layer2.1/conv2/Conv.conv_rounding\n", - " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", - " %2 = 0 # ClearScalar\n", - " %3 = add(%1, %2) # EncryptedTensor\n", - " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", - " %5 = subtract(%3, %4) # EncryptedTensor\n", - " %6 = 0.0015848130037245337 # ClearScalar\n", - " %7 = multiply(%6, %5) # EncryptedTensor\n", - " %8 = [[[[-1.135 ... 16e-02]]]] # ClearTensor\n", - " %9 = add(%7, %8) # EncryptedTensor\n", - " %10 = 0.1418038087531616 # ClearScalar\n", - " %11 = divide(%9, %10) # EncryptedTensor\n", - " %12 = 1 # ClearScalar\n", - " %13 = add(%11, %12) # EncryptedTensor\n", - " %14 = rint(%13) # EncryptedTensor\n", - " %15 = -32 # ClearScalar\n", - " %16 = 31 # ClearScalar\n", - " %17 = clip(%14, %15, %16) # EncryptedTensor\n", - " %18 = astype(%17, dtype=int_) # EncryptedTensor\n", - " return %18\n", - "\n", - " %104 = subgraph(%86):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = 0.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.04733662649264896 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0 # ClearScalar\n", - " %6 = maximum(%4, %5) # EncryptedTensor\n", - " %7 = 0.11045212848284759 # ClearScalar\n", - " %8 = divide(%6, %7) # EncryptedTensor\n", - " %9 = -32 # ClearScalar\n", - " %10 = add(%8, %9) # EncryptedTensor\n", - " %11 = rint(%10) # EncryptedTensor\n", - " %12 = -32 # ClearScalar\n", - " %13 = 31 # ClearScalar\n", - " %14 = clip(%11, %12, %13) # EncryptedTensor\n", - " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", - " return %15\n", - "\n", - " %105 = subgraph(%103):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = 1.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.1418038087531616 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0.04268310452940874 # ClearScalar\n", - " %6 = divide(%4, %5) # EncryptedTensor\n", - " %7 = 0 # ClearScalar\n", - " %8 = add(%6, %7) # EncryptedTensor\n", - " %9 = rint(%8) # EncryptedTensor\n", - " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", - " return %10\n", - "\n", - " %106 = subgraph(%104):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = -32.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.11045212848284759 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0.04268310452940874 # ClearScalar\n", - " %6 = divide(%4, %5) # EncryptedTensor\n", - " %7 = 0 # ClearScalar\n", - " %8 = add(%6, %7) # EncryptedTensor\n", - " %9 = rint(%8) # EncryptedTensor\n", - " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", - " return %10\n", - "\n", - " %110 = subgraph(%109):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = 0.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.04268310452940874 # ClearScalar\n", - " %4 = multiply(%3, %2) # 
EncryptedTensor\n", - " %5 = 0 # ClearScalar\n", - " %6 = maximum(%4, %5) # EncryptedTensor\n", - " %7 = 0.09959391056862038 # ClearScalar\n", - " %8 = divide(%6, %7) # EncryptedTensor\n", - " %9 = 0 # ClearScalar\n", - " %10 = add(%8, %9) # EncryptedTensor\n", - " %11 = rint(%10) # EncryptedTensor\n", - " %12 = 0 # ClearScalar\n", - " %13 = 63 # ClearScalar\n", - " %14 = clip(%11, %12, %13) # EncryptedTensor\n", - " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", - " return %15\n", - "\n", - " %118 = subgraph(%117):\n", - "\n", - " %0 = input # EncryptedTensor @ /layer3/layer3.0/conv1/Conv.conv_rounding\n", - " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", - " %2 = 0 # ClearScalar\n", - " %3 = add(%1, %2) # EncryptedTensor\n", - " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", - " %5 = subtract(%3, %4) # EncryptedTensor\n", - " %6 = 0.0007572907035253597 # ClearScalar\n", - " %7 = multiply(%6, %5) # EncryptedTensor\n", - " %8 = [[[[-2.011 ... 45e-01]]]] # ClearTensor\n", - " %9 = add(%7, %8) # EncryptedTensor\n", - " %10 = 0 # ClearScalar\n", - " %11 = maximum(%9, %10) # EncryptedTensor\n", - " %12 = 0.04280234014005014 # ClearScalar\n", - " %13 = divide(%11, %12) # EncryptedTensor\n", - " %14 = 0 # ClearScalar\n", - " %15 = add(%13, %14) # EncryptedTensor\n", - " %16 = rint(%15) # EncryptedTensor\n", - " %17 = 0 # ClearScalar\n", - " %18 = 63 # ClearScalar\n", - " %19 = clip(%16, %17, %18) # EncryptedTensor\n", - " %20 = astype(%19, dtype=int_) # EncryptedTensor\n", - " return %20\n", - "\n", - " %126 = subgraph(%109):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = 0.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.04268310452940874 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0 # ClearScalar\n", - " %6 = maximum(%4, %5) # EncryptedTensor\n", - " %7 = 0.09959391056862038 # ClearScalar\n", - " %8 = divide(%6, %7) # EncryptedTensor\n", - " %9 = 0 # ClearScalar\n", - " %10 = add(%8, %9) # EncryptedTensor\n", - " %11 = rint(%10) # EncryptedTensor\n", - " %12 = 0 # ClearScalar\n", - " %13 = 63 # ClearScalar\n", - " %14 = clip(%11, %12, %13) # EncryptedTensor\n", - " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", - " return %15\n", - "\n", - " %130 = subgraph(%125):\n", - "\n", - " %0 = input # EncryptedTensor @ /layer3/layer3.0/conv2/Conv.conv_rounding\n", - " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", - " %2 = 0 # ClearScalar\n", - " %3 = add(%1, %2) # EncryptedTensor\n", - " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", - " %5 = subtract(%3, %4) # EncryptedTensor\n", - " %6 = 0.0007786148656779211 # ClearScalar\n", - " %7 = multiply(%6, %5) # EncryptedTensor\n", - " %8 = [[[[ 0.159 ... 642246]]]] # ClearTensor\n", - " %9 = add(%7, %8) # EncryptedTensor\n", - " %10 = 0.11637498747540001 # ClearScalar\n", - " %11 = divide(%9, %10) # EncryptedTensor\n", - " %12 = -5 # ClearScalar\n", - " %13 = add(%11, %12) # EncryptedTensor\n", - " %14 = rint(%13) # EncryptedTensor\n", - " %15 = -32 # ClearScalar\n", - " %16 = 31 # ClearScalar\n", - " %17 = clip(%14, %15, %16) # EncryptedTensor\n", - " %18 = astype(%17, dtype=int_) # EncryptedTensor\n", - " return %18\n", - "\n", - " %131 = subgraph(%129):\n", - "\n", - " %0 = input # EncryptedTensor @ /layer3/layer3.0/downsample/downsample.0/Conv.conv_rounding\n", - " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", - " %2 = 0 # ClearScalar\n", - " %3 = add(%1, %2) # EncryptedTensor\n", - " %4 = [[[[0]] [ ... 
] [[0]]]] # ClearTensor\n", - " %5 = subtract(%3, %4) # EncryptedTensor\n", - " %6 = 0.0013113023333722424 # ClearScalar\n", - " %7 = multiply(%6, %5) # EncryptedTensor\n", - " %8 = [[[[ 0.036 ... 50447 ]]]] # ClearTensor\n", - " %9 = add(%7, %8) # EncryptedTensor\n", - " %10 = 0.045917581353283936 # ClearScalar\n", - " %11 = divide(%9, %10) # EncryptedTensor\n", - " %12 = 7 # ClearScalar\n", - " %13 = add(%11, %12) # EncryptedTensor\n", - " %14 = rint(%13) # EncryptedTensor\n", - " %15 = -32 # ClearScalar\n", - " %16 = 31 # ClearScalar\n", - " %17 = clip(%14, %15, %16) # EncryptedTensor\n", - " %18 = astype(%17, dtype=int_) # EncryptedTensor\n", - " return %18\n", - "\n", - " %132 = subgraph(%130):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = -5.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.11637498747540001 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0.030653778146597723 # ClearScalar\n", - " %6 = divide(%4, %5) # EncryptedTensor\n", - " %7 = 0 # ClearScalar\n", - " %8 = add(%6, %7) # EncryptedTensor\n", - " %9 = rint(%8) # EncryptedTensor\n", - " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", - " return %10\n", - "\n", - " %133 = subgraph(%131):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = 7.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.045917581353283936 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0.030653778146597723 # ClearScalar\n", - " %6 = divide(%4, %5) # EncryptedTensor\n", - " %7 = 0 # ClearScalar\n", - " %8 = add(%6, %7) # EncryptedTensor\n", - " %9 = rint(%8) # EncryptedTensor\n", - " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", - " return %10\n", - "\n", - " %137 = subgraph(%136):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = 0.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.030653778146597723 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0 # ClearScalar\n", - " %6 = maximum(%4, %5) # EncryptedTensor\n", - " %7 = 0.07103891443497251 # ClearScalar\n", - " %8 = divide(%6, %7) # EncryptedTensor\n", - " %9 = 0 # ClearScalar\n", - " %10 = add(%8, %9) # EncryptedTensor\n", - " %11 = rint(%10) # EncryptedTensor\n", - " %12 = 0 # ClearScalar\n", - " %13 = 63 # ClearScalar\n", - " %14 = clip(%11, %12, %13) # EncryptedTensor\n", - " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", - " return %15\n", - "\n", - " %145 = subgraph(%144):\n", - "\n", - " %0 = input # EncryptedTensor @ /layer3/layer3.1/conv1/Conv.conv_rounding\n", - " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", - " %2 = 0 # ClearScalar\n", - " %3 = add(%1, %2) # EncryptedTensor\n", - " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", - " %5 = subtract(%3, %4) # EncryptedTensor\n", - " %6 = 0.0006241869374887713 # ClearScalar\n", - " %7 = multiply(%6, %5) # EncryptedTensor\n", - " %8 = [[[[-7.669 ... 
21e-02]]]] # ClearTensor\n", - " %9 = add(%7, %8) # EncryptedTensor\n", - " %10 = 0 # ClearScalar\n", - " %11 = maximum(%9, %10) # EncryptedTensor\n", - " %12 = 0.035695123544226944 # ClearScalar\n", - " %13 = divide(%11, %12) # EncryptedTensor\n", - " %14 = 0 # ClearScalar\n", - " %15 = add(%13, %14) # EncryptedTensor\n", - " %16 = rint(%15) # EncryptedTensor\n", - " %17 = 0 # ClearScalar\n", - " %18 = 63 # ClearScalar\n", - " %19 = clip(%16, %17, %18) # EncryptedTensor\n", - " %20 = astype(%19, dtype=int_) # EncryptedTensor\n", - " return %20\n", - "\n", - " %153 = subgraph(%152):\n", - "\n", - " %0 = input # EncryptedTensor @ /layer3/layer3.1/conv2/Conv.conv_rounding\n", - " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", - " %2 = 0 # ClearScalar\n", - " %3 = add(%1, %2) # EncryptedTensor\n", - " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", - " %5 = subtract(%3, %4) # EncryptedTensor\n", - " %6 = 0.001117051080468814 # ClearScalar\n", - " %7 = multiply(%6, %5) # EncryptedTensor\n", - " %8 = [[[[ 1.028 ... 86e-02]]]] # ClearTensor\n", - " %9 = add(%7, %8) # EncryptedTensor\n", - " %10 = 0.12919705453311078 # ClearScalar\n", - " %11 = divide(%9, %10) # EncryptedTensor\n", - " %12 = 2 # ClearScalar\n", - " %13 = add(%11, %12) # EncryptedTensor\n", - " %14 = rint(%13) # EncryptedTensor\n", - " %15 = -32 # ClearScalar\n", - " %16 = 31 # ClearScalar\n", - " %17 = clip(%14, %15, %16) # EncryptedTensor\n", - " %18 = astype(%17, dtype=int_) # EncryptedTensor\n", - " return %18\n", - "\n", - " %154 = subgraph(%136):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = 0.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.030653778146597723 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0 # ClearScalar\n", - " %6 = maximum(%4, %5) # EncryptedTensor\n", - " %7 = 0.07103891443497251 # ClearScalar\n", - " %8 = divide(%6, %7) # EncryptedTensor\n", - " %9 = -32 # ClearScalar\n", - " %10 = add(%8, %9) # EncryptedTensor\n", - " %11 = rint(%10) # EncryptedTensor\n", - " %12 = -32 # ClearScalar\n", - " %13 = 31 # ClearScalar\n", - " %14 = clip(%11, %12, %13) # EncryptedTensor\n", - " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", - " return %15\n", - "\n", - " %155 = subgraph(%153):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = 2.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.12919705453311078 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0.0343202671787269 # ClearScalar\n", - " %6 = divide(%4, %5) # EncryptedTensor\n", - " %7 = 0 # ClearScalar\n", - " %8 = add(%6, %7) # EncryptedTensor\n", - " %9 = rint(%8) # EncryptedTensor\n", - " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", - " return %10\n", - "\n", - " %156 = subgraph(%154):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = -32.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.07103891443497251 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0.0343202671787269 # ClearScalar\n", - " %6 = divide(%4, %5) # EncryptedTensor\n", - " %7 = 0 # ClearScalar\n", - " %8 = add(%6, %7) # EncryptedTensor\n", - " %9 = rint(%8) # EncryptedTensor\n", - " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", - " return %10\n", - "\n", - " %160 = subgraph(%159):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = 0.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.0343202671787269 # ClearScalar\n", - " %4 = multiply(%3, %2) # 
EncryptedTensor\n", - " %5 = 0 # ClearScalar\n", - " %6 = maximum(%4, %5) # EncryptedTensor\n", - " %7 = 0.07299866352300644 # ClearScalar\n", - " %8 = divide(%6, %7) # EncryptedTensor\n", - " %9 = 0 # ClearScalar\n", - " %10 = add(%8, %9) # EncryptedTensor\n", - " %11 = rint(%10) # EncryptedTensor\n", - " %12 = 0 # ClearScalar\n", - " %13 = 63 # ClearScalar\n", - " %14 = clip(%11, %12, %13) # EncryptedTensor\n", - " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", - " return %15\n", - "\n", - " %168 = subgraph(%167):\n", - "\n", - " %0 = input # EncryptedTensor @ /layer4/layer4.0/conv1/Conv.conv_rounding\n", - " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", - " %2 = 0 # ClearScalar\n", - " %3 = add(%1, %2) # EncryptedTensor\n", - " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", - " %5 = subtract(%3, %4) # EncryptedTensor\n", - " %6 = 0.0007102601305192167 # ClearScalar\n", - " %7 = multiply(%6, %5) # EncryptedTensor\n", - " %8 = [[[[-0.113 ... 820322]]]] # ClearTensor\n", - " %9 = add(%7, %8) # EncryptedTensor\n", - " %10 = 0 # ClearScalar\n", - " %11 = maximum(%9, %10) # EncryptedTensor\n", - " %12 = 0.03581596196540163 # ClearScalar\n", - " %13 = divide(%11, %12) # EncryptedTensor\n", - " %14 = 0 # ClearScalar\n", - " %15 = add(%13, %14) # EncryptedTensor\n", - " %16 = rint(%15) # EncryptedTensor\n", - " %17 = 0 # ClearScalar\n", - " %18 = 63 # ClearScalar\n", - " %19 = clip(%16, %17, %18) # EncryptedTensor\n", - " %20 = astype(%19, dtype=int_) # EncryptedTensor\n", - " return %20\n", - "\n", - " %176 = subgraph(%159):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = 0.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.0343202671787269 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0 # ClearScalar\n", - " %6 = maximum(%4, %5) # EncryptedTensor\n", - " %7 = 0.07299866352300644 # ClearScalar\n", - " %8 = divide(%6, %7) # EncryptedTensor\n", - " %9 = 0 # ClearScalar\n", - " %10 = add(%8, %9) # EncryptedTensor\n", - " %11 = rint(%10) # EncryptedTensor\n", - " %12 = 0 # ClearScalar\n", - " %13 = 63 # ClearScalar\n", - " %14 = clip(%11, %12, %13) # EncryptedTensor\n", - " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", - " return %15\n", - "\n", - " %180 = subgraph(%175):\n", - "\n", - " %0 = input # EncryptedTensor @ /layer4/layer4.0/conv2/Conv.conv_rounding\n", - " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", - " %2 = 0 # ClearScalar\n", - " %3 = add(%1, %2) # EncryptedTensor\n", - " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", - " %5 = subtract(%3, %4) # EncryptedTensor\n", - " %6 = 0.0013214642859977128 # ClearScalar\n", - " %7 = multiply(%6, %5) # EncryptedTensor\n", - " %8 = [[[[ 0.483 ... 12071 ]]]] # ClearTensor\n", - " %9 = add(%7, %8) # EncryptedTensor\n", - " %10 = 0.1327200367532172 # ClearScalar\n", - " %11 = divide(%9, %10) # EncryptedTensor\n", - " %12 = -2 # ClearScalar\n", - " %13 = add(%11, %12) # EncryptedTensor\n", - " %14 = rint(%13) # EncryptedTensor\n", - " %15 = -32 # ClearScalar\n", - " %16 = 31 # ClearScalar\n", - " %17 = clip(%14, %15, %16) # EncryptedTensor\n", - " %18 = astype(%17, dtype=int_) # EncryptedTensor\n", - " return %18\n", - "\n", - " %181 = subgraph(%179):\n", - "\n", - " %0 = input # EncryptedTensor @ /layer4/layer4.0/downsample/downsample.0/Conv.conv_rounding\n", - " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", - " %2 = 0 # ClearScalar\n", - " %3 = add(%1, %2) # EncryptedTensor\n", - " %4 = [[[[0]] [ ... 
] [[0]]]] # ClearTensor\n", - " %5 = subtract(%3, %4) # EncryptedTensor\n", - " %6 = 0.0023505757716739957 # ClearScalar\n", - " %7 = multiply(%6, %5) # EncryptedTensor\n", - " %8 = [[[[-3.347 ... 96e-01]]]] # ClearTensor\n", - " %9 = add(%7, %8) # EncryptedTensor\n", - " %10 = 0.0739922691886633 # ClearScalar\n", - " %11 = divide(%9, %10) # EncryptedTensor\n", - " %12 = 1 # ClearScalar\n", - " %13 = add(%11, %12) # EncryptedTensor\n", - " %14 = rint(%13) # EncryptedTensor\n", - " %15 = -32 # ClearScalar\n", - " %16 = 31 # ClearScalar\n", - " %17 = clip(%14, %15, %16) # EncryptedTensor\n", - " %18 = astype(%17, dtype=int_) # EncryptedTensor\n", - " return %18\n", - "\n", - " %182 = subgraph(%180):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = -2.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.1327200367532172 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0.038004591072633646 # ClearScalar\n", - " %6 = divide(%4, %5) # EncryptedTensor\n", - " %7 = 0 # ClearScalar\n", - " %8 = add(%6, %7) # EncryptedTensor\n", - " %9 = rint(%8) # EncryptedTensor\n", - " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", - " return %10\n", - "\n", - " %183 = subgraph(%181):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = 1.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.0739922691886633 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0.038004591072633646 # ClearScalar\n", - " %6 = divide(%4, %5) # EncryptedTensor\n", - " %7 = 0 # ClearScalar\n", - " %8 = add(%6, %7) # EncryptedTensor\n", - " %9 = rint(%8) # EncryptedTensor\n", - " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", - " return %10\n", - "\n", - " %187 = subgraph(%186):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = 0.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.038004591072633646 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0 # ClearScalar\n", - " %6 = maximum(%4, %5) # EncryptedTensor\n", - " %7 = 0.0820416569187012 # ClearScalar\n", - " %8 = divide(%6, %7) # EncryptedTensor\n", - " %9 = 0 # ClearScalar\n", - " %10 = add(%8, %9) # EncryptedTensor\n", - " %11 = rint(%10) # EncryptedTensor\n", - " %12 = 0 # ClearScalar\n", - " %13 = 63 # ClearScalar\n", - " %14 = clip(%11, %12, %13) # EncryptedTensor\n", - " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", - " return %15\n", - "\n", - " %195 = subgraph(%194):\n", - "\n", - " %0 = input # EncryptedTensor @ /layer4/layer4.1/conv1/Conv.conv_rounding\n", - " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", - " %2 = 0 # ClearScalar\n", - " %3 = add(%1, %2) # EncryptedTensor\n", - " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", - " %5 = subtract(%3, %4) # EncryptedTensor\n", - " %6 = 0.0007761212732305896 # ClearScalar\n", - " %7 = multiply(%6, %5) # EncryptedTensor\n", - " %8 = [[[[ 0.235 ... 
420937]]]] # ClearTensor\n", - " %9 = add(%7, %8) # EncryptedTensor\n", - " %10 = 0 # ClearScalar\n", - " %11 = maximum(%9, %10) # EncryptedTensor\n", - " %12 = 0.025757889498509705 # ClearScalar\n", - " %13 = divide(%11, %12) # EncryptedTensor\n", - " %14 = 0 # ClearScalar\n", - " %15 = add(%13, %14) # EncryptedTensor\n", - " %16 = rint(%15) # EncryptedTensor\n", - " %17 = 0 # ClearScalar\n", - " %18 = 63 # ClearScalar\n", - " %19 = clip(%16, %17, %18) # EncryptedTensor\n", - " %20 = astype(%19, dtype=int_) # EncryptedTensor\n", - " return %20\n", - "\n", - " %203 = subgraph(%202):\n", - "\n", - " %0 = input # EncryptedTensor @ /layer4/layer4.1/conv2/Conv.conv_rounding\n", - " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", - " %2 = 0 # ClearScalar\n", - " %3 = add(%1, %2) # EncryptedTensor\n", - " %4 = [[[[0]] [ ... ] [[0]]]] # ClearTensor\n", - " %5 = subtract(%3, %4) # EncryptedTensor\n", - " %6 = 0.0030313779134727974 # ClearScalar\n", - " %7 = multiply(%6, %5) # EncryptedTensor\n", - " %8 = [[[[ 0.465 ... 483366]]]] # ClearTensor\n", - " %9 = add(%7, %8) # EncryptedTensor\n", - " %10 = 0.5993932655712262 # ClearScalar\n", - " %11 = divide(%9, %10) # EncryptedTensor\n", - " %12 = -12 # ClearScalar\n", - " %13 = add(%11, %12) # EncryptedTensor\n", - " %14 = rint(%13) # EncryptedTensor\n", - " %15 = -32 # ClearScalar\n", - " %16 = 31 # ClearScalar\n", - " %17 = clip(%14, %15, %16) # EncryptedTensor\n", - " %18 = astype(%17, dtype=int_) # EncryptedTensor\n", - " return %18\n", - "\n", - " %204 = subgraph(%186):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = 0.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.038004591072633646 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0 # ClearScalar\n", - " %6 = maximum(%4, %5) # EncryptedTensor\n", - " %7 = 0.0820416569187012 # ClearScalar\n", - " %8 = divide(%6, %7) # EncryptedTensor\n", - " %9 = -32 # ClearScalar\n", - " %10 = add(%8, %9) # EncryptedTensor\n", - " %11 = rint(%10) # EncryptedTensor\n", - " %12 = -32 # ClearScalar\n", - " %13 = 31 # ClearScalar\n", - " %14 = clip(%11, %12, %13) # EncryptedTensor\n", - " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", - " return %15\n", - "\n", - " %205 = subgraph(%203):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = -12.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.5993932655712262 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0.15133961144301405 # ClearScalar\n", - " %6 = divide(%4, %5) # EncryptedTensor\n", - " %7 = 0 # ClearScalar\n", - " %8 = add(%6, %7) # EncryptedTensor\n", - " %9 = rint(%8) # EncryptedTensor\n", - " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", - " return %10\n", - "\n", - " %206 = subgraph(%204):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = -32.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.0820416569187012 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0.15133961144301405 # ClearScalar\n", - " %6 = divide(%4, %5) # EncryptedTensor\n", - " %7 = 0 # ClearScalar\n", - " %8 = add(%6, %7) # EncryptedTensor\n", - " %9 = rint(%8) # EncryptedTensor\n", - " %10 = astype(%9, dtype=int_) # EncryptedTensor\n", - " return %10\n", - "\n", - " %210 = subgraph(%209):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = 0.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.15133961144301405 # ClearScalar\n", - " %4 = multiply(%3, %2) 
# EncryptedTensor\n", - " %5 = 0 # ClearScalar\n", - " %6 = maximum(%4, %5) # EncryptedTensor\n", - " %7 = 0.4227900256185789 # ClearScalar\n", - " %8 = divide(%6, %7) # EncryptedTensor\n", - " %9 = 0 # ClearScalar\n", - " %10 = add(%8, %9) # EncryptedTensor\n", - " %11 = rint(%10) # EncryptedTensor\n", - " %12 = 0 # ClearScalar\n", - " %13 = 63 # ClearScalar\n", - " %14 = clip(%11, %12, %13) # EncryptedTensor\n", - " %15 = astype(%14, dtype=int_) # EncryptedTensor\n", - " return %15\n", - "\n", - " %214 = subgraph(%213):\n", - "\n", - " %0 = input # EncryptedTensor @ /avgpool/AveragePool.avgpool_rounding\n", - " %1 = astype(%0, dtype=float64) # EncryptedTensor\n", - " %2 = 0.02040816326530612 # ClearScalar\n", - " %3 = multiply(%1, %2) # EncryptedTensor\n", - " %4 = 0 # ClearScalar\n", - " %5 = subtract(%3, %4) # EncryptedTensor\n", - " %6 = 0.4227900256185789 # ClearScalar\n", - " %7 = multiply(%5, %6) # EncryptedTensor\n", - " %8 = 0.03665712827162568 # ClearScalar\n", - " %9 = divide(%7, %8) # EncryptedTensor\n", - " %10 = 0 # ClearScalar\n", - " %11 = add(%9, %10) # EncryptedTensor\n", - " %12 = rint(%11) # EncryptedTensor\n", - " %13 = 0 # ClearScalar\n", - " %14 = 255 # ClearScalar\n", - " %15 = clip(%12, %13, %14) # EncryptedTensor\n", - " %16 = astype(%15, dtype=int_) # EncryptedTensor\n", - " return %16\n", - "\n", - " %216 = subgraph(%215):\n", - "\n", - " %0 = input # EncryptedTensor\n", - " %1 = 0.0 # ClearScalar\n", - " %2 = subtract(%0, %1) # EncryptedTensor\n", - " %3 = 0.03665712827162568 # ClearScalar\n", - " %4 = multiply(%3, %2) # EncryptedTensor\n", - " %5 = 0.14837409062324677 # ClearScalar\n", - " %6 = divide(%4, %5) # EncryptedTensor\n", - " %7 = 0 # ClearScalar\n", - " %8 = add(%6, %7) # EncryptedTensor\n", - " %9 = rint(%8) # EncryptedTensor\n", - " %10 = 0 # ClearScalar\n", - " %11 = 63 # ClearScalar\n", - " %12 = clip(%9, %10, %11) # EncryptedTensor\n", - " %13 = astype(%12, dtype=int_) # EncryptedTensor\n", - " return %13\n" - ] - } - ], - "source": [ - "print(q_module.fhe_circuit)" - ] - }, - { - "cell_type": "code", - "execution_count": 31, - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "weight_matrix = q_module.quant_layers_dict['193'][1].constant_inputs[1].qvalues\n", - "\n", - "np.savetxt(\"weight_matrix_1000x512.csv\", weight_matrix, delimiter=\",\")" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "2856957" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "open(\"resnet18.mlir\", \"w\").write(q_module.fhe_circuit.mlir)\n", - "open(\"resnet18.graph\", \"w\").write(q_module.fhe_circuit.graph.format(maximum_constant_length=10000))" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "# Get quantized input\n", - "quantized_input = q_module.quantize_input(images.detach().numpy()[:1])\n", - "expected_quantized_output = q_module.fhe_circuit.graph(quantized_input)\n", - "\n", - "# Save input / output to disk\n", - "import numpy as np\n", - "\n", - "# Save quantized input and expected output to disk\n", - "np.save(\"quantized_input.npy\", quantized_input)\n", - "np.save(\"expected_quantized_output.npy\", expected_quantized_output)\n" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([[[[-24, -24, -24, ..., -27, -27, -27],\n", - " [-23, -24, -24, ..., 
-27, -27, -26],\n", - " [-23, -24, -24, ..., -27, -26, -26],\n", - " ...,\n", - " [-26, -25, -25, ..., -26, -26, -26],\n", - " [-26, -25, -25, ..., -26, -25, -25],\n", - " [-26, -25, -25, ..., -25, -25, -25]],\n", - "\n", - " [[-21, -21, -20, ..., -26, -25, -25],\n", - " [-20, -20, -20, ..., -25, -25, -25],\n", - " [-20, -20, -21, ..., -25, -25, -24],\n", - " ...,\n", - " [-24, -22, -22, ..., -24, -24, -23],\n", - " [-24, -23, -23, ..., -24, -23, -23],\n", - " [-24, -24, -23, ..., -24, -23, -23]],\n", - "\n", - " [[-19, -19, -19, ..., -23, -23, -22],\n", - " [-18, -19, -19, ..., -23, -22, -22],\n", - " [-18, -19, -19, ..., -22, -22, -22],\n", - " ...,\n", - " [-21, -20, -20, ..., -21, -21, -21],\n", - " [-21, -21, -21, ..., -21, -21, -21],\n", - " [-21, -21, -21, ..., -21, -21, -21]]]])" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "quantized_input" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Accuracy of the FHEResNet18 model on the images: 86.36363636%\n", - "Top-5 Accuracy of the FHEResNet18 model on the images: 95.45454545%\n" - ] - } - ], - "source": [ - "with torch.no_grad():\n", - " outputs = q_module.forward(images.detach().numpy(), fhe=\"disable\")\n", - " outputs = torch.from_numpy(outputs)\n", - " _, predicted = torch.max(outputs, 1)\n", - "\n", - "accuracy = calculate_accuracy(predicted, labels)\n", - "print(f\"Accuracy of the FHEResNet18 model on the images: {accuracy:.8f}%\")\n", - "\n", - "topk_accuracy = calculate_topk_accuracy(outputs, labels, topk=5)\n", - "print(f\"Top-5 Accuracy of the FHEResNet18 model on the images: {topk_accuracy:.8f}%\")" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Accuracy of the FHEResNet18 model on the images: 86.36363636%\n", - "Top-5 Accuracy of the FHEResNet18 model on the images: 95.45454545%\n" - ] - } - ], - "source": [ - "with torch.no_grad():\n", - " outputs = q_module.forward(images.detach().numpy(), fhe=\"simulate\")\n", - " outputs = torch.from_numpy(outputs)\n", - " _, predicted = torch.max(outputs, 1)\n", - "\n", - "accuracy = calculate_accuracy(predicted, labels)\n", - "print(f\"Accuracy of the FHEResNet18 model on the images: {accuracy:.8f}%\")\n", - "\n", - "topk_accuracy = calculate_topk_accuracy(outputs, labels, topk=5)\n", - "print(f\"Top-5 Accuracy of the FHEResNet18 model on the images: {topk_accuracy:.8f}%\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import time\n", - "\n", - "# Run a single example with fhe=execute to check the time\n", - "q_module.fhe_circuit.keygen()\n", - "\n", - "start = time.time()\n", - "outputs = q_module.forward(images.detach().numpy(), fhe=\"execute\")\n", - "end = time.time()\n", - "\n", - "print(f\"Time taken for one fhe execution: {end - start} seconds\")" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "139496591" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Save the FHE circuit to an MLIR file\n", - "# open(\"resnet.mlir\", \"w\").write(q_module.fhe_circuit.mlir)" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - 
"output_type": "stream", - "text": [ - "/tmp/ipykernel_4331/766114212.py:51: UserWarning: FixedFormatter should only be used together with FixedLocator\n", - " ax.set_xticklabels([\"\"] + rounding_threshold_bits_range, rotation=45)\n", - "/tmp/ipykernel_4331/766114212.py:52: UserWarning: FixedFormatter should only be used together with FixedLocator\n", - " ax.set_yticklabels([\"\"] + list(n_bits_range))\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAwcAAALECAYAAABdU0zhAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/xnp5ZAAAACXBIWXMAAA9hAAAPYQGoP6dpAACsY0lEQVR4nOzdd3wU1frH8e+m904SSiAIht6boSsIIiCoVL3SFBXFxvWHIooVEQuKDS4oghcUC6BcUJBqRXrvvQQSCKkkpO3O7w9gYc1Gs0h2E/N5v17zgpx9ZubMkwT27HPOjMkwDEMAAAAAyj03V3cAAAAAQOnA4AAAAACAJAYHAAAAAC5icAAAAABAEoMDAAAAABcxOAAAAAAgicEBAAAAgIsYHAAAAACQxOAAAAAAwEUMDgCUaUlJSerTp4/Cw8NlMpn0zjvvuLpLV+X1119X7dq1ZbFY/jTuyJEjMplMmjlzprXthRdekMlkKpF+XTrfm2+++Zexf6cfAwYMUL9+/a5qXwDAtcPgAChBH374oUwmk1q1auXqrvxjPfHEE1q6dKnGjBmj//73v7rllluKjDWZTHa36Ohoa8ylN7jJycl2jxEbG6sePXoU67gmk0kPPvjgX15DRkaGJk6cqKeeekpubuXzn+WnnnpK8+bN09atW13dlSJ17NixyO/znj17JEmrV6+2aff09NR1112nQYMG6dChQ9ZjnT9/Xvfee6/q16+v4OBgBQQEqFGjRpo8ebLy8/NtzrtixQoNGzZMcXFx8vPz03XXXaf77rtPp06dcur1AygfPFzdAeCfbM6cOYqNjdW6det04MAB1axZ09Vd+sdZuXKlevXqpSeffLJY8TfffLMGDRpk0+br6/u3+2HvuJIUFxf3l/vOmDFDBQUFGjhw4FWd+9lnn9XTTz99VfuWFk2aNFHz5s311ltv6dNPP3V1d4pUpUoVTZgwoVB7pUqVbL5+9NFH1aJFC+Xn52vTpk2aNm2aFi9erO3bt6tSpUo6f/68du7cqVtvvVWxsbFyc3PTb7/9pieeeEJr167VZ599Zj3WU089pZSUFPXt21fXX3+9Dh06pPfff1+LFi3Sli1bbAa3APB3MTgASsjhw4f122+/af78+XrggQc0Z84cPf/8867ull1ZWVny9/d3dTeuyunTpxUSElLs+Li4OP3rX/+65v34O8f95JNPdNttt8nHx+eq9vfw8JCHR9n/57xfv356/vnn9eGHHyogIMDV3bErODi4WN/ndu3aqU+fPpKkoUOHKi4uTo8++qhmzZqlMWPGKCwsTL///rvNPg8++KCCg4P1/vvva9KkSdY3/ZMmTVLbtm1tqkq33HKLOnTooPfff1+vvPLKNbxCAOVd+axfA04wZ84chYaGqnv37urTp4/mzJljNy4tLU1PPPGEYmNj5e3trSpVqmjQoEE201pycnL0wgsvKC4uTj4+PqpYsaLuuOMOHTx4UNLlqQyrV6+2Oba9+elDhgxRQECADh48qFtvvVWBgYG6++67JUk///yz+vbtq6pVq8rb21sxMTF64okndP78+UL93rNnj/r166cKFSrI19dXtWrV0tixYyVJq1atkslk0oIFCwrt99lnn8lkMmnNmjV/mr9Dhw6pb9++CgsLk5+fn2644QYtXrzY+vrMmTNlMplkGIY++OAD6zSOsubw4cPatm2bOnfuXOi1tLQ0DRkyRMHBwQoJCdHgwYOVlpZWKM7eXP9ly5apbdu2CgkJUUBAgGrVqqVnnnnG+npeXp7GjRunZs2aKTg4WP7+/mrXrp1WrVpVZF/ffvttVatWTb6+vurQoYN27NhRrGucPXu2mjVrJl9fX4WFhWnAgAE6fvx4obibb75ZWVlZWrZs2Z8er379+rrxxhsLtVssFlWuXNn6plyS5s6dq2bNmikwMFBBQUFq0KCBJk+eXKx+X0s33XSTpAvf7z8TGxsrSTbf5/bt2xeabta+fXuFhYVp9+7d17SfAFD2P2oCSqk5c+bojjvukJeXlwYOHKgpU6Zo/fr1atGihTXm3LlzateunXbv3q1hw4apadOmSk5O1sKFC3XixAlFRETIbDarR48eWrFihQYMGKDHHntMmZmZWrZsmXbs2KEaNWo43LeCggJ17dpVbdu21Ztvvik/Pz9J0ldffaXs7GyNGDFC4eHhWrdund577z2dOHFCX331lXX/bdu2qV27dvL09NT999+v2NhYHTx4UP/73/80fvx4dezYUTExMZozZ45uv/32QnmpUaOG4uPji+xfUlKSWrdurezsbD366KMKDw/XrFmzdNttt+nrr7/W7bffrvbt2+u///2v7rnnniKn9NiTk5NTaD1BYGCgvL29bdpSUlLs7l/UgmF7x5WkoKAgeXl5Fdmf3377TZLUtGlTm3bDMNSrVy/98ssvevDBB1WnTh0tWLBAgwcPLvJYl+zcuVM9evRQw4YN9dJLL8nb21sHDhzQr7/+ao3JyMjQRx99pIEDB2r48OHKzMzUxx9/rK5du2rdunVq3LixzTE//fRTZWZm6uGHH1ZOTo4mT56sm266Sdu3b1dUVFSRfRk/fryee+459evXT/fdd5/OnDmj9957T+3bt9fmzZttqj5169aVr6+vfv3110I/N1fq37+/XnjhBSUmJtpMqfnll1908uRJDRgwQNKFAdLAgQPVqVMnTZw4UZK0e/du/frrr3rsscf+Mo/2mM3mQt9nHx+fv6x0XBrIh4eH27Tn5eUpIyND58+f14YNG/Tmm2+qWrVqfzkF8dy5czp37pwiIiKu4ioA4E8YAK65DRs2GJKMZcuWGYZhGBaLxahSpYrx2GOP2cSNGzfOkGTMnz+/0DEsFothGIYxY8YMQ5IxadKkImNWrVplSDJWrVpl8/rhw4cNScYnn3xibRs8eLAhyXj66acLHS87O7tQ24QJEwyTyWQcPXrU2ta+fXsjMDDQpu3K/hiGYYwZM8bw9vY20tLSrG2nT582PDw8jOeff77Qea70+OOPG5KMn3/+2dqWmZlpVK9e3YiNjTXMZrO1XZLx8MMP/+nxroy1t12Zn+eff77IuEtb9+7di3VcScbnn3/+p3169tln
DUlGZmamTfs333xjSDJef/11a1tBQYHRrl27Ivt8ydtvv21IMs6cOVPkeQsKCozc3FybttTUVCMqKsoYNmyYte3Sz5Cvr69x4sQJa/vatWsNScYTTzxRZD+OHDliuLu7G+PHj7c5z/bt2w0PD49C7YZhGHFxcUa3bt2K7LdhGMbevXsNScZ7771n0/7QQw8ZAQEB1p/jxx57zAgKCjIKCgr+9HjF1aFDB7vf48GDB1tjLv0uzpgxwzhz5oxx8uRJY/HixUZsbKxhMpmM9evX2xzz888/tzlW8+bNjW3btv1lX15++WVDkrFixYprcm0AcAmVA6AEzJkzR1FRUdapDyaTSf3799fs2bP11ltvyd3dXZI0b948NWrUyO6npJemicybN08RERF65JFHioy5GiNGjCjUduXC3KysLJ0/f16tW7eWYRjavHmzqlatqjNnzuinn37SY489pqpVqxbZn0GDBmnChAn6+uuvde+990qSvvjiCxUUFPzlnO3vvvtOLVu2VNu2ba1tAQEBuv/++zVmzBjt2rVL9evXv6rr7tWrl0aOHGnTVq9evUJx8+bNU1BQUKH2ovpu77iS1KBBgz/tz9mzZ+Xh4VHok+fvvvtOHh4eNt8nd3d3PfLII/r555//9JiXPo3/9ttvNXToULt3QHJ3d7f+HFosFqWlpclisah58+batGlTofjevXurcuXK1q9btmypVq1a6bvvvtOkSZPs9mP+/PmyWCzq16+fzaft0dHRuv7667Vq1SqbqU6SFBoaWuSdoi6Ji4tT48aN9cUXX1hzbjab9fXXX6tnz57Wn+OQkBDrNKU/u4uVI2JjYzV9+nSbtj8uRpakYcOG2XxdoUIFzZo1S82bN7dpv/HGG7Vs2TKlpaVpxYoV2rp1q7Kysv60Dz/99JNefPFF9evXzzpdCQCuFQYHwDVmNps1d+5c3XjjjTbzi1u1aqW33npLK1asUJcuXSRdmGpw5513/unxDh48qFq1al3TBaceHh6qUqVKofZjx45p3LhxWrhwoVJTU21eS09PlyTr7Rj/6s157dq11aJFC82ZM8c6OJgzZ45uuOGGv5wycfToUbu3f61Tp4719asdHFSpUsXu/P4/at++vd0pG0UtGi7ucYvr6NGjqlixYqFBQ61atf5y3/79++ujjz7Sfffdp6efflqdOnXSHXfcoT59+tgMFGbNmqW33npLe/bssbl9ZvXq1Qsd8/rrry/UFhcXpy+//LLIfuzfv1+GYdjdV5I8PT0LtRmGUaxBb//+/fXMM88oISFBlStX1urVq3X69Gn179/fGvPQQw/pyy+/VLdu3VS5cmV16dJF/fr1+1sDBX9//2J9n8eNG6d27drJ3d1dERERqlOnjt3f4aioKOu0rD59+ujVV1/VzTffrP3799u9C9GePXt0++23q379+vroo4+u+joAoCgsSAausZUrV+rUqVOaO3eurr/+eut26QFPRS1M/juKejNlNpvttnt7exf6NNlsNuvmm2/W4sWL9dRTT+mbb77RsmXLrIuZ/+rhXPYMGjRIP/74o06cOKGDBw/q999/L5E7BZVl4eHhKigoUGZm5jU7pq+vr3766SctX75c99xzj7Zt26b+/fvr5ptvtv5MzJ49W0OGDFGNGjX08ccfa8mSJVq2bJluuummq/pe22OxWGQymazH/uP2n//8p9A+qampxZpH379/fxmGYV0L8+WXXyo4ONjmjX9kZKS2bNmihQsX6rbbbtOqVavUrVu3Yq3b+LsaNGigzp0768Ybb1SDBg2KPbjv06ePzp07p2+//bbQa8ePH1eXLl0UHBys7777ToGBgde62wBA5QC41ubMmaPIyEh98MEHhV6bP3++FixYoKlTp8rX11c1atT4yzu+1KhRQ2vXrlV+fr7dT1qlC1MxJBW6k83Ro0eL3e/t27dr3759mjVrls3i3j/eOea6666TpGLdqWbAgAEaNWqUPv/8c50/f16enp42n+wWpVq1atq7d2+h9ksPmqpWrdpfHqOsqF27tqQLd7Fp2LChtb1atWpasWKFzp07Z1M9sJcXe9zc3NSpUyd16tRJkyZN0quvvqqxY8dq1apV6ty5s77++mtdd911mj9/vs3gsqjb7e7fv79Q2759+6x317GnRo0aMgxD1atXL9bzHgoKCnT8+HHddtttfxlbvXp1tWzZ0jq1aP78+erdu3ehheVeXl7q2bOnevbsKYvFooceekj/+c9/9Nxzz5XK545cujPYpUrdJWfPnlWXLl2Um5urFStWqGLFiq7oHoBygMoBcA2dP39e8+fPV48ePdSnT59C28iRI5WZmamFCxdKku68805t3brV7i0/DcOwxiQnJ+v9998vMqZatWpyd3fXTz/9ZPP6hx9+WOy+X5p/fumYl/7+x9s+VqhQQe3bt9eMGTN07Ngxu/25JCIiQt26ddPs2bM1Z84c3XLLLcX6VPjWW2/VunXrbG53mpWVpWnTpik2NlZ169Yt9nWVdpfu2rRhwwab9ltvvVUFBQWaMmWKtc1sNuu99977y2Pau9PSpbsP5ebmSrL//V67dm2Rt5j95ptvlJCQYP163bp1Wrt2rbp161ZkP+644w65u7vrxRdfLPSzYRiGzp49a9O2a9cu5eTkqHXr1n9ydZf1799fv//+u2bMmKHk5ORCA88/Ht/Nzc06ALuUh/z8fO3Zs8fpTxtOTk4ulBNJ1qlCV65NyMrK0q233qqEhAR99913RU7TAoBrgcoBcA0tXLhQmZmZRX7yecMNN6hChQqaM2eO+vfvr//7v//T119/rb59+2rYsGFq1qyZUlJStHDhQk2dOlWNGjXSoEGD9Omnn2rUqFFat26d2rVrp6ysLC1fvlwPPfSQevXqpeDgYPXt21fvvfeeTCaTatSooUWLFun06dPF7nvt2rVVo0YNPfnkk0pISFBQUJDmzZtXaO2BJL377rtq27atmjZtqvvvv1/Vq1fXkSNHtHjxYm3ZssUmdtCgQdb7zr/88svF6svTTz+tzz//XN26ddOjjz6qsLAwzZo1S4cPH9a8efPsLrB1tX379mn27NmF2qOionTzzTcXud91112n+vXra/ny5TaLWHv27Kk2bdro6aef1pEjR1S3bl3Nnz+/0CfK9rz00kv66aef1L17d1WrVk2nT5/Whx9+qCpVqlgXeffo0UPz58/X7bffru7du+vw4cOaOnWq6tatq3PnzhU6Zs2aNdW2bVuNGDFCubm5eueddxQeHq7Ro0cX2Y8aNWrolVde0ZgxY3TkyBH17t1bgYGBOnz4sBYsWKD777/f5snWy5Ytk5+f35/m60r9+vXTk08+qSeffFJhYWGF1gLcd999SklJ0U033aQqVaro6NGjeu+999S4cWPr+pWEhATVqVNHgwcPtnkeSEmbPXu2pk6dqt69e+u6665TZmamli5dqmXLlqlnz542C43vvvturVu3TsOGDdPu3bttnm0QEBCg3r17O63fAMoBV9wiCfin6tmzp+Hj42NkZWUVGTNkyBDD09PTSE5ONgzDMM6ePWu
MHDnSqFy5suHl5WVUqVLFGDx4sPV1w7hwi9GxY8ca1atXNzw9PY3o6GijT58+xsGDB60xZ86cMe68807Dz8/PCA0NNR544AFjx44ddm9l6u/vb7dvu3btMjp37mwEBAQYERERxvDhw42tW7cWOoZhGMaOHTuM22+/3QgJCTF8fHyMWrVqGc8991yhY+bm5hqhoaFGcHCwcf78+eKk0TAMwzh48KDRp08f6/FbtmxpLFq0qFCcHLyV6V/FXrodZ1G3Aa1WrZpDtzLt0KHDX/Zr0qRJNrfgvOTs2bPGPffcYwQFBRnBwcHGPffcY2zevPkvb2W6YsUKo1evXkalSpUMLy8vo1KlSsbAgQONffv2WWMsFovx6quvGtWqVTO8vb2NJk2aGIsWLTIGDx5sVKtWzRp36Vamb7zxhvHWW28ZMTExhre3t9GuXTtj69atdnP3R/PmzTPatm1r+Pv7G/7+/kbt2rWNhx9+2Ni7d69NXKtWrYx//etff5mvK7Vp08aQZNx3332FXvv666+NLl26GJGRkYaXl5dRtWpV44EHHjBOnTpV6PquvB1pUTp06GDUq1fvT2Mu3cr0q6+++tO49evXG3379jWqVq1qeHt7G/7+/kbTpk2NSZMmGfn5+Tax1apVK/Ln68rvFQBcCybDsFPXBIBrpKCgQJUqVVLPnj318ccfu7o7pVJ6erquu+46vf7669Y7O5U3W7ZsUdOmTbVp06ZCD2ADADhP6avNA/hH+eabb3TmzJliP8G4PAoODtbo0aP1xhtvXLM7BZU1r732mvr06cPAAABcjMoBgBKxdu1abdu2TS+//LIiIiLsPlgLAACULlQOAJSIKVOmaMSIEYqMjNSnn37q6u4AAIBioHIAAAAAQBKVAwAAAAAXMTgAAAAAIInBAQAAAICLGBwAAAAAkMTgAAAAAMBFDA5KADeAQkk6deqUdu3a5epulClms1kSv5uOyM7OVl5enqu7UeacOHFCmzdvdnU3AOCqMTi4RrKyspSZmamMjAyZTCZXd6dMSElJ0Z49e7R//37ehBRTQkKCGjRooGeffVYbNmxwdXfKhC1btqh3797Kzs7md7OYduzYoX79+un3339Xbm6uq7tTZuzcuVOtW7fW7NmzJancPu26uE6cOKEvv/xS8+fP1/bt213dHQAXMTi4Bnbt2qU77rhDHTp0UJ06dTRnzhxJfEr5Z3bs2KHOnTurX79+atCggV5//XXrp7so2v79+5Wenq709HS99957Nk8d5uetsK1bt6p169aqV6+e/Pz8rO3kqmg7d+5Uu3btVKVKFVWvXl3e3t6u7lKZsHXrVrVs2VIeHh767LPPdPr0abm58V9sUbZv3662bdvqjTfe0EMPPaSxY8fq4MGDru4WADE4+Nt27dql9u3bq169enryySc1YMAADR06VFu2bOFTyiLs2rVLHTt2VKdOnTR37lyNHz9e48aN08mTJ13dtVKvYcOGuvXWW9W/f3/t2LFDkyZN0s6dOyXxhvePtm3bpjZt2mjkyJF67bXXrO15eXn8bhYhKytLo0aN0sCBAzV16lTFxMRoz5492rJli44dO+bq7pVaW7duVXx8vB5//HGtW7dO4eHhmj59ugzD4PfSjqNHj6pbt24aOHCgVq9erU8++UTr16/X2bNnXd01AOIJyX9LSkqKBg4cqNq1a2vy5MnW9htvvFENGjTQu+++K8MweCNyheTkZN15551q0qSJ3nnnHUkX3tTeeuutGjdunHx9fRUeHq6YmBjXdrQUMpvNSklJUdu2bbVy5UqtW7dOEyZMUOPGjbVz505VrFhRX3/9tau7WSokJiaqSZMmatSokZYsWSKz2awnn3xS+/fv18GDB/XAAw/olltuUe3atV3d1VIlNzdXnTt31rvvvquGDRuqe/fu1ul/9erV03333ad7773X1d0sVbZt26aWLVvq3//+t8aPHy+LxaL+/fvr6NGjWrdunSTx/8AfTJs2TZ9//rlWrlxpzUv37t3Vq1cv+fj4KCYmRjfeeKOLewmUXx6u7kBZlp+fr7S0NPXp00fShfmlbm5uql69ulJSUiSJ/xD+wGQy6ZZbbrHmTJJeeeUVLV26VImJiUpOTla9evX07LPPqm3bti7saenj5uamChUqqEWLFtqxY4duv/12eXt7a/DgwcrNzdXw4cNd3cVSJT4+XsePH9e3336rqVOnKj8/X40bN1ZsbKzeffdd7dixQ+PGjVPVqlVd3dVSIy0tTXv37lVycrL+7//+T5L00Ucf6eTJk1q5cqWeffZZBQcH2/z+lne5ubkaPXq0XnrpJev/Aa+88opatWqlKVOmaMSIEfw/8AeGYejYsWPasmWLmjRpovHjx+v7779XXl6e0tPTdfToUU2cOFFDhgxxdVeBcolpRX9DVFSUZs+erXbt2km6fEeUypUrF5preu7cOaf3rzQKDw/XyJEjdf3110uS5s6dq+eff15z587VihUrNGfOHKWkpGjFihUu7mnpc+kNhru7u1avXi1Jmj9/vsxms2JiYvTzzz9bP6ks76Kjo/XBBx+obt26GjhwoMxms7744gu9+eabev/99/XKK69o3rx51ilZuCAyMlKdOnXSwoULtX//fj3xxBNq2LChbrnlFj366KPq3LmzVqxYIbPZzHSZi1q0aKGXXnpJ0oUBvGEYio6O1o033qjVq1eTKzu6dOmi6Oho9evXT3369NFzzz2nBQsW6IcfftCiRYs0YMAAzZo1S2fPniV3gAtQOfibLr3JtVgs8vT0lHThU5HTp09bYyZMmCBvb289+uij8vAg5YGBgda/x8fHa8OGDWratKkkqX379oqMjNTGjRtd1b1S69LUhJtuukmHDx/WQw89pO+++04bN27Uli1b9H//93/y8vJSw4YN5ePj4+ruulzFihU1YcIEVa5cWZ07d1Z4eLg1h3fddZeef/55rVq1St26dXN1V0sNk8mkf//73+rYsaOys7N1//33W1+rUqWKoqKitH79erm5ufFpeBFMJpOCg4N1zz33qE+fPnr00UfVpk0bV3erVKlevbpmz56t9evXa9euXTKZTOrVq5ekCwPUSpUq6ccff5S/vz8/Z4AL8E71Grn0idGlf8guVQ7GjRunV155RZs3b2ZgYEe1atVUrVo1SRcGWHl5eQoICFDDhg1d3LPS59LPVvXq1TV06FBFRUVp0aJFql69uqpXry6TyaRGjRoxMLhCpUqV9PTTT1tzYjKZZBiGUlJSVKFCBTVu3Ni1HSyFmjdvru+//14dOnTQtGnTdN1116levXqSLkyljIuLU0FBgfXDENjXo0cP3XzzzZoyZYqaNm0qX19fV3epVLn079ZHH32kDRs2KC8vT15eXpKkpKQkxcbGcgc7wEV4t3oNXRoceHh4KCYmRm+++aZef/11bdiwQY0aNXJ190o9Nzc3vfrqq1qzZo1efvllV3en1IqPj9dHH32k5s2bq2HDhtafu969e7u6a6VSUFCQzdcmk0nvvvuukpOT+US3CO3atdPq1as1cOBADRs2TA0aNFBeXp
4WLlyoX375hYFBMXh5eenGG2/UhAkTlJ6ezuCgCK1bt9aTTz6pyZMnKzo6Wjt27NAnn3yin376Sf7+/q7uHlAuMTi4hi5VCzw9PTV9+nQFBQXpl19+sU6ZQdG++uor/fjjj5o7d66WLVtmna6Fwjw9PTVkyBDrzxtl9+KbO3euVq1apa+++korVqywVq1QWPv27bVy5UrNnj1bv//+u66//nr98ssvql+/vqu7VupdGrA/8MAD+vrrr5WTk+PqLpVadevW1YIFCzR8+HC5ubmpcuXK+vHHH9WgQQNXdw0ot7iVaQnYsGGDWrZsqR07dqhu3bqu7k6ZsHPnTr300kt64YUXVKdOHVd3B/9Q27Zt0zPPPKOJEydap8rgr1160i8P9XKMYRjKzs7mE/BiSElJUX5+vry9vRUSEuLq7gDlGoODEpKVlcV/CA7Kz89nugJK3JVzmwEAgC0GBwAAAAAk8ZwDAAAAABcxOAAAAAAgicEBAAAAgIsYHAAAAACQxOAAAAAAwEUMDkpIbm6uXnjhBeXm5rq6K2UGOXMcOXMcOXMcOXMcOXMcOXMcOUNJ4FamJSQjI0PBwcFKT09XUFCQq7tTJpAzx5Ezx5Ezx5Ezx5Ezx5Ezx5Gzf7affvpJb7zxhjZu3KhTp05pwYIF6t2795/us3r1ao0aNUo7d+5UTEyMnn32WQ0ZMsSh81I5AAAAAEqZrKwsNWrUSB988EGx4g8fPqzu3bvrxhtv1JYtW/T444/rvvvu09KlSx06r8fVdBYAAABAyenWrZu6detW7PipU6eqevXqeuuttyRJderU0S+//KK3335bXbt2LfZxyvTgwGKx6OTJkwoMDJTJZHJ1d2xkZGTY/Im/Rs4cR84cR84cR84cR84cR84cV1pzZhiGMjMzValSJbm5lb5JKjk5OcrLy3PJuQ3DKPSe1dvbW97e3n/72GvWrFHnzp1t2rp27arHH3/coeOU6cHByZMnFRMT4+pu/KnS3r/SiJw5jpw5jpw5jpw5jpw5jpw5rrTm7Pjx46pSpYqru2EjJydH1asFKPG02SXnDwgI0Llz52zann/+eb3wwgt/+9iJiYmKioqyaYuKilJGRobOnz8vX1/fYh2nTA8OAgMDJUltdas85Oni3gAAAKBA+fpF31nfp5UmeXl5Sjxt1tGNsQoKdG5VIyPTomrNjuj48eM2C8ivRdXgWirTg4NLZRkPecrDxOAAAADA5S7eB7O0Tfm+UkCgSQGBzu2fRRfOFxQUVCJ3l4qOjlZSUpJNW1JSkoKCgopdNZC4WxEAAABQ5sXHx2vFihU2bcuWLVN8fLxDxynTlQMAAADAUWbDIrOTn/RlNiwOxZ87d04HDhywfn348GFt2bJFYWFhqlq1qsaMGaOEhAR9+umnkqQHH3xQ77//vkaPHq1hw4Zp5cqV+vLLL7V48WKHzkvlAAAAAChlNmzYoCZNmqhJkyaSpFGjRqlJkyYaN26cJOnUqVM6duyYNb569epavHixli1bpkaNGumtt97SRx995NBtTCUqBwAAAECp07FjRxlG0eWNmTNn2t1n8+bNf+u8DA4AAABQrlhkyCLnzity9vmuFtOKAAAAAEiicgAAAIByxiKLHFsefG3OWRZQOQAAAAAgicEBAAAAgIuYVgQAAIByxWwYMv/JnYBK6pxlAZUDAAAAAJKoHAAAAKCc4VamRaNyAAAAAEASlQMAAACUMxYZMlM5sIvKAQAAAABJDA4AAAAAXMS0IgAAAJQrLEguGpUDAAAAAJKoHAAAAKCc4SFoRaNyAAAAAEASgwMAAAAAFzGtCAAAAOWK5eLm7HOWBVQOAAAAAEiicgAAAIByxuyCJyQ7+3xXi8oBAAAAAElUDgAAAFDOmI0Lm7PPWRZQOQAAAAAgicEBAAAAgIuYVgQAAIByhVuZFo3KAQAAAABJDA6K7baHuuq/hz7Q4uw5enfNq6rVouafxrfvc4M+3vWOFmfP0bStb6lltyaFYga/2F9zE6ZpUdYcTfzhOVWuGV1S3XcJcuY4cuY4cuY4cuY4cuY4cuY4cuY8FplkdvJmkcnVl10sLh0cTJgwQS1atFBgYKAiIyPVu3dv7d2715VdsqtDv9Z64K3Bmv3SVxrR7Ckd2nZUE5aMVUiFILvxdePj9Mxnj2vJjJUa0XS0fv12nV5YMFqx9WKsMf1H91LvR7pp8ohpeuSGMcrJytWEJc/K09vTWZdVosiZ48iZ48iZ48iZ48iZ48iZ48gZSguXDg5+/PFHPfzww/r999+1bNky5efnq0uXLsrKynJltwq584ke+v6jFVo6c7WO7T6hyQ9OU252nroOu8lu/O2Pdtf6JVv01ZsLdWxPgmaN+0IHNh1Sr5G3XI55rLvmjJ+nNQs36PD2Y5o4+H2FVwpVm94tnHVZJYqcOY6cOY6cOY6cOY6cOY6cOY6cobRw6eBgyZIlGjJkiOrVq6dGjRpp5syZOnbsmDZu3OjKbtnw8PRQXLPrtGn5NmubYRjatHyb6t4QZ3efuvFx2rRim03bhh+2qs7F+OjqkQqvGKrNy7dbX8/OyNaetQdUN75WCVyFc5Ezx5Ezx5Ezx5Ezx5Ezx5Ezx5Ez57MYrtnKglK15iA9PV2SFBYWZvf13NxcZWRk2GwlLTgiUO4e7kpNSrdpTz2drtDoELv7hEaHKO2P8UlpCrsYf+nP1KS0QjGhUfaPWZaQM8eRM8eRM8eRM8eRM8eRM8eRM5QmpWZwYLFY9Pjjj6tNmzaqX7++3ZgJEyYoODjYusXExNiNAwAAAIri7MXIl7ayoNQMDh5++GHt2LFDc+fOLTJmzJgxSk9Pt27Hjx8v8X6lJ2fKXGBWaFSwTXtoZLBSE9Ps7pOamKaQP8ZHhSjlYvylP/84cg+NCik0wi+LyJnjyJnjyJnjyJnjyJnjyJnjyBlKk1IxOBg5cqQWLVqkVatWqUqVKkXGeXt7KygoyGYraQX5Bdq38ZCadGpgbTOZTGrSqYF2/b7P7j671uxTk5sa2LQ17dxQuy/GJx4+rbOnUtWk0+UKiV+gr2q3qqlda0rf3ZocRc4cR84cR84cR84cR84cR84cR86cj8pB0Vw6ODAMQyNHjtSCBQu0cuVKVa9e3ZXdKdK8txfp1vs66eZBHVS1dmU9OmW4fPy9tfSTVZKk0TNHatird1njF7y7WC1uaaw+o3ooplYl3fN8X8U1r6Fv319yOWbyYt019k7F92yu2PpVNXrWSJ09mapfv1nv9OsrCeTMceTMceTMceTMceTMceTMceQMpYWHK0/+8MMP67PPPtO3336rwMBAJSYmSpKCg4Pl6+vryq7Z+PHL3xRSIUiDX+yv0OgQHdxyRM90G6+00xcWAkVWjZBxxRL0XWv2acLdkzXk5YEaOv4uJew/pRduf11Hdl6eBvXF69/Kx99Hj//nAQWE+GnHL3s0ptt45efmO/36SgI5cxw5cxw5cxw5cxw5cxw5cxw5Q2lhMgzDZTdWMpnsl1c++eQTDRky5C/3z8jIUHBwsDqqlzxMPNADAADA1QqMfK3Wt0pPT3fKF
HBHXHrv+MuOSgoIdO4EmnOZFrWtf7JU5uVKLq0cuHBcAgAAAOAPXDo4AAAAAJzNFQuEWZAMAAAAoExhcAAAAABAEtOKAAAAUM6Y5Sazkz8jNzv1bFePygEAAAAASVQOAAAAUM4YhkkWw7kLhA0nn+9qUTkAAAAAIInBAQAAAICLmFYEAACAcoXnHBSNygEAAAAASVQOAAAAUM6YDTeZDSffytRw6umuGpUDAAAAAJKoHAAAAKCcscgki5M/I7eobJQOqBwAAAAAkMTgAAAAAMBFTCsCAABAucKtTItG5QAAAACAJCoHAAAAKGdccytTFiQDAAAAKEMYHAAAAACQxLQiAAAAlDMXnnPg3AXCzj7f1aJyAAAAAEASlQMAAACUMxa5ycwTku2icgAAAABAEpUDAAAAlDPcyrRoVA4AAAAASGJwAAAAAOAiphUBAACgXLHITRYWJNtF5QAAAACAJCoHAAAAKGfMhklmw7kPJXP2+a4WlQMAAAAAkhgcAAAAALiIaUUAAAAoV8wueEKymQXJAAAAAMoSKgcAAAAoVyyGmyxOfkKyhSckAwAAAChLqBwAAACgXGHNQdGoHAAAAACQxOAAAAAAwEVMKwIAAEC5YpHzn1hscerZrh6VAwAAAACSqBwAAACgnLHITRYnf0bu7PNdrbLRSwAAAAAljsEBAAAAAElMKwIAAEA5YzbcZHbyE5Kdfb6rVTZ6CQAAAKDEUTkAAABAuWKRSRY5+1amzj3f1aJyAAAAAEASlQMAAACUM6w5KFrZ6CUAAACAEsfgAAAAAIAkphUBAACgnDHLTWYnf0bu7PNdrbLRSwAAAAAljsoBAAAAyhWLYZLFcPKtTJ18vqtF5QAAAACAJAYHAAAAAC5iWhEAAADKFYsLFiRbyshn8mWjlwAAAABKHJUDAAAAlCsWw00WJz+x2Nnnu1plo5cAAAAAShyVAwAAAJQrZplklnNvLers810tKgcAAAAAJDE4AAAAAHAR04oAAABQrrAguWhlo5cAAAAAShyDg2K67aGu+u+hD7Q4e47eXfOqarWo+afx7fvcoI93vaPF2XM0betbatmtSaGYwS/219yEaVqUNUcTf3hOlWtGl1T3XYKcOY6cOY6cOY6cOY6cOY6cOY6cOY9ZlxclO28rG1w6OJgyZYoaNmyooKAgBQUFKT4+Xt9//70ru2RXh36t9cBbgzX7pa80otlTOrTtqCYsGauQCkF24+vGx+mZzx7XkhkrNaLpaP367Tq9sGC0YuvFWGP6j+6l3o900+QR0/TIDWOUk5WrCUuelae3p7Muq0SRM8eRM8eRM8eRM8eRM8eRM8eRM5QWLh0cVKlSRa+99po2btyoDRs26KabblKvXr20c+dOV3arkDuf6KHvP1qhpTNX69juE5r84DTlZuep67Cb7Mbf/mh3rV+yRV+9uVDH9iRo1rgvdGDTIfUaecvlmMe6a874eVqzcIMObz+miYPfV3ilULXp3cJZl1WiyJnjyJnjyJnjyJnjyJnjyJnjyBlKC5cODnr27Klbb71V119/veLi4jR+/HgFBATo999/d2W3bHh4eiiu2XXatHybtc0wDG1avk11b4izu0/d+DhtWrHNpm3DD1tV52J8dPVIhVcM1ebl262vZ2dka8/aA6obX6sErsK5yJnjyJnjyJnjyJnjyJnjyJnjyJnzXVqQ7OytLCg1vTSbzZo7d66ysrIUHx9vNyY3N1cZGRk2W0kLjgiUu4e7UpPSbdpTT6crNDrE7j6h0SFK+2N8UprCLsZf+jM1Ka1QTGiU/WOWJeTMceTMceTMceTMceTMceTMceQMpYnLb2W6fft2xcfHKycnRwEBAVqwYIHq1q1rN3bChAl68cUXndxDAAAA/JOYDTeZnfxJvrPPd7Vc3statWppy5YtWrt2rUaMGKHBgwdr165ddmPHjBmj9PR063b8+PES7196cqbMBWaFRgXbtIdGBis1Mc3uPqmJaQr5Y3xUiFIuxl/6848j99CokEIj/LKInDmOnDmOnDmOnDmOnDmOnDmOnKE0cfngwMvLSzVr1lSzZs00YcIENWrUSJMnT7Yb6+3tbb2z0aWtpBXkF2jfxkNq0qmBtc1kMqlJpwba9fs+u/vsWrNPTW5qYNPWtHND7b4Yn3j4tM6eSlWTTvWtr/sF+qp2q5ratWZvCVyFc5Ezx5Ezx5Ezx5Ezx5Ezx5Ezx5Ez5zNkksXJmyGTqy+7WFw+OPgji8Wi3NxcV3fDxry3F+nW+zrp5kEdVLV2ZT06Zbh8/L219JNVkqTRM0dq2Kt3WeMXvLtYLW5prD6jeiimViXd83xfxTWvoW/fX3I5ZvJi3TX2TsX3bK7Y+lU1etZInT2Zql+/We/06ysJ5Mxx5Mxx5Mxx5Mxx5Mxx5Mxx5AylhUvXHIwZM0bdunVT1apVlZmZqc8++0yrV6/W0qVLXdmtQn788jeFVAjS4Bf7KzQ6RAe3HNEz3cYr7fSFhUCRVSNkWAxr/K41+zTh7ska8vJADR1/lxL2n9ILt7+uIzsvT4P64vVv5ePvo8f/84ACQvy045c9GtNtvPJz851+fSWBnDmOnDmOnDmOnDmOnDmOnDmOnMGeDz74QG+88YYSExPVqFEjvffee2rZsmWR8e+8846mTJmiY8eOKSIiQn369NGECRPk4+NT7HOaDMMw/jqsZNx7771asWKFTp06peDgYDVs2FBPPfWUbr755mLtn5GRoeDgYHVUL3mYeKAHAACAqxUY+Vqtb5Wenu6UKeCOuPTe8f9+6y7vAOe+d8w9l683Wi8udl6++OILDRo0SFOnTlWrVq30zjvv6KuvvtLevXsVGRlZKP6zzz7TsGHDNGPGDLVu3Vr79u3TkCFDNGDAAE2aNKnY/XRp5eDjjz925ekBAACAUmnSpEkaPny4hg4dKkmaOnWqFi9erBkzZujpp58uFP/bb7+pTZs2uuuuC9PPYmNjNXDgQK1du9ah85a6NQcAAABASbIYJpdskgo9s8veWtu8vDxt3LhRnTt3tra5ubmpc+fOWrNmjd1rat26tTZu3Kh169ZJkg4dOqTvvvtOt956q0O5YXAAAAAAOElMTIyCg4Ot24QJEwrFJCcny2w2KyoqyqY9KipKiYmJdo9711136aWXXlLbtm3l6empGjVqqGPHjnrmmWcc6p/LH4IGAAAAlBfHjx+3WXPg7e19TY67evVqvfrqq/rwww/VqlUrHThwQI899phefvllPffcc8U+DoMDAAAAlCtmucns5Ak0l85XnGd1RUREyN3dXUlJSTbtSUlJio6OtrvPc889p3vuuUf33XefJKlBgwbKysrS/fffr7Fjx8rNrXjXy7QiAAAAoBTx8vJSs2bNtGLFCmubxWLRihUrFB8fb3ef7OzsQgMAd3d3SZIjNyelcgAAAIBy5coFws48pyNGjRqlwYMHq3nz5mrZsqXeeecdZWVlWe9eNGjQIFWuXNm6ZqFnz56aNGmSmjRpYp1W9Nxzz6lnz57WQUJxMDgAAAAASpn+/fvrzJkzGjdunBITE9W4cWMtWbLE
ukj52LFjNpWCZ599ViaTSc8++6wSEhJUoUIF9ezZU+PHj3fovC59CNrfxUPQAAAASpey8BC0R3/p5ZKHoL3btnTm5UpUDgAAAFCuWOQmi5OX3jr7fFerbPQSAAAAQImjcgAAAIByxWyYZHbygmRnn+9qUTkAAAAAIInKAQAAAMqZsnArU1ehcgAAAABAEoMDAAAAABcxrQgAAADlimG4yWI49zNyw8nnu1plo5cAAAAAShyVAwAAAJQrZplklpNvZerk810tKgcAAAAAJDE4AAAAAHAR04oAAABQrlgM5z93wGI49XRXjcoBAAAAAElUDgAAAFDOWFxwK1Nnn+9qlY1eAgAAAChxVA4AAABQrlhkksXJtxZ19vmuFpUDAAAAAJIYHAAAAAC4iGlFAAAAKFfMhklmJ9/K1Nnnu1pUDgAAAABIonIAAACAcoZbmRatbPQSAAAAQIljcAAAAABAEtOKAAAAUM5YZJLFyQuEec4BAAAAgDKFygEAAADKFcMFT0g2qBwAAAAAKEuoHAAAAKBcsRguWHPAQ9AAAAAAlCUMDgAAAABIYloRAAAAyhmekFy0stFLAAAAACWOygEAAADKFRYkF43KAQAAAABJDA4AAAAAXMS0IgAAAJQrFhc8IdnZ57taVA4AAAAASKJyAAAAgHKGBclFo3IAAAAAQBKVAwAAAJQzVA6KRuUAAAAAgCQGBwAAAAAuYloRAAAAyhWmFRWNygEAAAAASVQOAAAAUM5QOSgalQMAAAAAkhgcAAAAALiIaUUAAAAoVwxJFjl3mo/h1LNdPSoHAAAAACRROQAAAEA5w4LkolE5AAAAACCJygEAAADKGSoHRaNyAAAAAEASgwMAAAAAFzGtCAAAAOUK04qKRuUAAAAAgCQGB8V220Nd9d9DH2hx9hy9u+ZV1WpR80/j2/e5QR/vekeLs+do2ta31LJbk0Ixg1/sr7kJ07Qoa44m/vCcKteMLqnuuwQ5cxw5cxw5cxw5cxw5cxw5cxw5c55LlQNnb2VBqRkcvPbaazKZTHr88cdd3ZVCOvRrrQfeGqzZL32lEc2e0qFtRzVhyViFVAiyG183Pk7PfPa4lsxYqRFNR+vXb9fphQWjFVsvxhrTf3Qv9X6kmyaPmKZHbhijnKxcTVjyrDy9PZ11WSWKnDmOnDmOnDmOnDmOnDmOnDmOnKG0KBWDg/Xr1+s///mPGjZs6Oqu2HXnEz30/UcrtHTmah3bfUKTH5ym3Ow8dR12k9342x/trvVLtuirNxfq2J4EzRr3hQ5sOqReI2+5HPNYd80ZP09rFm7Q4e3HNHHw+wqvFKo2vVs467JKFDlzHDlzHDlzHDlzHDlzHDlzHDlDaeHywcG5c+d09913a/r06QoNDXV1dwrx8PRQXLPrtGn5NmubYRjatHyb6t4QZ3efuvFx2rRim03bhh+2qs7F+OjqkQqvGKrNy7dbX8/OyNaetQdUN75WCVyFc5Ezx5Ezx5Ezx5Ezx5Ezx5Ezx5Ez5zMMk0u2ssDlg4OHH35Y3bt3V+fOnf8yNjc3VxkZGTZbSQuOCJS7h7tSk9Jt2lNPpys0OsTuPqHRIUr7Y3xSmsIuxl/6MzUprVBMaJT9Y5Yl5Mxx5Mxx5Mxx5Mxx5Mxx5Mxx5AyliUtvZTp37lxt2rRJ69evL1b8hAkT9OKLL5ZwrwAAAPBPZpFJFjn5VqZOPt/Vclnl4Pjx43rsscc0Z84c+fj4FGufMWPGKD093bodP368hHsppSdnylxgVmhUsE17aGSwUhPT7O6TmpimkD/GR4Uo5WL8pT//OHIPjQopNMIvi8iZ48iZ48iZ48iZ48iZ48iZ48gZShOXDQ42btyo06dPq2nTpvLw8JCHh4d+/PFHvfvuu/Lw8JDZbC60j7e3t4KCgmy2klaQX6B9Gw+pSacG1jaTyaQmnRpo1+/77O6za80+NbmpgU1b084NtftifOLh0zp7KlVNOtW3vu4X6KvarWpq15q9JXAVzkXOHEfOHEfOHEfOHEfOHEfOHEfOnI9bmRbNZYODTp06afv27dqyZYt1a968ue6++25t2bJF7u7urupaIfPeXqRb7+ukmwd1UNXalfXolOHy8ffW0k9WSZJGzxypYa/eZY1f8O5itbilsfqM6qGYWpV0z/N9Fde8hr59f8nlmMmLddfYOxXfs7li61fV6FkjdfZkqn79pnhTrEo7cuY4cuY4cuY4cuY4cuY4cuY4cobSwmVrDgIDA1W/fn2bNn9/f4WHhxdqd7Ufv/xNIRWCNPjF/gqNDtHBLUf0TLfxSjt9YSFQZNUIGRbDGr9rzT5NuHuyhrw8UEPH36WE/af0wu2v68jOy9Ogvnj9W/n4++jx/zyggBA/7fhlj8Z0G6/83HynX19JIGeOI2eOI2eOI2eOI2eOI2eOI2coLUyGYRh/HeYcHTt2VOPGjfXOO+8UKz4jI0PBwcHqqF7yMPFADwAAAFcrMPK1Wt8qPT3dKVPAHXHpvWPLBY/Jw9/bqecuyMrVutsnl8q8XMmldyv6o9WrV7u6CwAAAEC5VaoGBwAAAEBJc8UCYRYkAwAAAChTGBwAAAAAkMS0IgAAAJQzhmGS4eRpPs4+39WicgAAAABAEpUDAAAAlDOGCxYkUzkAAAAAUKYwOAAAAAAgiWlFAAAAKGcMSYbh/HOWBVQOAAAAAEiicgAAAIByxiKTTHLyE5KdfL6rReUAAAAAgCQqBwAAAChneAha0agcAAAAAJDE4AAAAADARUwrAgAAQLliMUwyOXmaj7OfyHy1qBwAAAAAkETlAAAAAOWMYbjgIWhl5CloVA4AAAAASGJwAAAAAOAiphUBAACgXOE5B0WjcgAAAABAEpUDAAAAlDNUDopG5QAAAACAJCoHAAAAKGd4CFrRqBwAAAAAkMTgAAAAAMBFTCsCAABAucITkotG5QAAAACAJCoHAAAAKGcuVA6cfStTp57uqlE5AAAAACCJygEAlBqWto1d3YUyxyMjx9VdKJMKgnxc3YUyx+tEiqu7UHZYcqWjru4ErhaDAwAAAJQrPCG5aEwrAgAAACCJygEAAADKGePi5uxzlgVUDgAAAABIonIAAACAcoY1B0WjcgAAAABAEoMDAAAAABcxrQgAAADlCyuSi0TlAAAAAIAkKgcAAAAob1ywIFksSAYAAABQljA4AAAAAEqhDz74QLGxsfLx8VGrVq20bt26P41PS0vTww8/rIoVK8rb21txcXH67rvvHDon04oAAABQrhjGhc3Z53TEF198oVGjRmnq1Klq1aqV3nnnHXXt2lV79+5VZGRkofi8vDzdfPPNioyM1Ndff63KlSvr6NGjCgkJcei8DA4AAACAUmbSpEkaPny4hg4dKkmaOnWqFi9erBkzZujpp58uFD9jxgylpKTot99+k6enpyQpNjbW4fMyrQgAAADlyqUnJDt7k6SMjAybLTc3t1D/8vLytHHjRnXu3Nna5ubmps6dO2vNmjV2r2nhwoWKj4/Xww8/rKioKNWvX1+vvvqqzGazQ7lhcAAAAAA4SUxMjIKDg63bhAkTCsU
kJyfLbDYrKirKpj0qKkqJiYl2j3vo0CF9/fXXMpvN+u677/Tcc8/prbfe0iuvvOJQ/5hWBAAAgPLFMDn/1qIXz3f8+HEFBQVZm729va/J4S0WiyIjIzVt2jS5u7urWbNmSkhI0BtvvKHnn3++2MdhcAAAAAA4SVBQkM3gwJ6IiAi5u7srKSnJpj0pKUnR0dF296lYsaI8PT3l7u5ubatTp44SExOVl5cnLy+vYvWPaUUAAABAKeLl5aVmzZppxYoV1jaLxaIVK1YoPj7e7j5t2rTRgQMHZLFYrG379u1TxYoViz0wkBgcAAAAoJy5dCtTZ2+OGDVqlKZPn65Zs2Zp9+7dGjFihLKysqx3Lxo0aJDGjBljjR8xYoRSUlL02GOPad++fVq8eLFeffVVPfzwww6dl2lFAAAAQCnTv39/nTlzRuPGjVNiYqIaN26sJUuWWBcpHzt2TG5ulz/nj4mJ0dKlS/XEE0+oYcOGqly5sh577DE99dRTDp2XwQEAAADKF+Pi5uxzOmjkyJEaOXKk3ddWr15dqC0+Pl6///674ye6AtOKAAAAAEhicAAAAADgIqYVAQAAoFy58onFzjxnWUDlAAAAAIAkKgcAAAAoj5y9ILmMoHIAAAAAQBKVAwAAAJQzrDkoGpUDAAAAAJIYHAAAAAC4iGlFAAAAKF/KyBOSXYHKAQAAAABJVA6K7baHuqrvk7cpLDpEB7ce1QePztDe9QeKjG/f5wYNfmmAomMrKGF/oj56erbWfb/ZJmbwi/3V7b5OCgjx185f9+jdh6Yr4UBiSV+K05Azx5Ezx5Ezx/W6vZn6DbhBYWEBOngwSe9N/kF7d5+0G1stNkJD7u2guLhoRVcM0Qfv/aD5X623iXFzM2nQ0Pbq3KW+wsL8dTb5nJZ+v02zP/3FGZfjFD37t1SfwW0VFh6gQ/sS9eHExdq7I8FubLUakRo04ibVrFtJ0ZVCNfWN77Rgzpoij91vaDvd+1gXLZjzm6a+8X1JXYLT8XPmuB73tFGf4R0VWiFQh3af1JQXFmjftuNFxrft1lCDRnVTVJVQJRxJ1icTF2n96j3W10MiAjRsdA81bRcn/yBf7Vh3SFNeXKCTR5KdcTmlnOni5uxzln4urRy88MILMplMNlvt2rVd2SW7OvRrrQfeGqzZL32lEc2e0qFtRzVhyViFVAiyG183Pk7PfPa4lsxYqRFNR+vXb9fphQWjFVsvxhrTf3Qv9X6kmyaPmKZHbhijnKxcTVjyrDy9PZ11WSWKnDmOnDmOnDmu40119ODDnfXpzJ/14H0f6+CB05r45gCFhPjZjffx8dSpk6n66D+rdPbsObsxA+6K1229muq9t5dq6D3/0fSpK9X/rht0+53NS/JSnKZDl/q6/9/dNOc/q/TwwCk6tC9R4z8crOBQf7vx3j6eOpWQqhmTl+nsmcw/PXZcvcrq3qeFDu395ww+JX7Orkb77o11/zO3ac67P+iRnm/r8O6TemXW/QoOD7AbX6dprJ6e/C8t/XKtRvaYpDU/7NBzU4eqWly0NWbc1KGKrhqmlx74RCN7TNLphFS9+t8H5O3r5azLQhnk8mlF9erV06lTp6zbL7+Uvk8A7nyih77/aIWWzlytY7tPaPKD05Sbnaeuw26yG3/7o921fskWffXmQh3bk6BZ477QgU2H1GvkLZdjHuuuOePnac3CDTq8/ZgmDn5f4ZVC1aZ3C2ddVokiZ44jZ44jZ47r06+Vvlu0RUu/36ajR5P1zlvfKTenQLd0b2Q3fu+eU5o2ZaVWrdyl/LwCuzH16lfRb7/u09rfDygpMV0//bhHG9YfVu06lUryUpzmjntaa8n8Dfrh2806duiM3n3lf8rNyVfX3k3txu/bmaCP3l6qH5duV36+/ZxJko+vl556tY/eeekbZWaeL6nuuwQ/Z467/d72+v6L37Xs6/U6diBJ7z07T7nn89Wlb0u78b2GtNOGn/Zq3vTVOn7wtP779hId3JmgnoPaSJIqV49Qnaaxev+5edq37bgSDp/R+8/Nk7e3pzr2bOLMS0MZ4/LBgYeHh6Kjo61bRESEq7tkw8PTQ3HNrtOm5dusbYZhaNPybap7Q5zdferGx2nTim02bRt+2Ko6F+Ojq0cqvGKoNi/fbn09OyNbe9YeUN34WiVwFc5FzhxHzhxHzhzn4eGmuLiK2rThsLXNMKRNGw+rbr0qV33cnTtOqEnTWFWpEiZJuq5GpBo0qKJ1aw/+7T67moeHu66vU0mb1h6ythmGoc1rD6puw5g/2fOvjXymh9b9vE+brzj2PwE/Z47z8HTX9fWraMuv+61thmFoy6/7VKdJNbv71GlaTVt+3WfTtvHnvarTJFaS5Ol1YeZ4fu7lwZZhGMrPM6te8+rX+ArKIMNFWxng8jUH+/fvV6VKleTj46P4+HhNmDBBVatWtRubm5ur3Nxc69cZGRkl3r/giEC5e7grNSndpj31dLpiale2u09odIjS/hiflKaw6BBJsv6ZmpRWKCY0KuRadNulyJnjyJnjyJnjgoP95O7hptTULJv21JQsxVQNv+rjfj7nN/n5e+uT2Q/KYrHIzc1NM6av1oplO/9ul10uKNRP7h7uSvvDVJfUs+cUE3v1H2Z16NpANWtX0iN3T/27XSx1+DlzXFCo/4V/z5Jtp6GlJp9TlRqRdvcJjQhUavIffi6TMxVaIVCSdPzgaSUlpGjI/92q98Z+rZzzebp9WHtVqBSisEj7Uy8BycWDg1atWmnmzJmqVauWTp06pRdffFHt2rXTjh07FBgYWCh+woQJevHFF13QUwBAUTreWFedbq6vV1/6RkeOnFGNmlF6+JGbdfZspn5Ysv2vD1DOVIgK0ojRt2rMgzOLnEKDwvg5c4y5wKJXRszS46/101dbXpG5wKzNv+7X+tW7Xd210oFbmRbJpYODbt26Wf/esGFDtWrVStWqVdOXX36pe++9t1D8mDFjNGrUKOvXGRkZion5e2Xdv5KenClzgVmhUcE27aGRwUpNTLO7T2pimkL+GB8VopSL8Zf+vLLt0tcHtx65Rj13HXLmOHLmOHLmuPT0bJkLLAr9w0La0DB/paRkFbHXX7v/oU6aO+c3rVq5S5J0+NAZRUUHa+Ddrcv8m7aM1GyZC8wK+cOi0NDwgEKf2hZXzbqVFRoeoA8+H2Ftc/dwV4Om1XRb/1bq0fJFWSxl5F2EHfycOS4jNevCv2cRth+MhkYEKLWIRe2pyZkKjfjDz2VEoE38gR0nNLLHJPkF+sjT013pKVl6e/6j2r/9xLW/CPxjuHzNwZVCQkIUFxenAwfs34bQ29tbQUFBNltJK8gv0L6Nh9SkUwNrm8lkUpNODbTr931299m1Zp+a3NTApq1p54bafTE+8fBpnT2Vqiad6ltf9wv0Ve1WNbVrzd4SuArnImeOI2eOI2eOKyiwaN++U2rSLNbaZjJJTZrGatfOq3+z4OPtUejNrMVsyM2tbNy2788UFJi1f/dJNWl5nbXNZDKpccvrtOtPbjH5Z7asPaj773xPI/p/aN327jyhld9t04j+H5
bpgYHEz9nVKMg3a/+OE2rc+nprm8lkUuPW12v35qN299m96ahNvCQ1aROn3ZuPFIrNzsxRekqWKsVG6PoGMfp92Y5r2v8yyTC5ZisDXL7m4Ernzp3TwYMHdc8997i6Kzbmvb1Io2c+rH0bDmrvugO6/fHu8vH31tJPVkmSRs8cqeSTKZrxzGeSpAXvLtZbq19Un1E9tHbxJnUc0EZxzWvonQf+Yz3mgsmLddfYO5WwP1GnDp/WkJf66+zJVP36zXq7fShryJnjyJnjyJnjvv5yrZ4ac5v27T2lPbtP6s6+LeXj66ml311YqP3UMz2VnJypj6etlnRhcWm12AoX/u7proiIQNWoGaXz5/N0MiFVkrTmt/26+542Op2UoSNHzqjm9dHq07+llny31SXXeK3N/+9vevLlO7RvV4L27kjQ7XfHy8fXSz98u0mS9H8v36nk0xn65L1lki4sYq5a40LOPD3cFR4ZpOtqRSsnO08nj6fofHaejh48bXOOnPP5ykzPLtReVvFz5rgFH/+kf785QPu3H9fercfUe2h7eft5adnX6yRJ/35zoM4mpWvmG99Jkr6d+bNe//wh3XFvB61btVsdejbW9Q2q6N2xX1mP2bZbQ6WnZOnMyVTF1qqoB8f11pplO7TpF/sfoACSiwcHTz75pHr27Klq1arp5MmTev755+Xu7q6BAwe6sluF/PjlbwqpEKTBL/ZXaHSIDm45ome6jVfa6QsLGyOrRsi44tOMXWv2acLdkzXk5YEaOv4uJew/pRduf11Hdl7+lOmL17+Vj7+PHv/PAwoI8dOOX/ZoTLfxys/Nd/r1lQRy5jhy5jhy5rjVK3crOMRfQ4Z1UGiYvw4eSNLTT861Lh6NjAqWYVzOWXhEoKbNuM/6df+B8eo/MF5bNh/Vvx+bLUl6750fNPS+Dnps1C0KCfXT2eRzWrRws/4782fnXlwJ+fGHHQoO9degEZ0UGhGgQ3tPaexDnyrt4hSZChWDZTEs1vjwyEBN+eJh69d9B7dV38FttXXDYY2+b4bT++8K/Jw57qfFWxQc5q9/PdFVYRFBOrg7Qc8Nma60i9PXIiuF2Px7tnvTEU18fLYG/7ubhjx5qxKOnNHLD36io/suPzMjLDJI94/tpZCIAKWcydCK+Rv1+fvLnH5tKFtMxpW/nU42YMAA/fTTTzp79qwqVKigtm3bavz48apRo0ax9s/IyFBwcLA6qpc8TP+MBxQBKL8sbRu7ugtljkdGjqu7UCYVBPm4ugtljteJFFd3ocwosORq+dEPlJ6e7pQp4I649N6xyvsvys3Xub8HlvM5OjHy+VKZlyu5tHIwd+5cV54eAAAAwBVK1ZoDAAAAoMRxK9Milaq7FQEAAABwHQYHAAAAACQxrQgAAADljSueO1BGnnNA5QAAAACAJCoHAAAAKGdMxoXN2ecsC6gcAAAAAJDE4AAAAADARUwrAgAAQPnCcw6KROUAAAAAgCQqBwAAAChvuJVpkagcAAAAAJBE5QAAAADlDWsOivS3Kwdms1lbtmxRamrqtegPAAAAABdxeHDw+OOP6+OPP5Z0YWDQoUMHNW3aVDExMVq9evW17h8AAAAAJ3F4cPD111+rUaNGkqT//e9/Onz4sPbs2aMnnnhCY8eOveYdBAAAAK4pw0VbGeDw4CA5OVnR0dGSpO+++059+/ZVXFychg0bpu3bt1/zDgIAAABwDocHB1FRUdq1a5fMZrOWLFmim2++WZKUnZ0td3f3a95BAAAA4JqiclAkh+9WNHToUPXr108VK1aUyWRS586dJUlr165V7dq1r3kHAQAAADiHw4ODF154QfXr19fx48fVt29feXt7S5Lc3d319NNPX/MOAgAAAHAOhwcHn376qfr3728dFFwycOBAzZ0795p1DAAAACgRPCG5SA6vORg6dKjS09MLtWdmZmro0KHXpFMAAAAAnM/hyoFhGDKZCo98Tpw4oeDg4GvSKQAAAKCkmIwLm7PPWRYUe3DQpEkTmUwmmUwmderUSR4el3c1m806fPiwbrnllhLpJAAAAICSV+zBQe/evSVJW7ZsUdeuXRUQEGB9zcvLS7GxsbrzzjuveQcBAACAa8oVtxb9p1UOnn/+eUlSbGys+vfvLx8fnxLrFAAAAADnc3jNweDBg0uiHwAAAABcrFiDg7CwMO3bt08REREKDQ21uyD5kpSUlGvWOQAAAADOU6zBwdtvv63AwEBJ0jvvvFOS/QEAAADgIsUaHFw5lYhpRQAAACjLTHLBrUyde7qr5vCagysZhqFVq1bp/Pnzat26tUJDQ69VvwAAAAA4WbEHB2lpaXrssce0adMm3XDDDXrrrbd066236rfffpMkRUZG6ocfflDDhg1LrLMAyg6P2Kqu7kKZs+dO7gLnqG5td7u6CygnYnxYU1lcOefytfwGV/cCV8utuIFPPvmk1qxZowEDBmj79u265ZZbZDabtWbNGq1du1Z16tTR2LFjS7KvAAAAwN9nmFyzlQHFrhx8//33+uyzz9ShQwcNGTJEMTExWrlypVq1aiVJmjhxom677bYS6ygAAACAklXswUFSUpLi4uIkSZUrV5aPj49iYmKsr1etWlVnzpy59j0EAAAAriWekFykYk8rslgscnd3t37t7u5u87yDP3v2AQAAAIDSz6G7FX300UcKCAiQJBUUFGjmzJmKiIiQJGVmZl773gEAAADXGpWDIhV7cFC1alVNnz7d+nV0dLT++9//FooBAAAAUDYVe3Bw5MiREuwGAAAAAFf7Ww9BAwAAAMoak+GCJySXkWlFxV6QDAAAAOCfjcoBAAAAyhcWJBeJygEAAAAASQwOAAAAAFzEtCIAAACUL0wrKlKxBwdubm5/+RRkk8mkgoKCv90pAAAAAM5X7MHBggULinxtzZo1evfdd2WxWK5JpwAAAICSwq1Mi1bswUGvXr0Kte3du1dPP/20/ve//+nuu+/WSy+9dE07BwAAAMB5rmpB8smTJzV8+HA1aNBABQUF2rJli2bNmqVq1apd6/4BAAAA15Zhcs1WBjg0OEhPT9dTTz2lmjVraufOnVqxYoX+97//qX79+iXVPwAAAABOUuxpRa+//romTpyo6Ohoff7553anGQEAAAAou4o9OHj66afl6+urmjVratasWZo1a5bduPnz51+zzgEAAADXHLcyLVKxBweDBg36y1uZAgAAACi7ij04mDlzZgl2AwAAAHAObmVatKu6WxEAAACAfx4GBwAAAAAkOTCtCAAAAPhHYEFykagcAAAAAJBE5QAAAADljQsWJFM5AAAAAFCmUDkAAABA+cKagyJROQAAAAAgicEBAAAAgIuYVgQAAIDyhWlFRaJyAAAAAEASlYNiu+2hrur75G0Kiw7Rwa1H9cGjM7R3/YEi49v3uUGDXxqg6NgKStifqI+enq1132+2iRn8Yn91u6+TAkL8tfPXPXr3oelKOJBY0pfiNOTMceTMcT3uaaM+wzsqtEKgDu0+qSkvLNC+bceLjG/braEGjeqmqCqhSjiSrE8mLtL61Xusr4dEBGjY6
B5q2i5O/kG+2rHukKa8uEAnjyQ743Kc4p6GjXV/s+aq4Oev3cln9MLqldqaZP9n4s469fRml1ts2nILClT7g8k2bU/c0FoD6jdQkLe3Npw8qedWLdeRtLSSugSna1+hs26O6q4gz2CdOH9MXx77VEezD9mNfTxurOIC6xRq35G+RR8eeFOS9GGz2Xb3nX/icy1PWnztOu5C5MxxTcN6qFX4nQrwCNXpnMP6IXGKTp3fV2R8i7BeahLWXUGeFXTenKE9Gb9oddJMmY18SVJ8RD/VCmqtMK8qKjDylJC9W6uSZiglL8FZl1RqmVxwK1On3zr1Krm8cpCQkKB//etfCg8Pl6+vrxo0aKANGza4uls2OvRrrQfeGqzZL32lEc2e0qFtRzVhyViFVAiyG183Pk7PfPa4lsxYqRFNR+vXb9fphQWjFVsvxhrTf3Qv9X6kmyaPmKZHbhijnKxcTVjyrDy9PZ11WSWKnDmOnDmufffGuv+Z2zTn3R/0SM+3dXj3Sb0y634FhwfYja/TNFZPT/6Xln65ViN7TNKaH3boualDVS0u2hozbupQRVcN00sPfKKRPSbpdEKqXv3vA/L29XLWZZWo7tfX0th2HTR57Rr1+Py/2n3mjGb1vlPhvr5F7pORm6sW06dYt7afTLd5/YFmLTSkcRM9u3K5bv/iM53Pz9es3nfKy929pC/HKZqFttKdVe7W4lMLNGH3s0rIPqZHrn9KAR72fzenHXxHT2992Lq9vPMpmQ2zNqWutcZc+frTWx/Wp0emyWJYtDl1nbMuq0SRM8fVCWqvTlHD9cuZzzTj0CNKyjmk/tVelp97sN34usEd1TFqqH4585mmH3hA3yW8ozpB7dUxcog1pqpffW1MWaRPD4/S3CNj5WZy14Bq4+Vp8nbSVaEscungIDU1VW3atJGnp6e+//577dq1S2+99ZZCQ0Nd2a1C7nyih77/aIWWzlytY7tPaPKD05Sbnaeuw26yG3/7o921fskWffXmQh3bk6BZ477QgU2H1Gvk5U/fbn+su+aMn6c1Czfo8PZjmjj4fYVXClWb3i2cdVklipw5jpw57vZ72+v7L37Xsq/X69iBJL337Dzlns9Xl74t7cb3GtJOG37aq3nTV+v4wdP679tLdHBngnoOaiNJqlw9QnWaxur95+Zp37bjSjh8Ru8/N0/e3p7q2LOJMy+txNzXtJm+2LldX+/aqQMpKRq7cpnOF+Srb70Gf7KXoeTsbJvtSsOaNNX769Zq2aGD2pOcrH//8L2i/APUpUbNkr0YJ7kpqpt+TV6l38/+pMSck/r82CfKs+SqdXgHu/HZ5ixlFKRbt9pB9ZVnydOmK97EXvl6RkG6GoU01b7M3Tqbd8ZZl1WiyJnjWobfrq2pS7Q9bZnO5h7XklPvq8CSq4ahXezGV/GtoxPZu7QrfbXS80/rcNZm7Ur/URV946wxXxwbp+1py5Wce0yncw9rUcIkBXtFKtr3emddFsoglw4OJk6cqJiYGH3yySdq2bKlqlevri5duqhGjRqu7JYND08PxTW7TpuWb7O2GYahTcu3qe4NcXb3qRsfp00rttm0bfhhq+pcjI+uHqnwiqHavHy79fXsjGztWXtAdeNrlcBVOBc5cxw5c5yHp7uur19FW37db20zDENbft2nOk2q2d2nTtNq2vKrbYl+4897VadJrCTJ0+vCTMv83AKbY+bnmVWvefVrfAXO5+nmpvqRUfrl2DFrmyHp12PH1DS6YpH7+Xl66Zehw/XrsPs1rUcvXR8Wbn0tJihYkf4B+uXYUWtbZl6etiSeUtPoSiVyHc7kbnJXVb/q2pux09pmyNCezJ2qHlC8wU/riI7amLJGeZZcu68HegSpfnBj/Za8+lp02eXImePcTB6K9q2pw1lbrmg1dCRriyr71ra7z4nzuxXtW9M6GAjxjFaNwOY6eG59kefxcfeXJJ03Z16rruMfyKWDg4ULF6p58+bq27evIiMj1aRJE02fPr3I+NzcXGVkZNhsJS04IlDuHu5KTUq3aU89na7Q6BC7+4RGhyjtj/FJaQq7GH/pz9SktEIxoVH2j1mWkDPHkTPHBYX6X8hZsu1/cqnJ5xRaIdDuPqERgUpNPveH+Exr/PGDp5WUkKIh/3erAoJ85eHprr4P3KgKlUIUFml/OkRZEurrKw83NyVnZ9m0J2dnq4K/v919DqWm6KllSzX8f99o1NLv5GYy6et+AxUdcGHq1qX9/lhN+LNjliUBHoFyN7kro8D2dy0zP11Bnvane1ypmt91quwbo1//5E3sDeHtlGPO0Za00jWl9mqRM8f5uQfJzeSu7IJUm/asgjQFeITZ3WdX+mr9fHq27ol9Q6PrLtSIuBk6lrVda5K/LOIsJnWOfkDHs3YqOfdoETGAiwcHhw4d0pQpU3T99ddr6dKlGjFihB599FHNmjXLbvyECRMUHBxs3WJiYuzGAcDVMBdY9MqIWapcvYK+2vKKvtk5QQ1vqKn1q3fLYrG4unsusTnxlObv2aXdyWe0NuGEHly8UCnns3VX/Yau7lqZ0DqioxKyjxW5EFeS4iM6aH3Kbyq4uIi0vCNnxVPVr4HiI/pp6akP9cnBRzXv2MuqEdBCbSoMtBvfteJDivCupm9PvObknpZShou2MsClgwOLxaKmTZvq1VdfVZMmTXT//fdr+PDhmjp1qt34MWPGKD093bodP170HUmulfTkTJkLzAqNsv20IzQyWKmJaXb3SU1MU8gf46NClHIx/tKff/z0NjQqpNCnvGUROXMcOXNcRmrWhZxF2FYJQiMClHrGfsk8NTlToREBf4gPtIk/sOOERvaYpDsbjdXdN7yo54ZOV2CInxKPp1z7i3Cy1PPnVWCxKMLP9hP9CD8/ncnKKmIvWwUWi3adOa1qIRfWhl3aL8LP76qPWZqdK8iU2TAryMP2dy3QM1gZ+elF7HWBl5u3mofdoN/O/lhkTI2AWor2qfSnn5KXNeTMcdnmDFkMs/w8bNdc+nuE6FyB/X972kfeox3pK7U1banO5B7Rvsw1+vH0LMVH9JVksontEj1CNQNb6rMjTyuz4GxJXQb+IVw6OKhYsaLq1q1r01anTh0du2I+7JW8vb0VFBRks5W0gvwC7dt4SE06XV6sZzKZ1KRTA+363f7txXat2acmN9ku7mvauaF2X4xPPHxaZ0+lqkmn+tbX/QJ9VbtVTe1as7cErsK5yJnjyJnjCvLN2r/jhBq3vrywzmQyqXHr67V7s/2S+e5NR23iJalJmzjt3nykUGx2Zo7SU7JUKTZC1zeI0e/LdlzT/rtCvsWiHaeT1CamqrXNJKl1TFVtSjxVrGO4mUyqFV5Bp7MuTM86npGu01nnbI4Z4OWlxtEVtSnx5DXtvyuYDbOOZR9WraB61jaTTKoVWE+HzxV9m2FJahraUh4mD607+2uRMa3DO+ho1iElnLf//15ZRM4cZzEKlHj+gGL9G13RalI1/8ZKOL/H7j4ebt4y/vBRtGFYLu55eXDQJXqE4oLi9dmRMUrPT7rmfS+rLt3K1NlbWeDSwUGb
Nm20d6/tm5R9+/apWjX7iwldZd7bi3TrfZ1086AOqlq7sh6dMlw+/t5a+skqSdLomSM17NW7rPEL3l2sFrc0Vp9RPRRTq5Lueb6v4prX0LfvL7kcM3mx7hp7p+J7Nlds/aoaPWukzp5M1a/fFL2QqCwhZ44jZ45b8PFPumVAK3W+o7liakRq5Mt3ytvPS8u+vnCHk3+/OVBD/u9Wa/y3M39Ws/a1dce9HVTlukjd/VgXXd+giv736eU3Im27NVSDVjUUHROmGzrX06ufPqA1y3Zo0y9F32u8LPlo00YNqN9Ad9SpqxqhYXrlps7y8/TU17suDH7e6nKL/q91W2v8Iy1vULuq1RQTFKx6FSL1dtdbVTkoUF/svLzQfcbmTRrZ8gZ1rl5DtcIj9FaXbkrKOqcfDv75G8GyYmXS92oT0VGtwtop2qeSBlQdKm83b625+On24NgH1KtSv0L7tQ7vqK1pG5VlPlfoNUnycfNV09CW/5hFtVciZ45bd3aBGofeogbBnRTuFaNbKj4sTzdvbUtdJknqUfnf6nDFbUoPZK5T09DuqhPUXsGeUYr1b6L2kfdof+Y6GbowSOha8SHVC7lR3554XXmW8/L3CJW/R6g8TP+MWzOjZLj0IWhPPPGEWrdurVdffVX9+vXTunXrNG3aNE2bNs2V3Srkxy9/U0iFIA1+sb9Co0N0cMsRPdNtvNJOXyiPRlaNkGG5PBzctWafJtw9WUNeHqih4+9Swv5TeuH213Vk5+VpUF+8/q18/H30+H8eUECIn3b8skdjuo1Xfu4/Y/4kOXMcOXPcT4u3KDjMX/96oqvCIoJ0cHeCnhsyXWkXFx1HVgqxydnuTUc08fHZGvzvbhry5K1KOHJGLz/4iY7uu/wAsLDIIN0/tpdCIgKUciZDK+Zv1OfvL3P6tZWUxfv3KtzXV6NuaKMIPz/tTj6jId/Msy4orhQYJItxOWfBPj6a0KmLIvz8lJGbq+2nk3Tnl3N1IOXyVIf/bFwvP09PvdrpZgV5e2v9yQQN+Wa+8sxmp19fSdiYulYBHkHqUenOiw/0Oqr397+uzIILN8UI9YqwyZkkRXpXVM3AWnp3X9Hzu5uF3SCTyaT1KWtKtP+uQM4ctzvjJ/l5BKld5D3y9wjV6ZxD+vLoOGWb0yRJQZ4VrJUBSfr1zOeSDHWIHKQAz3BlF6TrwLl1+jHp8rrNpmE9JEn/qv66zbkWJUzS9rTlJX5NKJtMhmG4tMixaNEijRkzRvv371f16tU1atQoDR8+vFj7ZmRkKDg4WB3VSx6mf8ZDnYB/Co/Yqn8dBBt7Hiv7t/50tm5tN/91EHANxPiU/XVHzpJzLl8v3bBc6enpTpkC7ohL7x1rPv2q3L19nHpuc26ODrz2TKnMy5VcWjmQpB49eqhHjx6u7gYAAABQ7rl8cAAAAAA4lStuLcqCZAAAAABlCYMDAAAAAJKYVgQAAIByxhXPHeA5BwAAAADKFCoHAAAAKF9YkFwkKgcAAAAAJDE4AAAAAHAR04oAAABQrrAguWhUDgAAAABIonIAAACA8oYFyUWicgAAAABAEoMDAAAAlDeGizYHffDBB4qNjZWPj49atWqldevWFWu/uXPnymQyqXfv3g6fk8EBAAAAUMp88cUXGjVqlJ5//nlt2rRJjRo1UteuXXX69Ok/3e/IkSN68skn1a5du6s6L4MDAAAAoJSZNGmShg8frqFDh6pu3bqaOnWq/Pz8NGPGjCL3MZvNuvvuu/Xiiy/quuuuu6rzMjgAAABAuXLpVqbO3iQpIyPDZsvNzS3Uv7y8PG3cuFGdO3e2trm5ualz585as2ZNkdf10ksvKTIyUvfee+9V54bBAQAAAOAkMTExCg4Otm4TJkwoFJOcnCyz2ayoqCib9qioKCUmJto97i+//KKPP/5Y06dP/1v941amAAAAKF9ceCvT48ePKygoyNrs7e39tw+dmZmpe+65R9OnT1dERMTfOhaDAwAAAMBJgoKCbAYH9kRERMjd3V1JSUk27UlJSYqOji4Uf/DgQR05ckQ9e/a0tlksFkmSh4eH9u7dqxo1ahSrf0wrAgAAAEoRLy8vNWvWTCtWrLC2WSwWrVixQvHx8YXia9eure3bt2vLli3W7bbbbtONN96oLVu2KCYmptjnpnIAAACA8qUMPCF51KhRGjx4sJo3b66WLVvqnXfeUVZWloYOHSpJGjRokCpXrqwJEybIx8dH9evXt9k/JCREkgq1/xUGBwAAAEAp079/f505c0bjxo1TYmKiGjdurCVLllgXKR87dkxubtd+EhCDAwAAAJQrV95a1JnndNTIkSM1cuRIu6+tXr36T/edOXOm4ycUaw4AAAAAXETlAAAAAOVLGVhz4CpUDgAAAABIYnAAAAAA4CKmFQEAAKBcKSsLkl2BygEAAAAASVQOAAAAUN6wILlIVA4AAAAASKJyAKCEnL6psqu7UOZUrpvo6i6UOW2D9rm6C2VSjOdZV3ehzKnmke3qLpQZmW4WveTqTuCqMTgAAABA+cK0oiIxrQgAAACAJCoHAAAAKGdMFzdnn7MsoHIAAAAAQBKVAwAAAJQ3rDkoEpUDAAAAAJIYHAAAAAC4iGlFAAAAKFdMxoXN2ecsC6gcAAAAAJBE5QAAAADlDQuSi0TlAAAAAIAkBgcAAAAALmJaEQAAAMqfMjLNx9moHAAAAACQROUAAAAA5Qy3Mi0alQMAAAAAkqgcAAAAoLzhVqZFonIAAAAAQBKDAwAAAAAXMa0IAAAA5QoLkotG5QAAAACAJCoHAAAAKG9YkFwkKgcAAAAAJDE4AAAAAHAR04oAAABQrrAguWhUDgAAAABIonIAAACA8oYFyUWicgAAAABAEpUDAAAAlDdUDopE5QAAAACAJAYHAAAAAC5iWhEAAADKFW5lWjQqBwAAAAAkUTkottse6qq+T96msOgQHdx6VB88OkN71x8oMr59nxs0+KUBio6toIT9ifro6dla9/1mm5jBL/ZXt/s6KSDEXzt/3aN3H5quhAOJJX0pTkPOHEfOHNf3xkYa1LW5woP9tf/4Gb3++SrtPGz/+m5sWlPDbm2pmMgQebi761hSqmb/sFHf/b5bkuTh7qYRvduobYPqqlwhWOfO52rtrmN6b97PSk7PcuZllag7qsRrYGwHhXkF6uC5U3p7z7fanXG8yPgADx/dX/MWtY+sryBPPyWdT9Xkff/T78l7JEluMmlYjZvVpWJThXsFKjk3Q9+d3KBZh1c465JK3PXBfVQn9F/ydQ9Xat5+bTz9ps7m7ioyvlbIAF0ffKf8PKKUa07X8XMrteXsB7IYedYYX/cKahwxUpX8W8vd5K1z+Sf0e9LLSsnd7YxLKnEVA/+lKsHD5eVeQefyduvg2Rd1Lm+b3ViTPBQT/KAiA+6Qt0e0svMP6Ujq60o9/5M1Jsi7haoED1eAV315e0Rp1+kHdTZ7mbMuxykC/IcoKPAhubtXUF7+LqWmjlVe/pYioj0UFPiI/P37ycM9Wvn5B5WWPl45uausEcFB/1Zw0JM2e+XnH9CppHYldxF
[... remaining base64-encoded PNG data omitted (notebook plot output: accuracy of the quantized model vs. FP32 for each n_bits / rounding_threshold_bits combination) ...]", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "import torch\n", - "from concrete.ml.torch.compile import build_quantized_module, compile_torch_model\n", - "\n", - "def run_experiment(fhe_mode=\"disable\"):\n", - " # Assuming 'images' and 'resnet18' are already defined\n", - " resnet18.eval() # Set the model to inference mode\n", - "\n", - " # Define ranges for n_bits and rounding_threshold_bits\n", - " n_bits_range = range(2, 10) # 2 to 10\n", - " rounding_threshold_bits_range = list(range(2, 9)) + [None] # 2 to 9 and None\n", - "\n", - " # Initialize a dictionary to store accuracies for each combination\n", - " accuracies = {}\n", - "\n", - " # Loop over the ranges of n_bits and rounding_threshold_bits\n", - " for n_bits in n_bits_range:\n", - " for rounding_threshold_bits in rounding_threshold_bits_range:\n", - " compile_method = build_quantized_module if fhe_mode == \"disable\" else compile_torch_model\n", - " q_module = compile_method(\n", - " resnet18,\n", - " torch_inputset=images,\n", - " n_bits=n_bits,\n", - " rounding_threshold_bits=rounding_threshold_bits,\n", - " )\n", - "\n", - " with torch.no_grad():\n", - " outputs_fhe = q_module.forward(images.detach().numpy(), fhe=fhe_mode)\n", - " probabilities_fhe = torch.nn.functional.softmax(torch.from_numpy(outputs_fhe), dim=-1)\n", - " outputs = resnet18(images)\n", - " probabilities = torch.nn.functional.softmax(outputs, dim=-1)\n", - "\n", - " # Calculate and store accuracy\n", - " fhe_accuracy_vs_fp32 = (\n", - " (probabilities_fhe.argmax(-1) == probabilities.argmax(-1)).float().mean().item()\n", - " )\n", - " accuracies[(n_bits, rounding_threshold_bits)] = fhe_accuracy_vs_fp32\n", - "\n", - " # Convert accuracies to a 2D array for plotting\n", - " accuracy_matrix = np.zeros((len(n_bits_range), len(rounding_threshold_bits_range)))\n", - " for i, n_bits in enumerate(n_bits_range):\n", - " for j, rounding_threshold_bits in enumerate(rounding_threshold_bits_range):\n", - " accuracy_matrix[i, j] = accuracies[(n_bits, rounding_threshold_bits)]\n", - "\n", - " # Plotting\n", - " fig, ax = plt.subplots(figsize=(10, 8))\n", - " cax = ax.matshow(accuracy_matrix, cmap=\"viridis\")\n", - " fig.colorbar(cax)\n", - "\n", - " # Set ticks and labels\n", - " ax.set_xticklabels([\"\"] + rounding_threshold_bits_range, rotation=45)\n", - " ax.set_yticklabels([\"\"] + list(n_bits_range))\n", - " ax.set_xlabel(\"Rounding Threshold Bits\")\n", - " ax.set_ylabel(\"N Bits\")\n", - " ax.set_title(f\"Accuracy of FHE ({fhe_mode}) vs. 
FP32\")\n", - "\n", - " # Annotate each cell with the accuracy percentage\n", - " for i in range(len(n_bits_range)):\n", - " for j in range(len(rounding_threshold_bits_range)):\n", - " ax.text(j, i, f\"{accuracy_matrix[i, j]:.2f}\", va=\"center\", ha=\"center\", color=\"white\")\n", - "\n", - " plt.show()\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "run_experiment(fhe_mode=\"disable\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "run_experiment(fhe_mode=\"simulate\")" - ] - } - ], - "metadata": { - "execution": { - "timeout": 10800 - }, - "kernelspec": { - "display_name": ".venv", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.18" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/use_case_examples/resnet/run_resnet18_fhe.py b/use_case_examples/resnet/run_resnet18_fhe.py index 2a25ff072..ecf5c12d5 100644 --- a/use_case_examples/resnet/run_resnet18_fhe.py +++ b/use_case_examples/resnet/run_resnet18_fhe.py @@ -1,153 +1,115 @@ import argparse +import json +import time +from pathlib import Path + import torch +from concrete.fhe import Configuration from resnet import ResNet18_Weights, resnet18_custom +from utils_resnet import TinyImageNetProcessor +import numpy as np + from concrete.ml.torch.compile import compile_torch_model -import requests -from io import BytesIO -from PIL import Image -from torchvision import transforms -import time -from pathlib import Path parser = argparse.ArgumentParser(description="Run ResNet18 model with FHE execution.") -parser.add_argument('--run_fhe', action='store_true', help="Run the actual FHE execution.") +parser.add_argument("--run_fhe", action="store_true", help="Run the actual FHE execution.") +parser.add_argument( + "--export_statistics", action="store_true", help="Export the circuit statistics." +) args = parser.parse_args() - BASE_DIR = Path(__file__).resolve().parent # Load the ResNet18 model with pretrained weights resnet18 = resnet18_custom(weights=ResNet18_Weights.IMAGENET1K_V1) -# Use ImageNet classes file to map class names to indices -imagenet_classes_path = BASE_DIR / "imagenet_classes.txt" -with open(imagenet_classes_path, "r") as f: - class_to_index = {cls: idx for idx, cls in enumerate([line.strip() for line in f.readlines()])} - -# Define image transformation -transform = transforms.Compose([ - transforms.Resize(256), - transforms.CenterCrop(224), - transforms.ToTensor(), - transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]), -]) - -# TODO: have a more automated way to grab N images from the net. 
-# Download an example image from the web -image_urls = [ - "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01443537_goldfish.JPEG", - "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01614925_bald_eagle.JPEG", - "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01697457_African_crocodile.JPEG", - "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01592084_chickadee.JPEG", - "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01601694_water_ouzel.JPEG", - "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01739381_vine_snake.JPEG", - "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01806567_quail.JPEG", - "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n01917289_brain_coral.JPEG", - "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02077923_sea_lion.JPEG", - "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02051845_pelican.JPEG", - "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02110185_Siberian_husky.JPEG", - "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02165456_ladybug.JPEG", - "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02325366_wood_rabbit.JPEG", - "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02391049_zebra.JPEG", - "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02481823_chimpanzee.JPEG", - "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02510455_giant_panda.JPEG", - "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02643566_lionfish.JPEG", - "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02787622_banjo.JPEG", - "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02817516_bearskin.JPEG", - "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02871525_bookshop.JPEG", - "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02930766_cab.JPEG", - "https://raw.githubusercontent.com/EliSchwartz/imagenet-sample-images/master/n02974003_car_wheel.JPEG", -] - -images, labels = [], [] -for image_url in image_urls: - class_name = '_'.join(image_url.split('/')[-1].split('.')[0].split('_')[1:]).replace('_', ' ') - if class_name in class_to_index: - response = requests.get(image_url) - img = Image.open(BytesIO(response.content)) - images.append(transform(img)) - labels.append(class_to_index[class_name]) - -# Stack images to create a mini batch -images = torch.stack(images) -labels = torch.tensor(labels) - -# Function to compute accuracy -def compute_accuracy(predicted, labels): - correct = (predicted == labels).sum().item() - total = labels.size(0) - return 100 * correct / total - -# Function to compute top-k accuracy -def compute_topk_accuracy(outputs, labels, topk=5): - _, topk_predicted = torch.topk(outputs, topk, dim=1) - correct_topk = sum([labels[i] in topk_predicted[i] for i in range(len(labels))]) - total = labels.size(0) - return 100 * correct_topk / total +CALIBRATION_SAMPLES = 10 +NUM_TEST_SAMPLES = 100 + +imagenet_classes_path = BASE_DIR / "LOC_synset_mapping.txt" +processor = TinyImageNetProcessor(imagenet_classes_path) +all_images, all_labels = 
processor.get_image_label_tensors(num_samples=NUM_TEST_SAMPLES + CALIBRATION_SAMPLES) + +# Split into calibration and test sets +calib_images, _ = all_images[:CALIBRATION_SAMPLES], all_labels[:CALIBRATION_SAMPLES] +images, labels = all_images[CALIBRATION_SAMPLES:], all_labels[CALIBRATION_SAMPLES:] # Forward pass through the model to get the predictions with torch.no_grad(): outputs = resnet18(images) - _, predicted = torch.max(outputs, 1) # Compute and print accuracy -accuracy = compute_accuracy(predicted, labels) -print(f"Accuracy of the ResNet18 model on the images: {accuracy:.4f}%") +accuracy = processor.compute_accuracy(outputs, labels) +print(f"Accuracy of the ResNet18 model on the images: {accuracy*100:.2f}%") + +topk_accuracy = processor.compute_topk_accuracy(outputs, labels, k=5) +print(f"Top-5 Accuracy of the ResNet18 model on the images: {topk_accuracy*100:.2f}%") -topk_accuracy = compute_topk_accuracy(outputs, labels, topk=5) -print(f"Top-5 Accuracy of the ResNet18 model on the images: {topk_accuracy:.4f}%") +# Enable TLU fusing to optimize the number of TLUs in the residual connections +config = Configuration(enable_tlu_fusing=True, print_tlu_fusing=False) # Compile the model print("Compiling the model...") q_module = compile_torch_model( resnet18, torch_inputset=images, - n_bits={"model_inputs": 8, "op_inputs": 7, "op_weights": 6, "model_outputs": 8}, - rounding_threshold_bits={"n_bits": 7, "method":"APPROXIMATE"}, - p_error=0.005 + n_bits={"model_inputs": 8, "op_inputs": 6, "op_weights": 6, "model_outputs": 8}, + rounding_threshold_bits={"n_bits": 7, "method": "APPROXIMATE"}, + p_error=0.05, + configuration=config, ) + +if args.export_statistics: + open("resnet.graph", "w").write(q_module.fhe_circuit.graph.format(show_locations=True)) + open("resnet.mlir", "w").write(q_module.fhe_circuit.mlir) + + def make_serializable(obj): + if isinstance(obj, dict): + return {str(key): make_serializable(value) for key, value in obj.items()} + elif isinstance(obj, list): + return [make_serializable(element) for element in obj] + elif isinstance(obj, tuple): + return tuple(make_serializable(element) for element in obj) + elif isinstance(obj, (str, int, float, bool, type(None))): + return obj + else: + return str(obj) + + statistics = make_serializable(q_module.fhe_circuit.statistics) + + with open("resnet_statistics.json", "w") as f: + json.dump(statistics, f) + print("Model compiled successfully.") # Forward pass with FHE disabled with torch.no_grad(): outputs_disable = q_module.forward(images.detach().numpy(), fhe="disable") - _, predicted_disable = torch.max(torch.from_numpy(outputs_disable), 1) - -# Compute accuracy -fhe_accuracy_vs_fp32 = (predicted_disable == predicted).float().mean().item() -print(f"Quantized Model Fidelity with FP32: {fhe_accuracy_vs_fp32:.4f}%") # Compute and print accuracy for quantized model -accuracy = compute_accuracy(predicted_disable, labels) -print(f"Quantized Model Accuracy of the FHEResNet18 on the images: {accuracy:.4f}%") -topk_accuracy = compute_topk_accuracy(torch.from_numpy(outputs_disable), labels, topk=5) -print(f"Quantized Model Top-5 Accuracy of the FHEResNet18 on the images: {topk_accuracy:.4f}%") +accuracy = processor.compute_accuracy(torch.from_numpy(outputs_disable), labels) +print(f"Quantized Model Accuracy of the FHEResNet18 on the images: {accuracy*100:.2f}%") +topk_accuracy = processor.compute_topk_accuracy(torch.from_numpy(outputs_disable), labels, k=5) +print(f"Quantized Model Top-5 Accuracy of the FHEResNet18 on the images: 
{topk_accuracy*100:.2f}%") # Forward pass with FHE simulation with torch.no_grad(): outputs_simulate = q_module.forward(images.detach().numpy(), fhe="simulate") - _, predicted_simulate = torch.max(torch.from_numpy(outputs_simulate), 1) # Compute and print accuracy for FHE simulation -accuracy = compute_accuracy(predicted_simulate, labels) -print(f"FHE Simulation Accuracy of the FHEResNet18 on the images: {accuracy:.4f}%") -topk_accuracy = compute_topk_accuracy(torch.from_numpy(outputs_simulate), labels, topk=5) -print(f"FHE Simulation Top-5 Accuracy of the FHEResNet18 on the images: {topk_accuracy:.4f}%") +accuracy = processor.compute_accuracy(torch.from_numpy(outputs_simulate), labels) +print(f"FHE Simulation Accuracy of the FHEResNet18 on the images: {accuracy*100:.2f}%") +topk_accuracy = processor.compute_topk_accuracy(torch.from_numpy(outputs_simulate), labels, k=5) +print(f"FHE Simulation Top-5 Accuracy of the FHEResNet18 on the images: {topk_accuracy*100:.2f}%") if args.run_fhe: # Run FHE execution and measure time on a single image - q_module.fhe_circuit.keygen() + # q_module.fhe_circuit.keygen() single_image = images[0:1].detach().numpy() start = time.time() - fhe_output = q_module.forward(single_image, fhe="simulate") + fhe_output = q_module.forward(single_image, fhe="execute") end = time.time() print(f"Time taken for one FHE execution: {end - start:.4f} seconds") - print(f"FHE execution output: {fhe_output}") - - # Run FHE simulation on the same single image - fhe_sim_output = q_module.forward(single_image, fhe="simulate") - print(f"FHE simulation output: {fhe_sim_output}") - print(f"Actual label: {labels[0].item()}") else: print("FHE execution was not run. Use --run_fhe to enable it.") \ No newline at end of file diff --git a/use_case_examples/resnet/utils_resnet.py b/use_case_examples/resnet/utils_resnet.py new file mode 100644 index 000000000..d48619e7d --- /dev/null +++ b/use_case_examples/resnet/utils_resnet.py @@ -0,0 +1,111 @@ +import torch +from datasets import load_dataset +from torchvision import transforms + + +class TinyImageNetProcessor: + """Processor for Tiny ImageNet dataset to align it with ImageNet labels for model evaluation. + + It preprocesses images to ImageNet standards, maps labels between Tiny ImageNet and ImageNet, + and evaluates model predictions with these mappings. + """ + + def __init__(self, imagenet_classes_path): + """Initializes the processor with the path to ImageNet classes and loads the dataset. + + Args: + imagenet_classes_path (str): Path to the file containing ImageNet class labels. + """ + self.imagenet_classes_path = imagenet_classes_path + self.dataset = load_dataset("zh-plus/tiny-imagenet") + self.target_imagenet_to_tiny, self.target_tiny_to_imagenet = self._load_and_map_labels() + + def _load_and_map_labels(self): + """Loads ImageNet labels from a file and creates mappings with the dataset labels. + + Returns: + tuple: Two dictionaries for label mapping between ImageNet and Tiny ImageNet. 
+ """ + try: + with open(self.imagenet_classes_path, "r") as file: + lines = file.readlines() + except IOError: + raise FileNotFoundError("The ImageNet classes file was not found.") + + label_to_imagenet_idx = {line.split()[0]: idx for idx, line in enumerate(lines)} + tiny_labels = {label: idx for idx, label in enumerate(self.dataset["train"].features["label"].names)} + + common_labels = set(label_to_imagenet_idx.keys()) & set(tiny_labels.keys()) + imagenet_to_tiny = {label_to_imagenet_idx[label]: tiny_labels[label] for label in common_labels} + tiny_to_imagenet = {v: k for k, v in imagenet_to_tiny.items()} + + return imagenet_to_tiny, tiny_to_imagenet + + def get_image_label_tensors(self, num_samples=100): + """Fetches and preprocesses a specified number of image samples. + + Args: + num_samples (int): Number of samples to process. + + Returns: + tuple: Tensors of images and their corresponding labels. + """ + transform = transforms.Compose([ + transforms.Resize(256), + transforms.CenterCrop(224), + transforms.ToTensor(), + transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) + ]) + + rgb_samples = [] + attempt = 0 + # Shuffle and select a subset of valid samples to find RGB images with valid labels + while len(rgb_samples) < num_samples and attempt < 10 * num_samples: + valid_samples = self.dataset["valid"].shuffle(seed=attempt).select(range(num_samples * 2)) + for sample in valid_samples: + if sample["image"].mode == "RGB" and sample["label"] in self.target_imagenet_to_tiny.values(): + rgb_samples.append((transform(sample["image"]), sample["label"])) + if len(rgb_samples) == num_samples: + break + attempt += 1 + + images, labels = zip(*rgb_samples) if rgb_samples else ([], []) + return torch.stack(images), torch.tensor(labels) + + def compute_accuracy(self, outputs, labels): + """Computes the accuracy of model outputs compared to true labels. + + Args: + outputs (torch.Tensor): Model outputs. + labels (torch.Tensor): True labels. + + Returns: + float: Accuracy metric. + """ + imagenet_labels = torch.tensor([self.target_tiny_to_imagenet[label.item()] for label in labels]) + relevant_indices = [self.target_tiny_to_imagenet[label.item()] for label in labels] + filtered_outputs = outputs[:, relevant_indices] + + predicted_labels = torch.tensor([relevant_indices[idx] for idx in filtered_outputs.argmax(dim=-1)]) + return (predicted_labels == imagenet_labels).float().mean().item() + + def compute_topk_accuracy(self, outputs, labels, k=5): + """Computes top-k accuracy of the model outputs. + + Args: + outputs (torch.Tensor): Model outputs. + labels (torch.Tensor): True labels. + k (int): Top k predictions to consider. + + Returns: + float: Top-k accuracy metric. 
+ """ + imagenet_labels = torch.tensor([self.target_tiny_to_imagenet[label.item()] for label in labels]) + relevant_indices = [self.target_tiny_to_imagenet[label.item()] for label in labels] + filtered_outputs = outputs[:, relevant_indices] + + topk_preds = filtered_outputs.topk(k, dim=-1).indices + topk_labels = torch.tensor([[relevant_indices[pred] for pred in preds] for preds in topk_preds]) + + correct = sum(imagenet_labels[i] in topk_labels[i] for i in range(len(imagenet_labels))) + return correct / len(imagenet_labels) From 975aaa8f8b7efdd7a09338667e355d4a8b16f666 Mon Sep 17 00:00:00 2001 From: jfrery Date: Wed, 19 Jun 2024 09:45:37 +0200 Subject: [PATCH 4/9] chore: fix pcc + add resnet18 CI test --- .../workflows/run_one_use_cases_example.yaml | 1 + src/concrete/ml/quantization/post_training.py | 12 +- use_case_examples/resnet/Makefile | 5 + use_case_examples/resnet/README.md | 35 ++-- use_case_examples/resnet/resnet.py | 7 +- use_case_examples/resnet/run_resnet18_fhe.py | 155 ++++++++++-------- use_case_examples/resnet/utils_resnet.py | 63 ++++--- 7 files changed, 168 insertions(+), 110 deletions(-) create mode 100644 use_case_examples/resnet/Makefile diff --git a/.github/workflows/run_one_use_cases_example.yaml b/.github/workflows/run_one_use_cases_example.yaml index d2ddc8249..8c6db8728 100644 --- a/.github/workflows/run_one_use_cases_example.yaml +++ b/.github/workflows/run_one_use_cases_example.yaml @@ -19,6 +19,7 @@ on: - federated_learning - hybrid_model - llm + - resnet - sentiment_analysis_with_transformer - titanic # --- refresh_use_cases_list.py: refresh list of use cases currently available [END] --- diff --git a/src/concrete/ml/quantization/post_training.py b/src/concrete/ml/quantization/post_training.py index 9ef020c74..456d839bd 100644 --- a/src/concrete/ml/quantization/post_training.py +++ b/src/concrete/ml/quantization/post_training.py @@ -177,12 +177,12 @@ def get_n_bits_dict(n_bits: Union[int, Dict[str, int]]) -> Dict[str, int]: n_bits_dict.update(n_bits) - # assert_true( - # n_bits_dict["model_outputs"] >= n_bits_dict["op_inputs"], - # "Using fewer bits to represent the model_outputs than the op inputs is not " - # f"recommended. Got model_outputs: {n_bits_dict['model_outputs']} and op_inputs: " - # f"{n_bits_dict['op_inputs']}", - # ) + assert_true( + n_bits_dict["model_outputs"] >= n_bits_dict["op_inputs"], + "Using fewer bits to represent the model_outputs than the op inputs is not " + f"recommended. 
Got model_outputs: {n_bits_dict['model_outputs']} and op_inputs: " + f"{n_bits_dict['op_inputs']}", + ) return n_bits_dict diff --git a/use_case_examples/resnet/Makefile b/use_case_examples/resnet/Makefile new file mode 100644 index 000000000..afcc5fe01 --- /dev/null +++ b/use_case_examples/resnet/Makefile @@ -0,0 +1,5 @@ +run_example: bench_resnet + +bench_resnet: + @python run_resnet18_fhe.py + diff --git a/use_case_examples/resnet/README.md b/use_case_examples/resnet/README.md index 572102756..0002f6f7a 100644 --- a/use_case_examples/resnet/README.md +++ b/use_case_examples/resnet/README.md @@ -8,7 +8,6 @@ This project executes the ResNet18 image classification model using Fully Homomo The ResNet18 model is adapted from torchvision the original https://github.com/pytorch/vision/blob/main/torchvision/models/resnet.py where the adaptive average pooling layer `AdaptiveAvgPool2d` (not yet supported by Concrete ML) is replaced with a standard `AvgPool2d` layer as follows: - ```diff - self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) + self.avgpool = nn.AvgPool2d(kernel_size=7, stride=1, padding=0) @@ -20,17 +19,18 @@ The rest is left unchanged. The model is evaluated on images from [the Tiny ImageNet dataset](https://huggingface.co/datasets/zh-plus/tiny-imagenet). -The `TinyImageNetProcessor` class in `utils_resnet.py` preprocesses the Tiny ImageNet dataset and aligns it with ImageNet labels for model evaluation. +The `TinyImageNetProcessor` class in `utils_resnet.py` preprocesses the Tiny ImageNet data-set and aligns it with ImageNet labels for model evaluation. ## Usage -1. Install a virtual python environment and activate it: +1. Install a virtual Python environment and activate it: ```bash python -m venv venv source venv/bin/activate ``` -2. Install concrete-ml: + +2. Install Concrete ML: ```bash pip install concrete-ml @@ -42,7 +42,6 @@ pip install concrete-ml pip install -r requirements.txt ``` - 4. Run the script: ```bash @@ -55,7 +54,6 @@ Example of output when running the script: python resnet_fhe.py --run_fhe ``` - ``` Accuracy of the ResNet18 model on the images: 56.00% Top-5 Accuracy of the ResNet18 model on the images: 82.00% @@ -68,9 +66,24 @@ FHE Simulation Top-5 Accuracy of the FHEResNet18 on the images: 75.00% Time taken for one FHE execution: 5482.5433 seconds ``` -## Timing +## Timings and Accuracy in FHE + +CPU machine: 196 cores CPU machine (hp7c from AWS) +GPU machine: TBD + +Summary of the accuracy evaluation on tinyImageNet (100 images): + +| w&a bits | p_error | Accuracy | Top-5 Accuracy | Runtime (hours) | Device | +| -------- | ------- | -------- | -------------- | --------------- | ------ | +| 6/6 | 0.05 | 50% | 75% | **1.52** | CPU | +| 6/6 | 0.05 | 50% | 75% | TBD | GPU | +| 6/7 | 0.05 | 53% | 76% | 2.2 | CPU | +| 6/7 | 0.005 | 57% | 74% | 5.2 | CPU | + +Note that the original model in fp32 achieved 56% accuracy and 82% top-5 accuracy. 
+ +Recommended configuration: 6/6 with p_error = 0.05 + +6/6 `n_bits` configuration: {"model_inputs": 8, "op_inputs": 6, "op_weights": 6, "model_outputs": 8} -| Device | Time (seconds) | -|--------|----------------| -| CPU | 5482.5433 | -| GPU | TBD | +6/7 `n_bits` configuration: {"model_inputs": 8, "op_inputs": 7, "op_weights": 6, "model_outputs": 8} diff --git a/use_case_examples/resnet/resnet.py b/use_case_examples/resnet/resnet.py index 69ee83dab..18dfa48fe 100644 --- a/use_case_examples/resnet/resnet.py +++ b/use_case_examples/resnet/resnet.py @@ -1,3 +1,8 @@ +""" +This file is a modification of the original ResNet implementation from: +https://github.com/pytorch/vision/blob/bf01bab6125c5f1152e4f336b470399e52a8559d/torchvision/models/resnet.py +""" + from functools import partial from typing import Any, Callable, List, Optional, Type, Union @@ -702,7 +707,7 @@ def resnet18_custom( Args: weights (:class:`~torchvision.models.ResNet18_Weights`, optional): The - pretrained weights to use. See + pre-trained weights to use. See :class:`~torchvision.models.ResNet18_Weights` below for more details, and possible values. By default, no pre-trained weights are used. diff --git a/use_case_examples/resnet/run_resnet18_fhe.py b/use_case_examples/resnet/run_resnet18_fhe.py index ecf5c12d5..ff3e75356 100644 --- a/use_case_examples/resnet/run_resnet18_fhe.py +++ b/use_case_examples/resnet/run_resnet18_fhe.py @@ -7,58 +7,54 @@ from concrete.fhe import Configuration from resnet import ResNet18_Weights, resnet18_custom from utils_resnet import TinyImageNetProcessor -import numpy as np from concrete.ml.torch.compile import compile_torch_model -parser = argparse.ArgumentParser(description="Run ResNet18 model with FHE execution.") -parser.add_argument("--run_fhe", action="store_true", help="Run the actual FHE execution.") -parser.add_argument( - "--export_statistics", action="store_true", help="Export the circuit statistics." 
-) -args = parser.parse_args() BASE_DIR = Path(__file__).resolve().parent - -# Load the ResNet18 model with pretrained weights -resnet18 = resnet18_custom(weights=ResNet18_Weights.IMAGENET1K_V1) - CALIBRATION_SAMPLES = 10 NUM_TEST_SAMPLES = 100 -imagenet_classes_path = BASE_DIR / "LOC_synset_mapping.txt" -processor = TinyImageNetProcessor(imagenet_classes_path) -all_images, all_labels = processor.get_image_label_tensors(num_samples=NUM_TEST_SAMPLES + CALIBRATION_SAMPLES) -# Split into calibration and test sets -calib_images, _ = all_images[:CALIBRATION_SAMPLES], all_labels[:CALIBRATION_SAMPLES] -images, labels = all_images[CALIBRATION_SAMPLES:], all_labels[CALIBRATION_SAMPLES:] +def load_model(): + # Load the ResNet18 model with pre-trained weights + return resnet18_custom(weights=ResNet18_Weights.IMAGENET1K_V1) + -# Forward pass through the model to get the predictions -with torch.no_grad(): - outputs = resnet18(images) +def load_data(): + imagenet_classes_path = BASE_DIR / "LOC_synset_mapping.txt" + processor = TinyImageNetProcessor(imagenet_classes_path) + all_images, all_labels = processor.get_image_label_tensors( + num_samples=NUM_TEST_SAMPLES + CALIBRATION_SAMPLES + ) + calib_images, _ = all_images[:CALIBRATION_SAMPLES], all_labels[:CALIBRATION_SAMPLES] + images, labels = all_images[CALIBRATION_SAMPLES:], all_labels[CALIBRATION_SAMPLES:] + return processor, calib_images, images, labels -# Compute and print accuracy -accuracy = processor.compute_accuracy(outputs, labels) -print(f"Accuracy of the ResNet18 model on the images: {accuracy*100:.2f}%") -topk_accuracy = processor.compute_topk_accuracy(outputs, labels, k=5) -print(f"Top-5 Accuracy of the ResNet18 model on the images: {topk_accuracy*100:.2f}%") +def evaluate_model(model, processor, images, labels): + with torch.no_grad(): + outputs = model(images) + accuracy = processor.compute_accuracy(outputs, labels) + topk_accuracy = processor.compute_topk_accuracy(outputs, labels, k=5) + print(f"Accuracy of the ResNet18 model on the images: {accuracy*100:.2f}%") + print(f"Top-5 Accuracy of the ResNet18 model on the images: {topk_accuracy*100:.2f}%") -# Enable TLU fusing to optimize the number of TLUs in the residual connections -config = Configuration(enable_tlu_fusing=True, print_tlu_fusing=False) -# Compile the model -print("Compiling the model...") -q_module = compile_torch_model( - resnet18, - torch_inputset=images, - n_bits={"model_inputs": 8, "op_inputs": 6, "op_weights": 6, "model_outputs": 8}, - rounding_threshold_bits={"n_bits": 7, "method": "APPROXIMATE"}, - p_error=0.05, - configuration=config, -) +def compile_model(model, images): + # Enable TLU fusing to optimize the number of TLUs in the residual connections + config = Configuration(enable_tlu_fusing=True, print_tlu_fusing=False) + print("Compiling the model...") + return compile_torch_model( + model, + torch_inputset=images, + n_bits={"model_inputs": 8, "op_inputs": 6, "op_weights": 6, "model_outputs": 8}, + rounding_threshold_bits={"n_bits": 7, "method": "APPROXIMATE"}, + p_error=0.05, + configuration=config, + ) -if args.export_statistics: + +def export_statistics(q_module): open("resnet.graph", "w").write(q_module.fhe_circuit.graph.format(show_locations=True)) open("resnet.mlir", "w").write(q_module.fhe_circuit.mlir) @@ -79,37 +75,64 @@ def make_serializable(obj): with open("resnet_statistics.json", "w") as f: json.dump(statistics, f) -print("Model compiled successfully.") - -# Forward pass with FHE disabled -with torch.no_grad(): - outputs_disable = 
q_module.forward(images.detach().numpy(), fhe="disable") -# Compute and print accuracy for quantized model -accuracy = processor.compute_accuracy(torch.from_numpy(outputs_disable), labels) -print(f"Quantized Model Accuracy of the FHEResNet18 on the images: {accuracy*100:.2f}%") -topk_accuracy = processor.compute_topk_accuracy(torch.from_numpy(outputs_disable), labels, k=5) -print(f"Quantized Model Top-5 Accuracy of the FHEResNet18 on the images: {topk_accuracy*100:.2f}%") - -# Forward pass with FHE simulation -with torch.no_grad(): - outputs_simulate = q_module.forward(images.detach().numpy(), fhe="simulate") - -# Compute and print accuracy for FHE simulation -accuracy = processor.compute_accuracy(torch.from_numpy(outputs_simulate), labels) -print(f"FHE Simulation Accuracy of the FHEResNet18 on the images: {accuracy*100:.2f}%") -topk_accuracy = processor.compute_topk_accuracy(torch.from_numpy(outputs_simulate), labels, k=5) -print(f"FHE Simulation Top-5 Accuracy of the FHEResNet18 on the images: {topk_accuracy*100:.2f}%") - -if args.run_fhe: - # Run FHE execution and measure time on a single image - # q_module.fhe_circuit.keygen() +def evaluate_model_cml(q_module, processor, images, labels, fhe): + assert fhe in ["disable", "simulate"], "fhe must be either 'disable' or 'simulate'" + + with torch.no_grad(): + outputs = q_module.forward(images.detach().numpy(), fhe=fhe) + accuracy = processor.compute_accuracy(torch.from_numpy(outputs), labels) + topk_accuracy = processor.compute_topk_accuracy(torch.from_numpy(outputs), labels, k=5) + if fhe == "simulate": + print("FHE simulation Accuracy of the FHEResNet18 on the images: " f"{accuracy*100:.2f}%") + print( + "FHE simulation Top-5 Accuracy of the FHEResNet18 on the images: " + f"{topk_accuracy*100:.2f}%" + ) + else: + print("Quantized Model Accuracy of the FHEResNet18 on the images: " f"{accuracy*100:.2f}%") + print( + "Quantized Model Top-5 Accuracy of the FHEResNet18 on the images: " + f"{topk_accuracy*100:.2f}%" + ) + + +def run_fhe_execution(q_module, images): single_image = images[0:1].detach().numpy() - start = time.time() - fhe_output = q_module.forward(single_image, fhe="execute") + _ = q_module.forward(single_image, fhe="execute") end = time.time() print(f"Time taken for one FHE execution: {end - start:.4f} seconds") -else: - print("FHE execution was not run. Use --run_fhe to enable it.") \ No newline at end of file + +def main(): + parser = argparse.ArgumentParser(description="Run ResNet18 model with FHE execution.") + parser.add_argument("--run_fhe", action="store_true", help="Run the actual FHE execution.") + parser.add_argument( + "--export_statistics", action="store_true", help="Export the circuit statistics." + ) + args = parser.parse_args() + + resnet18 = load_model() + processor, calib_images, images, labels = load_data() + + evaluate_model(resnet18, processor, images, labels) + + q_module = compile_model(resnet18, calib_images) + + if args.export_statistics: + export_statistics(q_module) + + print("Model compiled successfully.") + + evaluate_model_cml(q_module, processor, images, labels, fhe="disable") + evaluate_model_cml(q_module, processor, images, labels, fhe="simulate") + + if args.run_fhe: + run_fhe_execution(q_module, images) + else: + print("FHE execution was not run. 
Use --run_fhe to enable it.") + + +if __name__ == "__main__": + main() diff --git a/use_case_examples/resnet/utils_resnet.py b/use_case_examples/resnet/utils_resnet.py index d48619e7d..39aef5384 100644 --- a/use_case_examples/resnet/utils_resnet.py +++ b/use_case_examples/resnet/utils_resnet.py @@ -12,7 +12,7 @@ class TinyImageNetProcessor: def __init__(self, imagenet_classes_path): """Initializes the processor with the path to ImageNet classes and loads the dataset. - + Args: imagenet_classes_path (str): Path to the file containing ImageNet class labels. """ @@ -22,7 +22,7 @@ def __init__(self, imagenet_classes_path): def _load_and_map_labels(self): """Loads ImageNet labels from a file and creates mappings with the dataset labels. - + Returns: tuple: Two dictionaries for label mapping between ImageNet and Tiny ImageNet. """ @@ -33,42 +33,45 @@ def _load_and_map_labels(self): raise FileNotFoundError("The ImageNet classes file was not found.") label_to_imagenet_idx = {line.split()[0]: idx for idx, line in enumerate(lines)} - tiny_labels = {label: idx for idx, label in enumerate(self.dataset["train"].features["label"].names)} + tiny_labels = { + label: idx for idx, label in enumerate(self.dataset["train"].features["label"].names) + } common_labels = set(label_to_imagenet_idx.keys()) & set(tiny_labels.keys()) - imagenet_to_tiny = {label_to_imagenet_idx[label]: tiny_labels[label] for label in common_labels} + imagenet_to_tiny = { + label_to_imagenet_idx[label]: tiny_labels[label] for label in common_labels + } tiny_to_imagenet = {v: k for k, v in imagenet_to_tiny.items()} return imagenet_to_tiny, tiny_to_imagenet def get_image_label_tensors(self, num_samples=100): """Fetches and preprocesses a specified number of image samples. - + Args: num_samples (int): Number of samples to process. Returns: tuple: Tensors of images and their corresponding labels. """ - transform = transforms.Compose([ - transforms.Resize(256), - transforms.CenterCrop(224), - transforms.ToTensor(), - transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) - ]) + transform = transforms.Compose( + [ + transforms.Resize(256), + transforms.CenterCrop(224), + transforms.ToTensor(), + transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]), + ] + ) rgb_samples = [] - attempt = 0 - # Shuffle and select a subset of valid samples to find RGB images with valid labels - while len(rgb_samples) < num_samples and attempt < 10 * num_samples: - valid_samples = self.dataset["valid"].shuffle(seed=attempt).select(range(num_samples * 2)) - for sample in valid_samples: - if sample["image"].mode == "RGB" and sample["label"] in self.target_imagenet_to_tiny.values(): - rgb_samples.append((transform(sample["image"]), sample["label"])) - if len(rgb_samples) == num_samples: - break - attempt += 1 - + for sample in self.dataset["valid"].shuffle(seed=0): + if ( + sample["image"].mode == "RGB" + and sample["label"] in self.target_imagenet_to_tiny.values() + ): + rgb_samples.append((transform(sample["image"]), sample["label"])) + if len(rgb_samples) == num_samples: + break images, labels = zip(*rgb_samples) if rgb_samples else ([], []) return torch.stack(images), torch.tensor(labels) @@ -82,11 +85,15 @@ def compute_accuracy(self, outputs, labels): Returns: float: Accuracy metric. 
""" - imagenet_labels = torch.tensor([self.target_tiny_to_imagenet[label.item()] for label in labels]) + imagenet_labels = torch.tensor( + [self.target_tiny_to_imagenet[label.item()] for label in labels] + ) relevant_indices = [self.target_tiny_to_imagenet[label.item()] for label in labels] filtered_outputs = outputs[:, relevant_indices] - predicted_labels = torch.tensor([relevant_indices[idx] for idx in filtered_outputs.argmax(dim=-1)]) + predicted_labels = torch.tensor( + [relevant_indices[idx] for idx in filtered_outputs.argmax(dim=-1)] + ) return (predicted_labels == imagenet_labels).float().mean().item() def compute_topk_accuracy(self, outputs, labels, k=5): @@ -100,12 +107,16 @@ def compute_topk_accuracy(self, outputs, labels, k=5): Returns: float: Top-k accuracy metric. """ - imagenet_labels = torch.tensor([self.target_tiny_to_imagenet[label.item()] for label in labels]) + imagenet_labels = torch.tensor( + [self.target_tiny_to_imagenet[label.item()] for label in labels] + ) relevant_indices = [self.target_tiny_to_imagenet[label.item()] for label in labels] filtered_outputs = outputs[:, relevant_indices] topk_preds = filtered_outputs.topk(k, dim=-1).indices - topk_labels = torch.tensor([[relevant_indices[pred] for pred in preds] for preds in topk_preds]) + topk_labels = torch.tensor( + [[relevant_indices[pred] for pred in preds] for preds in topk_preds] + ) correct = sum(imagenet_labels[i] in topk_labels[i] for i in range(len(imagenet_labels))) return correct / len(imagenet_labels) From 21e84b9b56489e46e781a8f59f7ce272c55701cf Mon Sep 17 00:00:00 2001 From: jfrery Date: Thu, 20 Jun 2024 12:18:21 +0200 Subject: [PATCH 5/9] chore: add gpu option --- use_case_examples/resnet/run_resnet18_fhe.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/use_case_examples/resnet/run_resnet18_fhe.py b/use_case_examples/resnet/run_resnet18_fhe.py index ff3e75356..aa34e3242 100644 --- a/use_case_examples/resnet/run_resnet18_fhe.py +++ b/use_case_examples/resnet/run_resnet18_fhe.py @@ -40,9 +40,9 @@ def evaluate_model(model, processor, images, labels): print(f"Top-5 Accuracy of the ResNet18 model on the images: {topk_accuracy*100:.2f}%") -def compile_model(model, images): +def compile_model(model, images, use_gpu=False): # Enable TLU fusing to optimize the number of TLUs in the residual connections - config = Configuration(enable_tlu_fusing=True, print_tlu_fusing=False) + config = Configuration(enable_tlu_fusing=True, print_tlu_fusing=False, use_gpu=use_gpu) print("Compiling the model...") return compile_torch_model( model, @@ -111,6 +111,9 @@ def main(): parser.add_argument( "--export_statistics", action="store_true", help="Export the circuit statistics." ) + parser.add_argument( + "--use_gpu", action="store_true", help="Use the available GPU at FHE runtime." 
+ ) args = parser.parse_args() resnet18 = load_model() @@ -118,7 +121,7 @@ def main(): evaluate_model(resnet18, processor, images, labels) - q_module = compile_model(resnet18, calib_images) + q_module = compile_model(resnet18, calib_images, use_gpu=args.use_gpu) if args.export_statistics: export_statistics(q_module) From cc589bb2492ac4ec5fa65e46c1750aece119a42d Mon Sep 17 00:00:00 2001 From: jfrery Date: Fri, 21 Jun 2024 09:58:06 +0200 Subject: [PATCH 6/9] chore: move to imagenet - remove tiny imagenet --- use_case_examples/resnet/README.md | 66 +++-- use_case_examples/resnet/run_resnet18_fhe.py | 296 +++++++++++++++---- use_case_examples/resnet/utils_resnet.py | 189 ++++++------ 3 files changed, 375 insertions(+), 176 deletions(-) diff --git a/use_case_examples/resnet/README.md b/use_case_examples/resnet/README.md index 0002f6f7a..5ba82abd2 100644 --- a/use_case_examples/resnet/README.md +++ b/use_case_examples/resnet/README.md @@ -2,7 +2,7 @@ ## Overview -This project executes the ResNet18 image classification model using Fully Homomorphic Encryption (FHE) with Concrete ML. The model is adapted for FHE compatibility and tested on a small subset of tiny-imagenet (up-sampled) images. +This project executes the ResNet18 image classification model using Fully Homomorphic Encryption (FHE) with Concrete ML. The model is adapted for FHE compatibility and tested on a small subset of imagenet images. ## ResNet18 @@ -17,9 +17,9 @@ The rest is left unchanged. ## Evaluation dataset -The model is evaluated on images from [the Tiny ImageNet dataset](https://huggingface.co/datasets/zh-plus/tiny-imagenet). +The model is evaluated on images from the [ImageNet-1k dataset](https://huggingface.co/datasets/timm/imagenet-1k-wds). -The `TinyImageNetProcessor` class in `utils_resnet.py` preprocesses the Tiny ImageNet data-set and aligns it with ImageNet labels for model evaluation. +The `ImageNetProcessor` class in `utils_resnet.py` preprocesses the ImageNet validation set for model evaluation. It uses a subset of the validation data to ensure efficient processing and evaluation. ## Usage @@ -45,9 +45,25 @@ pip install -r requirements.txt 4. Run the script: ```bash -python resnet_fhe.py [--run_fhe] [--export_statistics] +python run_resnet18_fhe.py [--run_fhe] [--export_statistics] [--use_gpu] [--run_experiment] [--dataset_cache_dir ] [--num_images ] ``` +The script `run_resnet18_fhe.py` accepts several command-line arguments to control its behavior: + +- `--run_fhe`: runs the actual FHE execution of the ResNet18 model. If not set, the script will run the model without FHE. + +- `--export_statistics`: exports the circuit statistics after running the model. This can be useful for analyzing the performance and characteristics of the FHE execution. + +- `--use_gpu`: utilizes the available GPU for FHE runtime, potentially speeding up the execution. If not set, the script will run on the CPU. + +- `--run_experiment`: runs experiments with different `n_bits` and `rounding_threshold_bits` configurations. This can help in finding the optimal settings for the model. + +- `--dataset_cache_dir `: specifies the path to the directory where the dataset is cached. If not provided, the dataset will be downloaded and cached in the default location. + +- `--num_images `: specifies the number of images to process in the FHE execution. The default value is 1. Increasing this number will process more images but may take longer to execute. 
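For reference, these flags all feed into a single Concrete ML compilation call. The sketch below is illustrative rather than the script itself: it assumes `resnet18` is the FHE-friendly model from `resnet.py` and `calib_images` is a preprocessed `(N, 3, 224, 224)` calibration tensor, and it mirrors what `run_resnet18_fhe.py` does when `--use_gpu` is set.

```python
import numpy as np
from concrete.fhe import Configuration
from concrete.ml.torch.compile import compile_torch_model

# TLU fusing reduces the number of table lookups in the residual connections;
# use_gpu=True is only meaningful on a machine with a supported GPU.
config = Configuration(enable_tlu_fusing=True, print_tlu_fusing=False, use_gpu=True)

q_module = compile_torch_model(
    resnet18,                     # assumed: FHE-friendly ResNet18 from resnet.py
    torch_inputset=calib_images,  # assumed: (N, 3, 224, 224) calibration tensor
    n_bits={"model_inputs": 8, "op_inputs": 7, "op_weights": 7, "model_outputs": 9},
    rounding_threshold_bits={"n_bits": 7, "method": "APPROXIMATE"},
    p_error=0.05,
    configuration=config,
)

# Quick sanity check with FHE simulation before launching a real FHE run.
sample = calib_images[:1].detach().numpy()
top5 = np.argsort(q_module.forward(sample, fhe="simulate").flatten())[-5:][::-1]
print("Simulation top 5 labels:", ", ".join(map(str, top5)))
```

Swapping `fhe="simulate"` for `fhe="execute"` triggers the real encrypted inference, which is what `--run_fhe` does image by image.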
+ + + Example of output when running the script: ```bash @@ -55,15 +71,22 @@ python resnet_fhe.py --run_fhe ``` ``` -Accuracy of the ResNet18 model on the images: 56.00% -Top-5 Accuracy of the ResNet18 model on the images: 82.00% -Compiling the model... +Accuracy of the ResNet18 model on the images: 67.00% +Top-5 Accuracy of the ResNet18 model on the images: 87.00% +Compiling the model with compile_torch_model... Model compiled successfully. -Quantized Model Accuracy of the FHEResNet18 on the images: 54.00% -Quantized Model Top-5 Accuracy of the FHEResNet18 on the images: 77.00% -FHE Simulation Accuracy of the FHEResNet18 on the images: 53.00% -FHE Simulation Top-5 Accuracy of the FHEResNet18 on the images: 75.00% -Time taken for one FHE execution: 5482.5433 seconds +Quantized Model Accuracy of the FHEResNet18 on the images: 67.00% +Quantized Model Top-5 Accuracy of the FHEResNet18 on the images: 87.00% +FHE simulation Accuracy of the FHEResNet18 on the images: 66.00% +FHE simulation Top-5 Accuracy of the FHEResNet18 on the images: 87.00% +Processing 1 image(s)... + +Image 1: + Running FHE execution... + FHE execution completed in 811.8710 seconds + FHE top 5 labels: 636, 588, 502, 774, 459 + Running simulation... + Simulation top 5 labels: 636, 588, 502, 774, 459 ``` ## Timings and Accuracy in FHE @@ -71,19 +94,18 @@ Time taken for one FHE execution: 5482.5433 seconds CPU machine: 196 cores CPU machine (hp7c from AWS) GPU machine: TBD -Summary of the accuracy evaluation on tinyImageNet (100 images): +Summary of the accuracy evaluation on ImageNet (100 images): -| w&a bits | p_error | Accuracy | Top-5 Accuracy | Runtime (hours) | Device | +| w&a bits | p_error | Accuracy | Top-5 Accuracy | Runtime | Device | | -------- | ------- | -------- | -------------- | --------------- | ------ | -| 6/6 | 0.05 | 50% | 75% | **1.52** | CPU | -| 6/6 | 0.05 | 50% | 75% | TBD | GPU | -| 6/7 | 0.05 | 53% | 76% | 2.2 | CPU | -| 6/7 | 0.005 | 57% | 74% | 5.2 | CPU | +| fp32 | - | 67% | 87% | - | - | +| 6/6 | 0.05 | 55% | 78% | 56 min | GPU | +| 6/6 | 0.05 | 55% | 78% | 1 h 31 min | CPU | +| 7/7 | 0.05 | **66%** | **87%** | **2 h 12 min** | CPU | -Note that the original model in fp32 achieved 56% accuracy and 82% top-5 accuracy. -Recommended configuration: 6/6 with p_error = 0.05 +6/6 `n_bits` configuration: {"model_inputs": 8, "op_inputs": 6, "op_weights": 6, "model_outputs": 9} -6/6 `n_bits` configuration: {"model_inputs": 8, "op_inputs": 6, "op_weights": 6, "model_outputs": 8} +7/7 `n_bits` configuration: {"model_inputs": 8, "op_inputs": 7, "op_weights": 7, "model_outputs": 9} -6/7 `n_bits` configuration: {"model_inputs": 8, "op_inputs": 7, "op_weights": 6, "model_outputs": 8} +For each setting, we use a the following config for the `rounding_threshold_bits`: `{"n_bits": 7, "method": "APPROXIMATE"}`. 
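Both bit-width settings can be compared without generating FHE keys: `build_quantized_module`, which the script's `--run_experiment` mode relies on, quantizes the network and evaluates it in the clear. A minimal sketch, assuming `resnet18`, `calib_images` and a NumPy batch `test_images` are prepared as in the script:

```python
from concrete.ml.torch.compile import build_quantized_module

# Compare the 6/6 and 7/7 rows of the table above with a quantization-only evaluation.
for op_bits in (6, 7):
    q_module = build_quantized_module(
        resnet18,
        torch_inputset=calib_images,
        n_bits={
            "model_inputs": 8,
            "op_inputs": op_bits,
            "op_weights": op_bits,
            "model_outputs": 9,
        },
        rounding_threshold_bits={"n_bits": 7, "method": "APPROXIMATE"},
    )
    # fhe="disable" runs the quantized model in the clear: no key generation needed.
    outputs = q_module.forward(test_images, fhe="disable")
    print(f"{op_bits}/{op_bits} bits -> output shape {outputs.shape}")
```

The quantization-only module is enough for this accuracy comparison; the FHE simulation and execution numbers reported above require the full `compile_torch_model` path.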
diff --git a/use_case_examples/resnet/run_resnet18_fhe.py b/use_case_examples/resnet/run_resnet18_fhe.py index aa34e3242..0004c0951 100644 --- a/use_case_examples/resnet/run_resnet18_fhe.py +++ b/use_case_examples/resnet/run_resnet18_fhe.py @@ -3,12 +3,14 @@ import time from pathlib import Path +import matplotlib.pyplot as plt +import numpy as np import torch from concrete.fhe import Configuration from resnet import ResNet18_Weights, resnet18_custom -from utils_resnet import TinyImageNetProcessor +from utils_resnet import ImageNetProcessor -from concrete.ml.torch.compile import compile_torch_model +from concrete.ml.torch.compile import build_quantized_module, compile_torch_model BASE_DIR = Path(__file__).resolve().parent CALIBRATION_SAMPLES = 10 @@ -21,37 +23,81 @@ def load_model(): def load_data(): - imagenet_classes_path = BASE_DIR / "LOC_synset_mapping.txt" - processor = TinyImageNetProcessor(imagenet_classes_path) - all_images, all_labels = processor.get_image_label_tensors( - num_samples=NUM_TEST_SAMPLES + CALIBRATION_SAMPLES - ) - calib_images, _ = all_images[:CALIBRATION_SAMPLES], all_labels[:CALIBRATION_SAMPLES] - images, labels = all_images[CALIBRATION_SAMPLES:], all_labels[CALIBRATION_SAMPLES:] - return processor, calib_images, images, labels + processor = ImageNetProcessor() + calib_images = processor.get_calibration_data(CALIBRATION_SAMPLES) + return processor, calib_images -def evaluate_model(model, processor, images, labels): +def evaluate_model(model, processor): with torch.no_grad(): - outputs = model(images) - accuracy = processor.compute_accuracy(outputs, labels) - topk_accuracy = processor.compute_topk_accuracy(outputs, labels, k=5) - print(f"Accuracy of the ResNet18 model on the images: {accuracy*100:.2f}%") - print(f"Top-5 Accuracy of the ResNet18 model on the images: {topk_accuracy*100:.2f}%") - - -def compile_model(model, images, use_gpu=False): - # Enable TLU fusing to optimize the number of TLUs in the residual connections - config = Configuration(enable_tlu_fusing=True, print_tlu_fusing=False, use_gpu=use_gpu) - print("Compiling the model...") - return compile_torch_model( - model, - torch_inputset=images, - n_bits={"model_inputs": 8, "op_inputs": 6, "op_weights": 6, "model_outputs": 8}, - rounding_threshold_bits={"n_bits": 7, "method": "APPROXIMATE"}, - p_error=0.05, - configuration=config, - ) + device = "cpu" # torch.device("cuda" if torch.cuda.is_available() else "cpu") + model.to(device) + + outputs = [] + all_labels = [] + for batch in processor.dataloader: + batch_images = batch["pixel_values"].to(device) + batch_outputs = model(batch_images) + outputs.append(batch_outputs) + all_labels.append(batch["labels"]) + + outputs = torch.cat(outputs) + outputs = outputs.cpu() + all_labels = torch.cat(all_labels) + all_labels = all_labels.cpu() + + accuracy = processor.accuracy(outputs, all_labels) + topk_accuracy = processor.accuracy_top5(outputs, all_labels) + print(f"Accuracy of the ResNet18 model on the images: {accuracy:.2f}%") + print(f"Top-5 Accuracy of the ResNet18 model on the images: {topk_accuracy:.2f}%") + + +def compile_model( + model, images, n_bits, rounding_threshold_bits=None, fhe_mode="disable", use_gpu=False +): + """ + Compile the model using either build_quantized_module or compile_torch_model. + + Args: + model: The PyTorch model to compile. + images: The calibration images. + n_bits: The number of bits for quantization (int). Can be a dictionary: + e.g. 
{ + "model_inputs": 8, + "op_inputs": 6, + "op_weights": 6, + "model_outputs": 8 + } + rounding_threshold_bits: The rounding threshold bits. + fhe_mode: The FHE mode ('disable' or 'simulate'). + use_gpu: Whether to use GPU for compilation. + + Returns: + The compiled quantized module. + """ + compile_config = { + "n_bits": n_bits, + "rounding_threshold_bits": ( + {"n_bits": rounding_threshold_bits, "method": "APPROXIMATE"} + if rounding_threshold_bits is not None + else None + ), + } + + if fhe_mode != "disable": + config = Configuration(enable_tlu_fusing=True, print_tlu_fusing=False, use_gpu=use_gpu) + compile_config.update( + { + "p_error": 0.05, + "configuration": config, + } + ) + compile_func = compile_torch_model + else: + compile_func = build_quantized_module + + print(f"Compiling the model with {compile_func.__name__}...") + return compile_func(model, torch_inputset=images, **compile_config) def export_statistics(q_module): @@ -76,33 +122,145 @@ def make_serializable(obj): json.dump(statistics, f) -def evaluate_model_cml(q_module, processor, images, labels, fhe): +def evaluate_model_cml(q_module, processor, fhe): assert fhe in ["disable", "simulate"], "fhe must be either 'disable' or 'simulate'" with torch.no_grad(): - outputs = q_module.forward(images.detach().numpy(), fhe=fhe) - accuracy = processor.compute_accuracy(torch.from_numpy(outputs), labels) - topk_accuracy = processor.compute_topk_accuracy(torch.from_numpy(outputs), labels, k=5) + outputs = [] + all_labels = [] + for batch in processor.dataloader: + batch_images = batch["pixel_values"].detach().numpy() + batch_outputs = q_module.forward(batch_images, fhe=fhe) + outputs.append(batch_outputs) + all_labels.append(batch["labels"].detach().numpy()) + + outputs = torch.from_numpy(np.concatenate(outputs)) + all_labels = torch.from_numpy(np.concatenate(all_labels)) + + accuracy = processor.accuracy(outputs, all_labels) + topk_accuracy = processor.accuracy_top5(outputs, all_labels) if fhe == "simulate": - print("FHE simulation Accuracy of the FHEResNet18 on the images: " f"{accuracy*100:.2f}%") + print("FHE simulation Accuracy of the FHEResNet18 on the images: " f"{accuracy:.2f}%") print( "FHE simulation Top-5 Accuracy of the FHEResNet18 on the images: " - f"{topk_accuracy*100:.2f}%" + f"{topk_accuracy:.2f}%" ) else: - print("Quantized Model Accuracy of the FHEResNet18 on the images: " f"{accuracy*100:.2f}%") + print("Quantized Model Accuracy of the FHEResNet18 on the images: " f"{accuracy:.2f}%") print( "Quantized Model Top-5 Accuracy of the FHEResNet18 on the images: " - f"{topk_accuracy*100:.2f}%" + f"{topk_accuracy:.2f}%" ) -def run_fhe_execution(q_module, images): - single_image = images[0:1].detach().numpy() - start = time.time() - _ = q_module.forward(single_image, fhe="execute") - end = time.time() - print(f"Time taken for one FHE execution: {end - start:.4f} seconds") +def run_fhe_execution(q_module, images, num_images=1): + images = images[:num_images].detach().numpy() + n_features = images.shape[1:] # Get the shape of features (channels, height, width) + + def get_top5_labels(output): + return np.argsort(output.flatten())[-5:][::-1] + + print(f"Processing {num_images} image(s)...") + total_fhe_time = 0 + + for i in range(num_images): + print(f"\nImage {i+1}:") + img = images[i].reshape(1, *n_features) # Reshape to (1, *n_features) + + print(" Running FHE execution...") + start = time.time() + output_fhe = q_module.forward(img, fhe="execute") + fhe_end = time.time() + fhe_time = fhe_end - start + total_fhe_time += 
fhe_time + print(f" FHE execution completed in {fhe_time:.4f} seconds") + + fhe_top5 = get_top5_labels(output_fhe) + print(" FHE top 5 labels:", ", ".join(map(str, fhe_top5))) + + print(" Running simulation...") + output_simulate = q_module.forward(img, fhe="simulate") + + sim_top5 = get_top5_labels(output_simulate) + print(" Simulation top 5 labels:", ", ".join(map(str, sim_top5))) + + print(f"\nTotal FHE execution time for {num_images} image(s): {total_fhe_time:.4f} seconds") + + +def run_experiment(resnet18, calib_images, processor, fhe_mode="disable"): + + # Define ranges for n_bits and rounding_threshold_bits + n_bits_range = range(2, 16) + rounding_threshold_bits_range = list(range(2, 9)) + [None] # 2 to 8 and None + + # Initialize a dictionary to store accuracies for each combination + accuracies = {} + + total_combinations = len(n_bits_range) * len(rounding_threshold_bits_range) + current_combination = 0 + + # Loop over the ranges of n_bits and rounding_threshold_bits + for n_bits in n_bits_range: + for rounding_threshold_bits in rounding_threshold_bits_range: + current_combination += 1 + print(f"\nProcessing combination {current_combination}/{total_combinations}") + print(f"n_bits: {n_bits}, rounding_threshold_bits: {rounding_threshold_bits}") + + q_module = compile_model( + resnet18, calib_images, n_bits, rounding_threshold_bits, fhe_mode + ) + + outputs = [] + all_labels = [] + + print("Evaluating model...") + for batch in processor.dataloader: + batch_images = batch["pixel_values"].detach().numpy() + batch_outputs = q_module.forward(batch_images, fhe=fhe_mode) + outputs.append(batch_outputs) + all_labels.append(batch["labels"].detach().numpy()) + + outputs = torch.from_numpy(np.concatenate(outputs)) + all_labels = torch.from_numpy(np.concatenate(all_labels)) + + # Calculate and store accuracy + fhe_accuracy = processor.accuracy(outputs, all_labels) + accuracies[(n_bits, rounding_threshold_bits)] = fhe_accuracy + print(f"Accuracy: {fhe_accuracy:.4f}") + + # Convert accuracies to a 2D array for plotting + accuracy_matrix = np.zeros((len(n_bits_range), len(rounding_threshold_bits_range))) + for i, n_bits in enumerate(n_bits_range): + for j, rounding_threshold_bits in enumerate(rounding_threshold_bits_range): + accuracy_matrix[i, j] = accuracies[(n_bits, rounding_threshold_bits)] + + # Save the accuracy matrix to disk + np.save("accuracy_matrix.npy", accuracy_matrix) + + print("\nGenerating plot...") + + # Plotting + fig, ax = plt.subplots(figsize=(10, 8)) + cax = ax.matshow(accuracy_matrix, cmap="viridis") + fig.colorbar(cax) + + # Set ticks and labels + ax.set_xticks(range(len(rounding_threshold_bits_range))) + ax.set_xticklabels([str(x) for x in rounding_threshold_bits_range], rotation=45) + ax.set_yticks(range(len(n_bits_range))) + ax.set_yticklabels([str(x) for x in n_bits_range]) + ax.set_xlabel("Rounding Threshold Bits") + ax.set_ylabel("N Bits") + ax.set_title(f"Accuracy of FHE ({fhe_mode})") + + # Annotate each cell with the accuracy percentage + for i in range(len(n_bits_range)): + for j in range(len(rounding_threshold_bits_range)): + ax.text(j, i, f"{accuracy_matrix[i, j]:.2f}", va="center", ha="center", color="white") + + plt.tight_layout() + plt.savefig("accuracy_matrix.png", dpi=300) + print("Plot saved as accuracy_matrix.png") def main(): @@ -114,25 +272,57 @@ def main(): parser.add_argument( "--use_gpu", action="store_true", help="Use the available GPU at FHE runtime." 
) + parser.add_argument( + "--run_experiment", + action="store_true", + help="Run the experiment with different n_bits and rounding_threshold_bits.", + ) + parser.add_argument( + "--dataset_cache_dir", + type=str, + default=None, + help="Path to the directory where the dataset is cached.", + ) + parser.add_argument( + "--num_images", + type=int, + default=1, + help="Number of images to process in FHE execution (default: 1)", + ) args = parser.parse_args() resnet18 = load_model() - processor, calib_images, images, labels = load_data() + processor = ImageNetProcessor( + NUM_TEST_SAMPLES, CALIBRATION_SAMPLES, cache_dir=args.dataset_cache_dir + ) + calib_images = processor.get_calibration_tensor() - evaluate_model(resnet18, processor, images, labels) + evaluate_model(resnet18, processor) - q_module = compile_model(resnet18, calib_images, use_gpu=args.use_gpu) + if args.run_experiment: + # Get the test images and labels + run_experiment(resnet18, calib_images, processor) + else: + q_module = compile_model( + resnet18, + calib_images, + n_bits={"model_inputs": 8, "op_inputs": 7, "op_weights": 7, "model_outputs": 9}, + rounding_threshold_bits=7, + fhe_mode="simulate", + use_gpu=args.use_gpu, + ) - if args.export_statistics: - export_statistics(q_module) + if args.export_statistics: + export_statistics(q_module) - print("Model compiled successfully.") + print("Model compiled successfully.") - evaluate_model_cml(q_module, processor, images, labels, fhe="disable") - evaluate_model_cml(q_module, processor, images, labels, fhe="simulate") + evaluate_model_cml(q_module, processor, fhe="disable") + evaluate_model_cml(q_module, processor, fhe="simulate") if args.run_fhe: - run_fhe_execution(q_module, images) + num_images = args.num_images + run_fhe_execution(q_module, calib_images, num_images) else: print("FHE execution was not run. Use --run_fhe to enable it.") diff --git a/use_case_examples/resnet/utils_resnet.py b/use_case_examples/resnet/utils_resnet.py index 39aef5384..3462996c5 100644 --- a/use_case_examples/resnet/utils_resnet.py +++ b/use_case_examples/resnet/utils_resnet.py @@ -1,122 +1,109 @@ import torch from datasets import load_dataset +from torch.utils.data import DataLoader from torchvision import transforms -class TinyImageNetProcessor: - """Processor for Tiny ImageNet dataset to align it with ImageNet labels for model evaluation. - - It preprocesses images to ImageNet standards, maps labels between Tiny ImageNet and ImageNet, - and evaluates model predictions with these mappings. - """ - - def __init__(self, imagenet_classes_path): - """Initializes the processor with the path to ImageNet classes and loads the dataset. - - Args: - imagenet_classes_path (str): Path to the file containing ImageNet class labels. - """ - self.imagenet_classes_path = imagenet_classes_path - self.dataset = load_dataset("zh-plus/tiny-imagenet") - self.target_imagenet_to_tiny, self.target_tiny_to_imagenet = self._load_and_map_labels() - - def _load_and_map_labels(self): - """Loads ImageNet labels from a file and creates mappings with the dataset labels. - - Returns: - tuple: Two dictionaries for label mapping between ImageNet and Tiny ImageNet. 
- """ - try: - with open(self.imagenet_classes_path, "r") as file: - lines = file.readlines() - except IOError: - raise FileNotFoundError("The ImageNet classes file was not found.") - - label_to_imagenet_idx = {line.split()[0]: idx for idx, line in enumerate(lines)} - tiny_labels = { - label: idx for idx, label in enumerate(self.dataset["train"].features["label"].names) - } - - common_labels = set(label_to_imagenet_idx.keys()) & set(tiny_labels.keys()) - imagenet_to_tiny = { - label_to_imagenet_idx[label]: tiny_labels[label] for label in common_labels - } - tiny_to_imagenet = {v: k for k, v in imagenet_to_tiny.items()} - - return imagenet_to_tiny, tiny_to_imagenet - - def get_image_label_tensors(self, num_samples=100): - """Fetches and preprocesses a specified number of image samples. +class ImageNetProcessor: + def __init__( + self, + num_samples=1000, + calibration_samples=100, + batch_size=32, + num_workers=4, + image_size=224, + seed=42, + cache_dir=None, + ): + self.num_samples = num_samples + self.calibration_samples = calibration_samples + self.batch_size = batch_size + self.num_workers = num_workers + self.seed = seed + + # Set the global seed for Torch + torch.manual_seed(self.seed) + + # Load the validation set in streaming mode + dataset = load_dataset( + "timm/imagenet-1k-wds", split="validation", streaming=True, cache_dir=cache_dir + ) - Args: - num_samples (int): Number of samples to process. + # Shuffle the dataset and take required samples + shuffled_dataset = dataset.shuffle(seed=seed) + self.main_dataset = shuffled_dataset.take(num_samples) + self.calibration_dataset = shuffled_dataset.skip(num_samples).take(calibration_samples) - Returns: - tuple: Tensors of images and their corresponding labels. - """ - transform = transforms.Compose( + # Define the transforms + self.transform = transforms.Compose( [ transforms.Resize(256), - transforms.CenterCrop(224), + transforms.CenterCrop(image_size), transforms.ToTensor(), transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]), ] ) - rgb_samples = [] - for sample in self.dataset["valid"].shuffle(seed=0): - if ( - sample["image"].mode == "RGB" - and sample["label"] in self.target_imagenet_to_tiny.values() - ): - rgb_samples.append((transform(sample["image"]), sample["label"])) - if len(rgb_samples) == num_samples: - break - images, labels = zip(*rgb_samples) if rgb_samples else ([], []) - return torch.stack(images), torch.tensor(labels) - - def compute_accuracy(self, outputs, labels): - """Computes the accuracy of model outputs compared to true labels. - - Args: - outputs (torch.Tensor): Model outputs. - labels (torch.Tensor): True labels. - - Returns: - float: Accuracy metric. - """ - imagenet_labels = torch.tensor( - [self.target_tiny_to_imagenet[label.item()] for label in labels] - ) - relevant_indices = [self.target_tiny_to_imagenet[label.item()] for label in labels] - filtered_outputs = outputs[:, relevant_indices] + # Create the main dataloader + self.dataloader = self._create_dataloader(self.main_dataset) - predicted_labels = torch.tensor( - [relevant_indices[idx] for idx in filtered_outputs.argmax(dim=-1)] - ) - return (predicted_labels == imagenet_labels).float().mean().item() + # Create the calibration dataloader + self.calibration_dataloader = self._create_dataloader(self.calibration_dataset) - def compute_topk_accuracy(self, outputs, labels, k=5): - """Computes top-k accuracy of the model outputs. 
+ def preprocess(self, example): + example["pixel_values"] = self.transform(example["jpg"].convert("RGB")) + return example - Args: - outputs (torch.Tensor): Model outputs. - labels (torch.Tensor): True labels. - k (int): Top k predictions to consider. + def get_calibration_tensor(self): + # Process all calibration samples + calibration_samples = [self.preprocess(example) for example in self.calibration_dataset] - Returns: - float: Top-k accuracy metric. - """ - imagenet_labels = torch.tensor( - [self.target_tiny_to_imagenet[label.item()] for label in labels] - ) - relevant_indices = [self.target_tiny_to_imagenet[label.item()] for label in labels] - filtered_outputs = outputs[:, relevant_indices] + # Stack all preprocessed images into a single tensor + calibration_tensor = torch.stack([sample["pixel_values"] for sample in calibration_samples]) + + return calibration_tensor + + def _create_dataloader(self, dataset): + def collate_fn(examples): + processed_examples = [self.preprocess(example) for example in examples] + pixel_values = torch.stack([example["pixel_values"] for example in processed_examples]) + labels = torch.tensor([example["cls"] for example in processed_examples]) + return {"pixel_values": pixel_values, "labels": labels} + + # Create a Generator with a fixed seed + generator = torch.Generator() + generator.manual_seed(self.seed) - topk_preds = filtered_outputs.topk(k, dim=-1).indices - topk_labels = torch.tensor( - [[relevant_indices[pred] for pred in preds] for preds in topk_preds] + return DataLoader( + list(dataset), + batch_size=self.batch_size, + num_workers=self.num_workers, + collate_fn=collate_fn, + shuffle=True, + generator=generator, + worker_init_fn=lambda worker_id: torch.manual_seed(self.seed + worker_id), ) - correct = sum(imagenet_labels[i] in topk_labels[i] for i in range(len(imagenet_labels))) - return correct / len(imagenet_labels) + @staticmethod + def compute_accuracy(outputs, targets, topk=(1,)): + with torch.no_grad(): + maxk = max(topk) + batch_size = targets.size(0) + + _, pred = outputs.topk(maxk, 1, True, True) + pred = pred.t() + correct = pred.eq(targets.view(1, -1).expand_as(pred)) + + res = [] + for k in topk: + correct_k = correct[:k].reshape(-1).float().sum(0, keepdim=True) + res.append(correct_k.mul_(100.0 / batch_size)) + return res + + @classmethod + def accuracy(cls, outputs, targets): + return cls.compute_accuracy(outputs, targets, topk=(1,))[0].item() + + @classmethod + def accuracy_top5(cls, outputs, targets): + return cls.compute_accuracy(outputs, targets, topk=(5,))[0].item() From 5bf64c6bae6c97364c1cef1b18dfb4c2cbd38e4a Mon Sep 17 00:00:00 2001 From: jfrery Date: Mon, 24 Jun 2024 13:28:24 +0200 Subject: [PATCH 7/9] chore: add gpu machine --- use_case_examples/resnet/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/use_case_examples/resnet/README.md b/use_case_examples/resnet/README.md index 5ba82abd2..1f4fec91b 100644 --- a/use_case_examples/resnet/README.md +++ b/use_case_examples/resnet/README.md @@ -92,7 +92,7 @@ Image 1: ## Timings and Accuracy in FHE CPU machine: 196 cores CPU machine (hp7c from AWS) -GPU machine: TBD +GPU machine: 8xH100 GPU machine Summary of the accuracy evaluation on ImageNet (100 images): From 65ec6ca9068c628407fc531fdceb26497913578b Mon Sep 17 00:00:00 2001 From: jfrery Date: Mon, 24 Jun 2024 15:24:13 +0200 Subject: [PATCH 8/9] chore: fix forbidden words --- use_case_examples/resnet/README.md | 6 +++--- use_case_examples/resnet/run_resnet18_fhe.py | 2 +- 2 files changed, 4 
insertions(+), 4 deletions(-) diff --git a/use_case_examples/resnet/README.md b/use_case_examples/resnet/README.md index 1f4fec91b..15513031f 100644 --- a/use_case_examples/resnet/README.md +++ b/use_case_examples/resnet/README.md @@ -15,9 +15,9 @@ The ResNet18 model is adapted from torchvision the original https://github.com/p The rest is left unchanged. -## Evaluation dataset +## Evaluation data-set -The model is evaluated on images from the [ImageNet-1k dataset](https://huggingface.co/datasets/timm/imagenet-1k-wds). +The model is evaluated on images from the [ImageNet-1k data-set](https://huggingface.co/datasets/timm/imagenet-1k-wds). The `ImageNetProcessor` class in `utils_resnet.py` preprocesses the ImageNet validation set for model evaluation. It uses a subset of the validation data to ensure efficient processing and evaluation. @@ -58,7 +58,7 @@ The script `run_resnet18_fhe.py` accepts several command-line arguments to contr - `--run_experiment`: runs experiments with different `n_bits` and `rounding_threshold_bits` configurations. This can help in finding the optimal settings for the model. -- `--dataset_cache_dir `: specifies the path to the directory where the dataset is cached. If not provided, the dataset will be downloaded and cached in the default location. +- `--dataset_cache_dir `: specifies the path to the directory where the data-set is cached. If not provided, the data-set will be downloaded and cached in the default location. - `--num_images `: specifies the number of images to process in the FHE execution. The default value is 1. Increasing this number will process more images but may take longer to execute. diff --git a/use_case_examples/resnet/run_resnet18_fhe.py b/use_case_examples/resnet/run_resnet18_fhe.py index 0004c0951..9cdf54973 100644 --- a/use_case_examples/resnet/run_resnet18_fhe.py +++ b/use_case_examples/resnet/run_resnet18_fhe.py @@ -62,7 +62,7 @@ def compile_model( model: The PyTorch model to compile. images: The calibration images. n_bits: The number of bits for quantization (int). Can be a dictionary: - e.g. { + { "model_inputs": 8, "op_inputs": 6, "op_weights": 6, From 3feebe8444b7f5817218649997535e326894df29 Mon Sep 17 00:00:00 2001 From: jfrery Date: Tue, 25 Jun 2024 15:33:37 +0200 Subject: [PATCH 9/9] chore: fix codeblock --- use_case_examples/resnet/README.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/use_case_examples/resnet/README.md b/use_case_examples/resnet/README.md index 15513031f..9c1409645 100644 --- a/use_case_examples/resnet/README.md +++ b/use_case_examples/resnet/README.md @@ -25,6 +25,8 @@ The `ImageNetProcessor` class in `utils_resnet.py` preprocesses the ImageNet val 1. Install a virtual Python environment and activate it: + + ```bash python -m venv venv source venv/bin/activate @@ -32,18 +34,24 @@ source venv/bin/activate 2. Install Concrete ML: + + ```bash pip install concrete-ml ``` 3. Install the dependencies: + + ```bash pip install -r requirements.txt ``` 4. Run the script: + + ```bash python run_resnet18_fhe.py [--run_fhe] [--export_statistics] [--use_gpu] [--run_experiment] [--dataset_cache_dir ] [--num_images ] ``` @@ -66,6 +74,8 @@ The script `run_resnet18_fhe.py` accepts several command-line arguments to contr Example of output when running the script: + + ```bash python resnet_fhe.py --run_fhe ```
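As a closing note on the evaluation utilities added earlier in this series, the `ImageNetProcessor.compute_accuracy` helper follows the standard top-k pattern: take the k highest-scoring classes, transpose, and compare them against the broadcast targets. The snippet below is a standalone illustration of that logic only; the logits and labels are random placeholders, not data from the use case.

```python
import torch


def topk_accuracy(outputs, targets, topk=(1, 5)):
    """Same computation as ImageNetProcessor.compute_accuracy, returned in percent."""
    maxk = max(topk)
    _, pred = outputs.topk(maxk, 1, True, True)  # (batch, maxk) best class indices
    pred = pred.t()                              # (maxk, batch)
    correct = pred.eq(targets.view(1, -1).expand_as(pred))
    batch_size = targets.size(0)
    return [correct[:k].reshape(-1).float().sum().item() * 100.0 / batch_size for k in topk]


# Random placeholders, only to exercise the code path.
logits = torch.randn(8, 1000)
labels = torch.randint(0, 1000, (8,))
top1, top5 = topk_accuracy(logits, labels)
print(f"top-1: {top1:.2f}%  top-5: {top5:.2f}%")
```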