diff --git a/go.mod b/go.mod
index 7353ba2..1c906b3 100644
--- a/go.mod
+++ b/go.mod
@@ -7,7 +7,7 @@ require (
 	github.com/bflad/tfproviderlint v0.18.0
 	github.com/client9/misspell v0.3.4
 	github.com/go-openapi/strfmt v0.19.5
-	github.com/golangci/golangci-lint v1.27.0
+	github.com/golangci/golangci-lint v1.36.0
 	github.com/hashicorp/terraform-plugin-sdk v1.15.0
 	github.com/koalificationio/go-webhookrelay v0.3.0
 )
diff --git a/go.sum b/go.sum
index 776a371..acd017c 100644
--- a/go.sum
+++ b/go.sum
@@ -1,3 +1,5 @@
+4d63.com/gochecknoglobals v0.0.0-20201008074935-acfc0b28355a h1:wFEQiK85fRsEVF0CRrPAos5LoAryUsIX1kPW/WrIqFw=
+4d63.com/gochecknoglobals v0.0.0-20201008074935-acfc0b28355a/go.mod h1:wfdC5ZjKSPr7CybKEcgJhUOgeAQW1+7WcyK8OvUilfo=
 cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
 cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
 cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
@@ -37,8 +39,10 @@ dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7
 github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
 github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
-github.com/Djarvur/go-err113 v0.0.0-20200410182137-af658d038157 h1:hY39LwQHh+1kaovmIjOrlqnXNX6tygSRfLkkK33IkZU=
-github.com/Djarvur/go-err113 v0.0.0-20200410182137-af658d038157/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs=
+github.com/Djarvur/go-err113 v0.0.0-20200511133814-5174e21577d5 h1:XTrzB+F8+SpRmbhAH8HLxhiiG6nYNwaBZjrFps1oWEk=
+github.com/Djarvur/go-err113 v0.0.0-20200511133814-5174e21577d5/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs=
+github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww=
+github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y=
 github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
 github.com/OpenPeeDeeP/depguard v1.0.1 h1:VlW4R6jmBIv3/u1JNlawEvJMM4J+dPORPaZasQee8Us=
 github.com/OpenPeeDeeP/depguard v1.0.1/go.mod h1:xsIw86fROiiwelg+jB2uM9PiKihMMmUx/1V+TNhjQvM=
@@ -54,6 +58,7 @@ github.com/agext/levenshtein v1.2.2/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki
 github.com/agl/ed25519 v0.0.0-20170116200512-5312a6153412/go.mod h1:WPjqKcmVOxf0XSf3YxCJs6N6AOSrOx3obionmG7T0y0=
 github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
 github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
+github.com/andybalholm/brotli v1.0.0/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y=
 github.com/apparentlymart/go-cidr v1.0.1 h1:NmIwLZ/KdsjIUlhf+/Np40atNXm/+lZ5txfTJ/SpF+U=
 github.com/apparentlymart/go-cidr v1.0.1/go.mod h1:EBcsNrHc3zQeuaeCeCtQruQm+n9/YjEn/vI25Lg7Gwc=
 github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3 h1:ZSTrOEhiM5J5RFxEaFvMZVEAM1KvT1YzbEOwB2EAGjA=
@@ -64,7 +69,6 @@ github.com/apparentlymart/go-textseg v1.0.0 h1:rRmlIsPEEhUTIKQb7T++Nz/A5Q6C9IuX2
 github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/Nj9VFpLOpjS5yuumk=
 github.com/apparentlymart/go-textseg/v12 v12.0.0/go.mod h1:S/4uRK2UtaQttw1GenVJEynmyUenKwP++x/+DdGV/Ec=
 github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
-github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI= @@ -77,6 +81,10 @@ github.com/asaskevich/govalidator v0.0.0-20200428143746-21a406dcc535 h1:4daAzAu0 github.com/asaskevich/govalidator v0.0.0-20200428143746-21a406dcc535/go.mod h1:oGkLhpf+kjZl6xBf758TQhh5XrAeiJv/7FRz/2spLIg= github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef h1:46PFijGLmAjMPwCCCo7Jf0W6f9slllCkkv7vyc1yOSg= github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= +github.com/ashanbrown/forbidigo v1.0.0 h1:QdNXBduDUopc3GW+YVYZn8jzmIMklQiCfdN2N5+dQeE= +github.com/ashanbrown/forbidigo v1.0.0/go.mod h1:PH+zMRWE15yW69fYfe7Kn8nYR6yYyafc3ntEGh2BBAg= +github.com/ashanbrown/makezero v0.0.0-20201205152432-7b7cdbb3025a h1:/U9tbJzDRof4fOR51vwzWdIBsIH6R2yU0KG1MBRM2Js= +github.com/ashanbrown/makezero v0.0.0-20201205152432-7b7cdbb3025a/go.mod h1:oG9Dnez7/ESBqc4EdrdNlryeo7d0KcW1ftXHm7nU/UU= github.com/aws/aws-sdk-go v1.15.78/go.mod h1:E3/ieXAlvM0XWO57iftYVDLLvQ824smPP3ATZkfNZeM= github.com/aws/aws-sdk-go v1.25.3 h1:uM16hIw9BotjZKMZlX05SN2EFtaWfi/NonPKIARiBLQ= github.com/aws/aws-sdk-go v1.25.3/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= @@ -97,8 +105,8 @@ github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kB github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84= github.com/bmatcuk/doublestar v1.2.1 h1:eetYiv8DDYOZcBADY+pRvRytf3Dlz1FhnpvL2FsClBc= github.com/bmatcuk/doublestar v1.2.1/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE= -github.com/bombsimon/wsl/v3 v3.0.0 h1:w9f49xQatuaeTJFaNP4SpiWSR5vfT6IstPtM62JjcqA= -github.com/bombsimon/wsl/v3 v3.0.0/go.mod h1:st10JtZYLE4D5sC7b8xV4zTKZwAQjCH/Hy2Pm1FNZIc= +github.com/bombsimon/wsl/v3 v3.1.0 h1:E5SRssoBgtVFPcYWUOFJEcgaySgdtTNYzsSKDOY7ss8= +github.com/bombsimon/wsl/v3 v3.1.0/go.mod h1:st10JtZYLE4D5sC7b8xV4zTKZwAQjCH/Hy2Pm1FNZIc= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cheggaaa/pb v1.0.27/go.mod h1:pQciLPpbU0oxA0h+VJYYLxO+XeDQb5pZijXscXHm81s= @@ -109,19 +117,20 @@ github.com/client9/misspell v0.3.4 h1:ta993UF76GwbvJcIo3Y68y/M3WxlpEHPWIGDkJYwzJ github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= -github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= -github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= github.com/coreos/go-oidc v2.2.1+incompatible/go.mod h1:CgnwVTmzoESiwO9qyAFEMiHoZ1nMCKZlZ9V6mm3/LKc= -github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-semver v0.3.0/go.mod 
h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= -github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= +github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/daixiang0/gci v0.2.8 h1:1mrIGMBQsBu0P7j7m1M8Lb+ZeZxsZL+jyGX4YoMJJpg= +github.com/daixiang0/gci v0.2.8/go.mod h1:+4dZ7TISfSmqfAGv59ePaHfNzgGtIkHAhhdKggP1JAc= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/denis-tingajkin/go-header v0.4.2 h1:jEeSF4sdv8/3cT/WY8AgDHUoItNSoEZ7qg9dX7pc218= +github.com/denis-tingajkin/go-header v0.4.2/go.mod h1:eLRHAVXzE5atsKAnNRDB90WHCFFnBUn4RN0nRcs1LJA= github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= github.com/docker/go-units v0.3.3/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= @@ -130,8 +139,13 @@ github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymF github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/esimonov/ifshort v1.0.0 h1:mcOSoOMVtL4tJyyDTakunR+KFQUywLLAVesiWleGPHU= +github.com/esimonov/ifshort v1.0.0/go.mod h1:yZqNJUrNn20K8Q9n2CrjTKYyVEmX209Hgu+M1LBpeZE= github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= +github.com/fatih/color v1.10.0 h1:s36xzo75JdqLaaWoiEHk767eHiwo0598uUxyfiPkDsg= +github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= @@ -139,14 +153,12 @@ github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4 github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/globalsign/mgo v0.0.0-20180905125535-1ca0a4f7cbcb/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q= github.com/globalsign/mgo v0.0.0-20181015135952-eeefdecb41b8/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q= -github.com/go-critic/go-critic v0.4.1 h1:4DTQfT1wWwLg/hzxwD9bkdhDQrdJtxe6DUTadPlrIeE= -github.com/go-critic/go-critic v0.4.1/go.mod h1:7/14rZGnZbY6E38VEGk2kVhoq6itzc1E68facVDK23g= +github.com/go-critic/go-critic v0.5.3 h1:xQEweNxzBNpSqI3wotXZAixRarETng3PTG4pkcrLCOA= +github.com/go-critic/go-critic v0.5.3/go.mod h1:2Lrs1m4jtOnnG/EdezbSpAoL0F2pRW+9HWJUZ+QaktY= 
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-lintpack/lintpack v0.5.2 h1:DI5mA3+eKdWeJ40nU4d6Wc26qmdG8RCi/btYq0TuRN0= -github.com/go-lintpack/lintpack v0.5.2/go.mod h1:NwZuYi2nUHho8XEIZ6SIxihrnPoqBTDqfpXvXAN0sXM= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-ole/go-ole v1.2.1/go.mod h1:7FAglXiTm7HKlQRDeOQ6ZNUHidzCWXuZWq/1dTyBNF8= @@ -239,23 +251,22 @@ github.com/go-toolsmith/astcast v1.0.0 h1:JojxlmI6STnFVG9yOImLeGREv8W2ocNUM+iOhR github.com/go-toolsmith/astcast v1.0.0/go.mod h1:mt2OdQTeAQcY4DQgPSArJjHCcOwlX+Wl/kwN+LbLGQ4= github.com/go-toolsmith/astcopy v1.0.0 h1:OMgl1b1MEpjFQ1m5ztEO06rz5CUd3oBv9RF7+DyvdG8= github.com/go-toolsmith/astcopy v1.0.0/go.mod h1:vrgyG+5Bxrnz4MZWPF+pI4R8h3qKRjjyvV/DSez4WVQ= -github.com/go-toolsmith/astequal v0.0.0-20180903214952-dcb477bfacd6/go.mod h1:H+xSiq0+LtiDC11+h1G32h7Of5O3CYFJ99GVbS5lDKY= github.com/go-toolsmith/astequal v1.0.0 h1:4zxD8j3JRFNyLN46lodQuqz3xdKSrur7U/sr0SDS/gQ= github.com/go-toolsmith/astequal v1.0.0/go.mod h1:H+xSiq0+LtiDC11+h1G32h7Of5O3CYFJ99GVbS5lDKY= -github.com/go-toolsmith/astfmt v0.0.0-20180903215011-8f8ee99c3086/go.mod h1:mP93XdblcopXwlyN4X4uodxXQhldPGZbcEJIimQHrkg= github.com/go-toolsmith/astfmt v1.0.0 h1:A0vDDXt+vsvLEdbMFJAUBI/uTbRw1ffOPnxsILnFL6k= github.com/go-toolsmith/astfmt v1.0.0/go.mod h1:cnWmsOAuq4jJY6Ct5YWlVLmcmLMn1JUPuQIHCY7CJDw= github.com/go-toolsmith/astinfo v0.0.0-20180906194353-9809ff7efb21/go.mod h1:dDStQCHtmZpYOmjRP/8gHHnCCch3Zz3oEgCdZVdtweU= -github.com/go-toolsmith/astp v0.0.0-20180903215135-0af7e3c24f30/go.mod h1:SV2ur98SGypH1UjcPpCatrV5hPazG6+IfNHbkDXBRrk= github.com/go-toolsmith/astp v1.0.0 h1:alXE75TXgcmupDsMK1fRAy0YUzLzqPVvBKoyWV+KPXg= github.com/go-toolsmith/astp v1.0.0/go.mod h1:RSyrtpVlfTFGDYRbrjyWP1pYu//tSFcvdYrA8meBmLI= -github.com/go-toolsmith/pkgload v0.0.0-20181119091011-e9e65178eee8/go.mod h1:WoMrjiy4zvdS+Bg6z9jZH82QXwkcgCBX6nOfnmdaHks= github.com/go-toolsmith/pkgload v1.0.0 h1:4DFWWMXVfbcN5So1sBNW9+yeiMqLFGl1wFLTL5R0Tgg= github.com/go-toolsmith/pkgload v1.0.0/go.mod h1:5eFArkbO80v7Z0kdngIxsRXRMTaX4Ilcwuh3clNrQJc= github.com/go-toolsmith/strparse v1.0.0 h1:Vcw78DnpCAKlM20kSbAyO4mPfJn/lyYA4BJUDxe2Jb4= github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8= github.com/go-toolsmith/typep v1.0.0 h1:zKymWyA1TRYvqYrYDrfEMZULyrhcnGY3x7LDKU2XQaA= github.com/go-toolsmith/typep v1.0.0/go.mod h1:JSQCQMUPdRlMZFswiq3TGpNp1GMktqkR2Ns5AIQkATU= +github.com/go-toolsmith/typep v1.0.2 h1:8xdsa1+FSIH/RhEkgnD1j2CJOy5mNllW1Q9tRiYwvlk= +github.com/go-toolsmith/typep v1.0.2/go.mod h1:JSQCQMUPdRlMZFswiq3TGpNp1GMktqkR2Ns5AIQkATU= +github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b h1:khEcpUM4yFcxg4/FHQWkvVRmgijNXRfzkIDHh23ggEo= github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM= github.com/gobuffalo/attrs v0.0.0-20190224210810-a9411de4debd/go.mod h1:4duuawTqi2wkkpB4ePgWMaai6/Kc6WEz83bhFwpHzj0= github.com/gobuffalo/depgen v0.0.0-20190329151759-d478694a28d3/go.mod 
h1:3STtPUQYuzV0gBVOY3vy6CfMm/ljR4pABfrTeHNLHUY= @@ -283,8 +294,8 @@ github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/V github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw= github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= -github.com/gofrs/flock v0.0.0-20190320160742-5135e617513b h1:ekuhfTjngPhisSjOJ0QWKpPQE8/rbknHaes6WVJj5Hw= -github.com/gofrs/flock v0.0.0-20190320160742-5135e617513b/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= +github.com/gofrs/flock v0.8.0 h1:MSdYClljsF3PbENUUEx85nkWfJSGfzYI9yEBZOJz6CY= +github.com/gofrs/flock v0.8.0/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.2.1 h1:/s5zKNz0uPFCZ5hddgPdo2TK2TVrUNMn0OOX8/aZMTE= github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= @@ -329,14 +340,12 @@ github.com/golangci/errcheck v0.0.0-20181223084120-ef45e06d44b6 h1:YYWNAGTKWhKpc github.com/golangci/errcheck v0.0.0-20181223084120-ef45e06d44b6/go.mod h1:DbHgvLiFKX1Sh2T1w8Q/h4NAI8MHIpzCdnBUDTXU3I0= github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613 h1:9kfjN3AdxcbsZBf8NjltjWihK2QfBBBZuv91cMFfDHw= github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613/go.mod h1:SyvUF2NxV+sN8upjjeVYr5W7tyxaT1JVtvhKhOn2ii8= -github.com/golangci/goconst v0.0.0-20180610141641-041c5f2b40f3 h1:pe9JHs3cHHDQgOFXJJdYkK6fLz2PWyYtP4hthoCMvs8= -github.com/golangci/goconst v0.0.0-20180610141641-041c5f2b40f3/go.mod h1:JXrF4TWy4tXYn62/9x8Wm/K/dm06p8tCKwFRDPZG/1o= -github.com/golangci/gocyclo v0.0.0-20180528134321-2becd97e67ee h1:J2XAy40+7yz70uaOiMbNnluTg7gyQhtGqLQncQh+4J8= -github.com/golangci/gocyclo v0.0.0-20180528134321-2becd97e67ee/go.mod h1:ozx7R9SIwqmqf5pRP90DhR2Oay2UIjGuKheCBCNwAYU= +github.com/golangci/gocyclo v0.0.0-20180528144436-0a533e8fa43d h1:pXTK/gkVNs7Zyy7WKgLXmpQ5bHTrq5GDsp8R9Qs67g0= +github.com/golangci/gocyclo v0.0.0-20180528144436-0a533e8fa43d/go.mod h1:ozx7R9SIwqmqf5pRP90DhR2Oay2UIjGuKheCBCNwAYU= github.com/golangci/gofmt v0.0.0-20190930125516-244bba706f1a h1:iR3fYXUjHCR97qWS8ch1y9zPNsgXThGwjKPrYfqMPks= github.com/golangci/gofmt v0.0.0-20190930125516-244bba706f1a/go.mod h1:9qCChq59u/eW8im404Q2WWTrnBUQKjpNYKMbU4M7EFU= -github.com/golangci/golangci-lint v1.27.0 h1:VYLx63qb+XJsHdZ27PMS2w5JZacN0XG8ffUwe7yQomo= -github.com/golangci/golangci-lint v1.27.0/go.mod h1:+eZALfxIuthdrHPtfM7w/R3POJLjHDfJJw8XZl9xOng= +github.com/golangci/golangci-lint v1.36.0 h1:x9ysLvMFJ2599BdiB3feiJmEJlZpOT9hg4rAQt0Dsiw= +github.com/golangci/golangci-lint v1.36.0/go.mod h1:SZhIZhUXXT/OcDb9rymj0J21mOVikjj51zunqnXM34Y= github.com/golangci/ineffassign v0.0.0-20190609212857-42439a7714cc h1:gLLhTLMk2/SutryVJ6D4VZCU3CUqr8YloG7FPIBWFpI= github.com/golangci/ineffassign v0.0.0-20190609212857-42439a7714cc/go.mod h1:e5tpTHCfVze+7EpLEozzMB3eafxo2KT5veNg1k6byQU= github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0 h1:MfyDlzVjl1hoaPzPD4Gpb/QgoRfSBR0jdhwGyAWwMSA= @@ -361,6 +370,10 @@ github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.0 h1:/QaMHBdZ26BB3SSst0Iwl10Epc+xhTquomWX0oZEB6w= github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp 
v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4 h1:L8R9j+yAqZuZjsqh/z+F1NCffTKKLShY6zXTItVIZ8M= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0 h1:pMen7vLs8nvgEYhywH3KDWJIJTeEr2ULsVWHWYHQyBs= @@ -379,15 +392,21 @@ github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+ github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= -github.com/gookit/color v1.2.4/go.mod h1:AhIE+pS6D4Ql0SQWbBeXPHw7gY0/sjHoA4s/n1KB7xg= +github.com/gookit/color v1.3.6/go.mod h1:R3ogXq2B9rTbXoSHJ1HyUVAZ3poOJHpd9nQmyGZsfvQ= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gorilla/handlers v1.4.2 h1:0QniY0USkHQ1RGCLfKxeNHK9bkDHGRYGNDFBCS+YARg= github.com/gorilla/handlers v1.4.2/go.mod h1:Qkdc/uu4tH4g6mTK6auzZ766c4CA0Ng8+o/OAirnOIQ= -github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/gostaticanalysis/analysisutil v0.0.0-20190318220348-4088753ea4d3 h1:JVnpOZS+qxli+rgVl98ILOXVNbW+kb5wcxeGx8ShUIw= github.com/gostaticanalysis/analysisutil v0.0.0-20190318220348-4088753ea4d3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE= +github.com/gostaticanalysis/analysisutil v0.0.3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE= +github.com/gostaticanalysis/analysisutil v0.1.0/go.mod h1:dMhHRU9KTiDcuLGdy87/2gTR8WruwYZrKdRq9m1O6uw= +github.com/gostaticanalysis/analysisutil v0.4.1 h1:/7clKqrVfiVwiBQLM0Uke4KvXnO6JcCTS7HwF2D6wG8= +github.com/gostaticanalysis/analysisutil v0.4.1/go.mod h1:18U/DLpRgIUd459wGxVHE0fRgmo1UgHDcbw7F5idXu0= +github.com/gostaticanalysis/comment v1.3.0/go.mod h1:xMicKDx7XRXYdVwY9f9wQpDJVnqWxw9wCauCMKp+IBI= +github.com/gostaticanalysis/comment v1.4.1 h1:xHopR5L2lRz6OsjH4R2HG5wRhW9ySl3FsHIvi5pcXwc= +github.com/gostaticanalysis/comment v1.4.1/go.mod h1:ih6ZxzTHLdadaiSnF5WY3dxUoXfXAlTaRzuaNDlSado= github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= @@ -479,6 +498,8 @@ github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NH github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/jessevdk/go-flags v1.4.0 h1:4IU2WS7AumrZ/40jfhf4QVDMsQwqA7VEHozFRrGARJA= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= +github.com/jgautheron/goconst v0.0.0-20201117150253-ccae5bf973f3 h1:7nkB9fLPMwtn/R6qfPcHileL/x9ydlhw8XyDrLI1ZXg= +github.com/jgautheron/goconst 
v0.0.0-20201117150253-ccae5bf973f3/go.mod h1:aAosetZ5zaeC/2EfMeRswtxUFBpe2Hr7HzkgX4fanO4= github.com/jhump/protoreflect v1.6.0 h1:h5jfMVslIg6l29nsMs0D8Wj17RDVdNYti0vDN/PZZoE= github.com/jhump/protoreflect v1.6.0/go.mod h1:eaTn3RZAmMBcV0fifFvlm6VHNz3wSkYyXYWUh7ymB74= github.com/jingyugao/rowserrcheck v0.0.0-20191204022205-72ab7603b68a h1:GmsqmapfzSJkm28dhRoHz2tLRbJmqhU86IPgBtN3mmk= @@ -488,6 +509,7 @@ github.com/jirfag/go-printf-func-name v0.0.0-20191110105641-45db9963cdd3/go.mod github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af h1:pmfjZENx5imkbgOkpRUYLnmbU7UEFbjtDA2hxJ1ichM= github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= +github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks= github.com/jmoiron/sqlx v1.2.1-0.20190826204134-d7d95172beb5/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks= github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= @@ -507,17 +529,16 @@ github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvW github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/gotool v1.0.0 h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/klauspost/compress v1.4.0/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= -github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= -github.com/klauspost/cpuid v0.0.0-20180405133222-e7e905edc00e/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= -github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= +github.com/klauspost/compress v1.10.7/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.11.0/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/koalificationio/go-webhookrelay v0.3.0 h1:eFdGgEyfKfg6PRIb8doBw+NWXQ3teRKLmBmtvLmpwJ4= github.com/koalificationio/go-webhookrelay v0.3.0/go.mod h1:KaV/+NJdi3i54lYx6ZJXATjSuqvcDTFrzGBsSAux1bQ= github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.2 h1:DB17ag19krx9CFsz4o3enTrPXyIXCl+2iCXH/aMAp9s= github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= @@ -530,12 +551,18 @@ github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text 
v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kulti/thelper v0.2.1 h1:H4rSHiB3ALx//SXr+k9OPqKoOw2cAZpIQwVNH1RL5T4= +github.com/kulti/thelper v0.2.1/go.mod h1:vMu2Cizjy/grP+jmsvOFDx1kYP6+PD1lqg4Yu5exl2U= +github.com/kunwardeep/paralleltest v1.0.2 h1:/jJRv0TiqPoEy/Y8dQxCFJhD56uS/pnvtatgTZBHokU= +github.com/kunwardeep/paralleltest v1.0.2/go.mod h1:ZPqNm1fVHPllh5LPVujzbVz1JN2GhLxSfY+oqUsvG30= github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/kyoh86/exportloopref v0.1.8 h1:5Ry/at+eFdkX9Vsdw3qU4YkvGtzuVfzT4X7S77LoN/M= +github.com/kyoh86/exportloopref v0.1.8/go.mod h1:1tUcJeiioIs7VWe5gcOObrux3lb66+sBqGZrRkMwPgg= github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.9.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/logrusorgru/aurora v0.0.0-20181002194514-a7b3b318ed4e/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= -github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/magiconair/properties v1.8.1 h1:ZC2Vc7/ZFkGmsVC9KvOjumD+G5lXy2RtTKyzRKO2BQ4= github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= @@ -558,6 +585,8 @@ github.com/mattn/go-colorable v0.1.1 h1:G1f5SKeVxmagw/IyvzvtZE4Gybcc4Tr1tf7I8z0X github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= github.com/mattn/go-colorable v0.1.4 h1:snbPLB8fVfU9iwbbo30TPtbLRzwWu6aJS6Xh4eaaviA= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.8 h1:c1ghPdyEDarC70ftn0y+A/Ee++9zz8ljHG1b13eJ0s8= +github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.4 h1:bnP0vzxcAdeI1zdubAl5PjU6zsERjGZb7raWodagDYs= github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= @@ -566,10 +595,15 @@ github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hd github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.10 h1:qxFzApOv4WsAL965uUPIsXzAKCZxN2p9UqdhFS4ZW10= github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= +github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= +github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= github.com/mattn/goveralls v0.0.2/go.mod h1:8d1ZMHsd7fW6IRPKQh46F2WRpyib5/X4FOpevwGNQEw= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/mbilski/exhaustivestruct v1.1.0 h1:4ykwscnAFeHJruT+EY3M3vdeP8uXMh0VV2E61iR7XD8= +github.com/mbilski/exhaustivestruct v1.1.0/go.mod 
h1:OeTBVxQWoEmB2J2JCHmXWPJ0aksxSUOUy+nvtVEfzXc= github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= github.com/mitchellh/cli v1.0.0 h1:iGBIsUe3+HZ/AD/Vd7DErOt5sU9fa8Uj7A2s1aggv1Y= github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= @@ -580,7 +614,7 @@ github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFW github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/go-ps v0.0.0-20190716172923-621e5597135b/go.mod h1:r1VsdOzOPt1ZSrGZWFoNhsAedKnEd6r9Np1+5blZCWk= +github.com/mitchellh/go-ps v1.0.0/go.mod h1:J4lOc8z8yJs6vUwklHw2XEIiT4z4C40KtWVN3nvg8Pg= github.com/mitchellh/go-testing-interface v0.0.0-20171004221916-a61a99592b77/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= github.com/mitchellh/go-testing-interface v1.0.0 h1:fzU/JVNcaqHQEcVFAKeR41fkiLdIPrefOvVG1VZ96U0= github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= @@ -604,23 +638,33 @@ github.com/mitchellh/reflectwalk v1.0.1/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= -github.com/mozilla/tls-observatory v0.0.0-20200317151703-4fa42e1c2dee/go.mod h1:SrKMQvPiws7F7iqYp8/TX+IhxCYhzr6N/1yb8cwHsGk= +github.com/moricho/tparallel v0.2.1 h1:95FytivzT6rYzdJLdtfn6m1bfFJylOJK41+lgv/EHf4= +github.com/moricho/tparallel v0.2.1/go.mod h1:fXEIZxG2vdfl0ZF8b42f5a78EhjjD5mX8qUplsoSU4k= +github.com/mozilla/tls-observatory v0.0.0-20201209171846-0547674fceff/go.mod h1:SrKMQvPiws7F7iqYp8/TX+IhxCYhzr6N/1yb8cwHsGk= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/nakabonne/nestif v0.3.0 h1:+yOViDGhg8ygGrmII72nV9B/zGxY188TYpfolntsaPw= github.com/nakabonne/nestif v0.3.0/go.mod h1:dI314BppzXjJ4HsCnbo7XzrJHPszZsjnk5wEBSYHI2c= -github.com/nbutton23/zxcvbn-go v0.0.0-20180912185939-ae427f1e4c1d h1:AREM5mwr4u1ORQBMvzfzBgpsctsbQikCVpvC+tX285E= -github.com/nbutton23/zxcvbn-go v0.0.0-20180912185939-ae427f1e4c1d/go.mod h1:o96djdrsSGy3AWPyBgZMAGfxZNfgntdJG+11KU4QvbU= +github.com/nbutton23/zxcvbn-go v0.0.0-20201221231540-e56b841a3c88 h1:o+O3Cd1HO9CTgxE3/C8p5I5Y4C0yYWbF8d4IkfOLtcQ= +github.com/nbutton23/zxcvbn-go v0.0.0-20201221231540-e56b841a3c88/go.mod h1:KSVJerMDfblTH7p5MZaTt+8zaT2iEk3AkVb9PQdZuE8= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/nishanths/exhaustive v0.1.0 h1:kVlMw8h2LHPMGUVqUj6230oQjjTMFjwcZrnkhXzFfl8= +github.com/nishanths/exhaustive v0.1.0/go.mod h1:S1j9110vxV1ECdCudXRkeMnFQ/DQk9ajLT0Uf2MYZQQ= +github.com/nishanths/predeclared v0.2.1 h1:1TXtjmy4f3YCFjTxRd8zcFHOmoUir+gp0ESzjFzG2sw= +github.com/nishanths/predeclared v0.2.1/go.mod h1:HvkGJcA3naj4lOwnFXFDkFxVtSqQMB9sbB1usJ+xjQE= +github.com/nxadm/tail v1.4.4 h1:DQuhQpB1tVlglWS2hLQ5OV6B5r8aGxSrPc5Qo6uTN78= +github.com/nxadm/tail 
v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw= github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.12.0 h1:Iw5WCbBcaAAd0fpRb1c9r5YCylv4XDoCSigm1zLevwU= -github.com/onsi/ginkgo v1.12.0/go.mod h1:oUhWkIvk5aDxtKvDDuw8gItl8pKl42LzjC9KZE0HfGg= +github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= +github.com/onsi/ginkgo v1.14.2 h1:8mVmC9kjFFmA8H4pKMUhcblgifdkOIXPvbhN1T36q1M= +github.com/onsi/ginkgo v1.14.2/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= -github.com/onsi/gomega v1.9.0 h1:R1uwffexN6Pr340GtYRIdZmAiN4J+iw6WG4wog1DUXg= -github.com/onsi/gomega v1.9.0/go.mod h1:Ho0h+IUsWyvy1OpqCwxlQ/21gkhVunqlU8fDGcoTdcA= +github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= +github.com/onsi/gomega v1.10.4 h1:NiTx7EEvBzu9sFOD1zORteLSt3o8gnlvZZwSE9TnY9U= +github.com/onsi/gomega v1.10.4/go.mod h1:g/HbgYopi++010VEqkFgJHKC09uJiW9UkXvMUuKHUCQ= github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= @@ -628,6 +672,7 @@ github.com/pelletier/go-toml v1.4.0 h1:u3Z1r+oOXJIkxqw34zVhyPgjBsm6X2wn21NWs/HfS github.com/pelletier/go-toml v1.4.0/go.mod h1:PN7xzY2wHTK0K9p34ErDQMlFxa51Fk0OUruD3k1mMwo= github.com/pelletier/go-toml v1.8.0 h1:Keo9qb7iRJs2voHvunFtuuYFsbWeOBh8/P9v/kVMFtw= github.com/pelletier/go-toml v1.8.0/go.mod h1:D6yutnOGMveHEPV7VQOuvI/gXY61bv+9bAOTRnLElKs= +github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d h1:CdDQnGF8Nq9ocOS/xlSptM1N3BbrA6/kmaep5ggwaIA= github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d/go.mod h1:3OzsM7FXDQlpCiw2j81fOmAwQLnZnLGXVKUzeKQXIAw= github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -638,6 +683,8 @@ github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/polyfloyd/go-errorlint v0.0.0-20201127212506-19bd8db6546f h1:xAw10KgJqG5NJDfmRqJ05Z0IFblKumjtMeyiOLxj3+4= +github.com/polyfloyd/go-errorlint v0.0.0-20201127212506-19bd8db6546f/go.mod h1:wi9BfjxjF/bwiZ701TzmfKu6UKC357IOAtNr0Td0Lvw= github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= github.com/posener/complete v1.2.1 h1:LrvDIY//XNo65Lq84G/akBuMGlawHvGBABv8f/ZN6DI= github.com/posener/complete v1.2.1/go.mod h1:6gapUrK/U1TAN7ciCoNRIdVC5sbdBTUh1DKN0g6uH7E= @@ -653,37 +700,53 @@ github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/tsdb v0.7.1/go.mod 
h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= github.com/quasilyte/go-consistent v0.0.0-20190521200055-c6f3937de18c/go.mod h1:5STLWrekHfjyYwxBRVRXNOSewLJ3PWfDJd1VyTS21fI= +github.com/quasilyte/go-ruleguard v0.2.1-0.20201030093329-408e96760278 h1:5gcJ7tORNCNB2QjOJF+MYjzS9aiWpxhP3gntf7RVrOQ= +github.com/quasilyte/go-ruleguard v0.2.1-0.20201030093329-408e96760278/go.mod h1:2RT/tf0Ce0UDj5y243iWKosQogJd8+1G3Rs2fxmlYnw= +github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95 h1:L8QM9bvf68pVdQ3bCFZMDmnt9yqcMBro1pC7F+IPYMY= +github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95/go.mod h1:rlzQ04UMyJXu/aOvhd8qT+hvDrFpiwqp8MRXDY9szc0= github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= -github.com/ryancurrah/gomodguard v1.0.4 h1:oCreMAt9GuFXDe9jW4HBpc3GjdX3R/sUEcLAGh1zPx8= -github.com/ryancurrah/gomodguard v1.0.4/go.mod h1:9T/Cfuxs5StfsocWr4WzDL36HqnX0fVb9d5fSEaLhoE= +github.com/rogpeppe/go-internal v1.5.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= +github.com/rogpeppe/go-internal v1.6.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryancurrah/gomodguard v1.2.0 h1:YWfhGOrXwLGiqcC/u5EqG6YeS8nh+1fw0HEc85CVZro= +github.com/ryancurrah/gomodguard v1.2.0/go.mod h1:rNqbC4TOIdUDcVMSIpNNAzTbzXAZa6W5lnUepvuMMgQ= +github.com/ryanrolds/sqlclosecheck v0.3.0 h1:AZx+Bixh8zdUBxUA1NxbxVAS78vTPq4rCb8OUZI9xFw= +github.com/ryanrolds/sqlclosecheck v0.3.0/go.mod h1:1gREqxyTGR3lVtpngyFo3hZAgk0KCtEdgEkHwDbigdA= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= -github.com/securego/gosec/v2 v2.3.0 h1:y/9mCF2WPDbSDpL3QDWZD3HHGrSYw0QSHnCqTfs4JPE= -github.com/securego/gosec/v2 v2.3.0/go.mod h1:UzeVyUXbxukhLeHKV3VVqo7HdoQR9MrRfFmZYotn8ME= +github.com/securego/gosec/v2 v2.6.1 h1:+KCw+uz16FYfFyJ/A5aU6uP7mnrL+j1TbDnk1yN+8R0= +github.com/securego/gosec/v2 v2.6.1/go.mod h1:I76p3NTHBXsGhybUW+cEQ692q2Vp+A0Z6ZLzDIZy+Ao= github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= +github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c h1:W65qqJCIOVP4jpqPQ0YvHYKwcMEMVWIzWC5iNQQfBTU= +github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c/go.mod h1:/PevMnwAxekIXwN8qQyfc5gl2NlkB3CQlkizAbOkeBs= github.com/shirou/gopsutil v0.0.0-20190901111213-e4ec7b275ada/go.mod h1:WWnYX4lzhCH5h/3YBfyVA3VbLYjlMZZAQcW9ojMexNc= github.com/shirou/w32 v0.0.0-20160930032740-bb4de0191aa4/go.mod h1:qsXQc7+bwAM3Q1u/4XEfrquwF8Lw7D7y5cD8CuHnfIc= github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e h1:MZM7FHLqUHYI0Y/mQAt3d2aYa0SiNms/hFqC9qJYolM= github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk= github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041 h1:llrF3Fs4018ePo4+G/HV/uQUqEI1HMDjCeOf2V6puPc= 
github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOmsrJOB+vfqUK+7DmDyjhSLIIBnXo9lvZJj3MWQ= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= github.com/sirupsen/logrus v1.4.2 h1:SPIRibHv4MatM3XXNO2BJeFLZwZ2LvZgfQ5+UNI2im4= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= +github.com/sirupsen/logrus v1.7.0 h1:ShrD1U9pZB12TX0cVy0DtePoCH97K8EtX+mg7ZARUtM= +github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= -github.com/sourcegraph/go-diff v0.5.1 h1:gO6i5zugwzo1RVTvgvfwCOSVegNuvnNi6bAD1QCmkHs= -github.com/sourcegraph/go-diff v0.5.1/go.mod h1:j2dHj3m8aZgQO8lMTcTnBcXkRRRqi34cd2MNlA9u1mE= +github.com/sonatard/noctx v0.0.1 h1:VC1Qhl6Oxx9vvWo3UDgrGXYCeKCe3Wbw7qAWL6FrmTY= +github.com/sonatard/noctx v0.0.1/go.mod h1:9D2D/EoULe8Yy2joDHJj7bv3sZoq9AaSb8B4lqBjiZI= +github.com/sourcegraph/go-diff v0.6.1 h1:hmA1LzxW0n1c3Q4YbrFgg4P99GSnebYa3x8gr0HZqLQ= +github.com/sourcegraph/go-diff v0.6.1/go.mod h1:iBszgVvyxdc8SFZ7gm69go2KDdt3ag071iBaWPF6cjs= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= github.com/spf13/afero v1.2.2 h1:5jhuqJyZCZf2JRofRvN/nIFgIWNzPa3/Vz8mYylgbWc= @@ -695,8 +758,8 @@ github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkU github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng= github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= -github.com/spf13/cobra v0.0.5 h1:f0B+LkLX6DtmRH1isoNA9VTtNUK9K8xYd28JNNfOv/s= -github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= +github.com/spf13/cobra v1.1.1 h1:KfztREH0tPxJJ+geloSLaAkaPkr4ki2Er5quFV1TDo4= +github.com/spf13/cobra v1.1.1/go.mod h1:WnodtKOvamDL/PwE2M4iKs8aMDBZ5Q5klgD3qfVJQMI= github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= @@ -705,15 +768,17 @@ github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod 
h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= -github.com/spf13/viper v1.6.1 h1:VPZzIkznI1YhVMRi6vNFLHSwhnhReBfgTxIPccpfdZk= -github.com/spf13/viper v1.6.1/go.mod h1:t3iDnF5Jlj76alVNuyFBk5oUMCvsrkbvZK0WQdfDi5k= github.com/spf13/viper v1.7.0 h1:xVKxvI7ouOI5I+U9s2eeiUfMaWBVoXA3AWskkrqK0VM= github.com/spf13/viper v1.7.0/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg= +github.com/spf13/viper v1.7.1 h1:pM5oEahlgWv/WnHXpgbKz7iLIxRf65tye2Ci+XFK5sk= +github.com/spf13/viper v1.7.1/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg= +github.com/ssgreg/nlreturn/v2 v2.1.0 h1:6/s4Rc49L6Uo6RLjhWZGBpWWjfzk2yrf1nIW8m4wgVA= +github.com/ssgreg/nlreturn/v2 v2.1.0/go.mod h1:E/iiPB78hV7Szg2YfRgyIrk1AD6JVMTRkkxBiELzh2I= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.2.0 h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48= github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= +github.com/stretchr/testify v1.1.4/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= @@ -722,36 +787,38 @@ github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s= github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= github.com/tdakkota/asciicheck v0.0.0-20200416190851-d7f85be797a2 h1:Xr9gkxfOP0KQWXKNqmwe8vEeSUiUj4Rlee9CMVX2ZUQ= github.com/tdakkota/asciicheck v0.0.0-20200416190851-d7f85be797a2/go.mod h1:yHp0ai0Z9gUljN3o0xMhYJnH/IcvkdTBOX2fmJ93JEM= -github.com/tetafro/godot v0.3.7 h1:+mecr7RKrUKB5UQ1gwqEMn13sDKTyDR8KNIquB9mm+8= -github.com/tetafro/godot v0.3.7/go.mod h1:/7NLHhv08H1+8DNj0MElpAACw1ajsCuf3TKNQxA5S+0= +github.com/tetafro/godot v1.4.3 h1:pl95xaTCPdCb+l1v+uWrdcY+aiagEg19dv5rfxru57M= +github.com/tetafro/godot v1.4.3/go.mod h1:ah7jjYmOMnIjS9ku2krapvGQrFNtTLo9Z/qB3dGU1eU= github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4= github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/timakin/bodyclose v0.0.0-20190930140734-f7f2e9bca95e h1:RumXZ56IrCj4CL+g1b9OL/oH0QnsF976bC8xQFYUD5Q= github.com/timakin/bodyclose v0.0.0-20190930140734-f7f2e9bca95e/go.mod h1:Qimiffbc6q9tBWlVV6x0P9sat/ao1xEkREYPPj9hphk= github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= -github.com/tommy-muehle/go-mnd v1.3.1-0.20200224220436-e6f9a994e8fa h1:RC4maTWLKKwb7p1cnoygsbKIgNlJqSYBeAFON3Ar8As= -github.com/tommy-muehle/go-mnd v1.3.1-0.20200224220436-e6f9a994e8fa/go.mod h1:dSUh0FtTP8VhvkL1S+gUR1OKd9ZnSaozuI6r3m6wOig= 
+github.com/tomarrell/wrapcheck v0.0.0-20200807122107-df9e8bcb914d h1:3EZyvNUMsGD1QA8cu0STNn1L7I77rvhf2IhOcHYQhSw= +github.com/tomarrell/wrapcheck v0.0.0-20200807122107-df9e8bcb914d/go.mod h1:yiFB6fFoV7saXirUGfuK+cPtUh4NX/Hf5y2WC2lehu0= +github.com/tommy-muehle/go-mnd/v2 v2.3.1 h1:a1S4+4HSXDJMgeODJH/t0EEKxcVla6Tasw+Zx9JJMog= +github.com/tommy-muehle/go-mnd/v2 v2.3.1/go.mod h1:WsUAkMJMYww6l/ufffCD3m+P7LEvr8TnZn9lwVDlgzw= github.com/toqueteos/webbrowser v1.2.0 h1:tVP/gpK69Fx+qMJKsLE7TD8LuGWPnEV71wBN9rrstGQ= github.com/toqueteos/webbrowser v1.2.0/go.mod h1:XWoZq4cyp9WeUeak7w7LXRUQf1F1ATJMir8RTqb4ayM= -github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= -github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= github.com/ulikunitz/xz v0.5.5 h1:pFrO0lVpTBXLpYw+pnLj6TbvHuyjXMfjGeCwSqCVwok= github.com/ulikunitz/xz v0.5.5/go.mod h1:2bypXElzHzzJZwzH67Y6wb67pO62Rzfn7BSiF4ABRW8= github.com/ulikunitz/xz v0.5.7 h1:YvTNdFzX6+W5m9msiYg/zpkSURPPtOlzbqYjrFn7Yt4= github.com/ulikunitz/xz v0.5.7/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= -github.com/ultraware/funlen v0.0.2 h1:Av96YVBwwNSe4MLR7iI/BIa3VyI7/djnto/pK3Uxbdo= -github.com/ultraware/funlen v0.0.2/go.mod h1:Dp4UiAus7Wdb9KUZsYWZEWiRzGuM2kXM1lPbfaF6xhA= +github.com/ultraware/funlen v0.0.3 h1:5ylVWm8wsNwH5aWo9438pwvsK0QiqVuUrt9bn7S/iLA= +github.com/ultraware/funlen v0.0.3/go.mod h1:Dp4UiAus7Wdb9KUZsYWZEWiRzGuM2kXM1lPbfaF6xhA= github.com/ultraware/whitespace v0.0.4 h1:If7Va4cM03mpgrNH9k49/VOicWpGoG70XPBFFODYDsg= github.com/ultraware/whitespace v0.0.4/go.mod h1:aVMh/gQve5Maj9hQ/hg+F75lr/X5A89uZnzAmWSineA= github.com/uudashr/gocognit v1.0.1 h1:MoG2fZ0b/Eo7NXoIwCVFLG5JED3qgQz5/NEE+rOsjPs= github.com/uudashr/gocognit v1.0.1/go.mod h1:j44Ayx2KW4+oB6SWMv8KsmHzZrOInQav7D3cQMJ5JUM= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= -github.com/valyala/fasthttp v1.2.0/go.mod h1:4vX61m6KN+xDduDNwXrhIAVZaZaZiQ1luJk8LWSxF3s= -github.com/valyala/quicktemplate v1.2.0/go.mod h1:EH+4AkTd43SvgIbQHYu59/cJyxDoOVRUAfrukLPuGJ4= +github.com/valyala/fasthttp v1.16.0/go.mod h1:YOKImeEosDdBPnxc0gy7INqi3m1zK6A+xl6TwOBhHCA= +github.com/valyala/quicktemplate v1.6.3/go.mod h1:fwPzK2fHuYEODzJ9pkw0ipCPNHZ2tD5KW4lOuSdPKzY= github.com/valyala/tcplisten v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV9WzVtRmSR+PDvWpU/qWl4Wa5LApYYX4ZtKbio= github.com/vmihailenco/msgpack v3.3.3+incompatible h1:wapg9xDUZDzGCNFlwc5SqI1rvcciqcxEHac4CYj89xI= github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= @@ -760,7 +827,6 @@ github.com/vmihailenco/msgpack v4.0.1+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6Ac github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= github.com/xdg/stringprep v0.0.0-20180714160509-73f8eece6fdc/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= -github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -794,7 +860,6 @@ go.uber.org/multierr v1.1.0/go.mod 
h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/ go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190320223903-b7391e95e576/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= @@ -844,12 +909,13 @@ golang.org/x/mod v0.2.0 h1:KU7oHjnv3XNWfa5COkzUifxZmxp1TyI7ImMXqFxLwvQ= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0 h1:8pl+sMODzuvGJkmj2W4kZihvVb5mKm8pB/X44PIQHv8= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20180530234432-1e491301e022/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180911220305-26e67e76b6c3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181005035420-146acd28ed58/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -862,7 +928,6 @@ golang.org/x/net v0.0.0-20190320064053-1272bf9dcd53/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -882,6 +947,7 @@ golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/ golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net 
v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200602114024-627f9648deb9 h1:pNX+40auqi2JqRfOP1akLGtYcn15TUbkhwuCO3foqqM= golang.org/x/net v0.0.0-20200602114024-627f9648deb9/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= @@ -891,6 +957,9 @@ golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81R golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200904194848-62affa334b73 h1:MXfv8rhZWmFeqX3GNZRsd6vOLoaCHjYEX3qkRo3YBUA= golang.org/x/net v0.0.0-20200904194848-62affa334b73/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb h1:eBmm0M9fYhWpKZLjQUUKka/LtIxf46G4fxeEz5KJr9U= +golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45 h1:SVwTIAaPC2U/AvvLNZ2a7OVsmBpC8L5BlwK1whH3hm0= @@ -907,6 +976,7 @@ golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -914,7 +984,6 @@ golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5h golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190129075346-302c3dd5f1cc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -932,14 +1001,17 @@ golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191008105621-543471e840be h1:QAcqgptGM8IQBC9K/RC4o+O9YmqEm0diQn9QmZw/0mU= golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -951,10 +1023,15 @@ golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200523222454-059865788121 h1:rITEj+UZHYC927n8GT97eC3zrpzXdb/voyeOuVKS46o= golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200602225109-6fdc65e7d980/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae h1:Ih9Yo4hSPImZOpfGuA4bR/ORKTAbhZo2AbWNRCnevdo= golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201009025420-dfb3f7c4e634 h1:bNEHhJCnrwMKNMmOx3yAynp5vs5/gRy+XWFtZFu7NBM= +golang.org/x/sys v0.0.0-20201009025420-dfb3f7c4e634/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -962,6 +1039,8 @@ golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= 
+golang.org/x/text v0.3.4 h1:0YWbFKbhXG/wIiuHDSKpS0Iy7FSA+u45VtBMfQcFTTc= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -969,11 +1048,10 @@ golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGm golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20181117154741-2ddaf7f79a09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190110163146-51295c7ec13a/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190221204921-83362c3779f5/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190307163923-6a08e3108db3/go.mod h1:25r3+/G6/xytQM8iWZKq3Hn0kr0rgFKPUNVEL/dr3z4= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190311215038-5c2858a9cfe5/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= @@ -985,7 +1063,6 @@ golang.org/x/tools v0.0.0-20190416151739-9c9e1878f421/go.mod h1:LCzVGOaR6xXOjkQ3 golang.org/x/tools v0.0.0-20190420181800-aa740d480789/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190521203540-521d6ed310dd/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190531172133-b3315ee88b7d/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= @@ -993,11 +1070,12 @@ golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgw golang.org/x/tools v0.0.0-20190617190820-da514acc4774/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190719005602-e377ae9d6386/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI= golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190910044552-dd2b5c81c578/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools 
v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190916130336-e45ffcd953cc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= @@ -1008,6 +1086,7 @@ golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117220505-0cba7a3a9ee9/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= @@ -1019,20 +1098,40 @@ golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapK golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= golang.org/x/tools v0.0.0-20200324003944-a576cf524670/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200329025819-fd4102a86c65/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200331202046-9d5940d49312/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200410194907-79a7a3126eef/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200414032229-332987a829c3/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200422022333-3d57cf2e726e/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200426102838-f3a5411a4c3b/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200502202811-ed308ab3e770/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200622203043-20e05c1c8ffa/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200624225443-88f3c62a19ff/go.mod 
h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200625211823-6506e20df31f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200713011307-fd294ab11aed h1:+qzWo37K31KxduIYaBeMqJ8MUOyTayOQKpH9aDPLMSY= golang.org/x/tools v0.0.0-20200713011307-fd294ab11aed/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200717024301-6ddee64345a6/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200724022722-7017fd6b1305/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200812195022-5ae4c3c160a0/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200820010801-b793a1359eac/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200831203904-5a2aa26beb65/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= golang.org/x/tools v0.0.0-20200911193555-6422fca01df9 h1:M8mz5B5dntyYZSIscpClTVyMIJbg6kKKIyysZM++kf8= golang.org/x/tools v0.0.0-20200911193555-6422fca01df9/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201001104356-43ebab892c4c/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= +golang.org/x/tools v0.0.0-20201002184944-ecd9fd270d5d/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= +golang.org/x/tools v0.0.0-20201011145850-ed2f50202694/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= +golang.org/x/tools v0.0.0-20201028025901-8cd080b735b3/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201030010431-2feb2bb1ff51/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201114224030-61ea331ec02b/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201118003311-bd56c0adb394/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201230224404-63754364767c/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210101214203-2dba1e4ea05c/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210102185154-773b96fafca2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210105210202-9ed45478a130 h1:8qSBr5nyKsEgkP918Pu5FFDZpTtLIjXSo6mrtdVOFfk= +golang.org/x/tools v0.0.0-20210105210202-9ed45478a130/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= @@ -1100,7 +1199,6 @@ google.golang.org/grpc v1.8.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEd google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= -google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= google.golang.org/grpc v1.23.0 h1:AzbTB6ux+okLTzP8Ru1Xs41C303zdcfEht7MQnYJt5A= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= @@ -1154,6 +1252,8 @@ gopkg.in/yaml.v2 v2.2.7/go.mod 
h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200605160147-a5ece683394c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 h1:tQIYjPdBoyREyB9XMu+nnTclpTYkz2zFM+lzLJFO4gQ= @@ -1167,14 +1267,16 @@ honnef.co/go/tools v0.0.1-2020.1.3 h1:sXmLre5bzIR6ypkjXCDI3jHPssRhc8KD/Ome589sc3 honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.0.1-2020.1.4 h1:UoveltGrhghAA7ePc+e+QYDHXrBps2PqFZiHkGR/xK8= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.6 h1:W18jzjh8mfPez+AwGLxmOImucz/IFjpNlrKVnaj2YVc= +honnef.co/go/tools v0.0.1-2020.1.6/go.mod h1:pyyisuGw24ruLjrr1ddx39WE0y9OooInRzEYLhQB2YY= +mvdan.cc/gofumpt v0.1.0 h1:hsVv+Y9UsZ/mFZTxJZuHVI6shSQCtzZ11h1JEFPAZLw= +mvdan.cc/gofumpt v0.1.0/go.mod h1:yXG1r1WqZVKWbVRtBWKWX9+CxGYfA51nSomhM0woR48= mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed h1:WX1yoOaKQfddO/mLzdV4wptyWgoH/6hwLs7QHTixo0I= mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed/go.mod h1:Xkxe497xwlCKkIaQYRfC7CSLworTXY9RMqwhhCm+8Nc= mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b h1:DxJ5nJdkhDlLok9K6qO+5290kphDJbHOQO1DFFFTeBo= mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b/go.mod h1:2odslEg/xrtNQqCYg2/jCoyKnw3vv5biOc3JnIcYfL4= -mvdan.cc/unparam v0.0.0-20190720180237-d51796306d8f h1:Cq7MalBHYACRd6EesksG1Q8EoIAKOsiZviGKbOLIej4= -mvdan.cc/unparam v0.0.0-20190720180237-d51796306d8f/go.mod h1:4G1h5nDURzA3bwVMZIVpwbkw+04kSxk3rAtzlimaUJw= +mvdan.cc/unparam v0.0.0-20200501210554-b37ab49443f7 h1:kAREL6MPwpsk1/PQPFD3Eg7WAQR5mPTWZJaBiG5LDbY= +mvdan.cc/unparam v0.0.0-20200501210554-b37ab49443f7/go.mod h1:HGC5lll35J70Y5v7vCGb9oLhHoScFwkHDJm/05RdSTc= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= -sourcegraph.com/sqs/pbtypes v0.0.0-20180604144634-d3ebe8f20ae4 h1:JPJh2pk3+X4lXAkZIk2RuE/7/FoK9maXw+TNPJhVS/c= -sourcegraph.com/sqs/pbtypes v0.0.0-20180604144634-d3ebe8f20ae4/go.mod h1:ketZ/q3QxT9HOBeFhu6RdvsftgpsbFHBF5Cas6cDKZ0= diff --git a/vendor/4d63.com/gochecknoglobals/LICENSE b/vendor/4d63.com/gochecknoglobals/LICENSE new file mode 100644 index 0000000..c401e66 --- /dev/null +++ b/vendor/4d63.com/gochecknoglobals/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 Leigh McCulloch + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or 
substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/4d63.com/gochecknoglobals/checknoglobals/check_no_globals.go b/vendor/4d63.com/gochecknoglobals/checknoglobals/check_no_globals.go new file mode 100644 index 0000000..5b6325d --- /dev/null +++ b/vendor/4d63.com/gochecknoglobals/checknoglobals/check_no_globals.go @@ -0,0 +1,154 @@ +package checknoglobals + +import ( + "flag" + "fmt" + "go/ast" + "go/token" + "strings" + + "golang.org/x/tools/go/analysis" +) + +// allowedExpression is a struct representing packages and methods that will +// be an allowed combination to use as a global variable, f.ex. Name `regexp` +// and SelName `MustCompile`. +type allowedExpression struct { + Name string + SelName string +} + +const Doc = `check that no global variables exist + +This analyzer checks for global variables and errors on any found. + +A global variable is a variable declared in package scope and that can be read +and written to by any function within the package. Global variables can cause +side effects which are difficult to keep track of. A code in one function may +change the variables state while another unrelated chunk of code may be +effected by it.` + +// Analyzer provides an Analyzer that checks that there are no global +// variables, except for errors and variables containing regular +// expressions. +func Analyzer() *analysis.Analyzer { + return &analysis.Analyzer{ + Name: "gochecknoglobals", + Doc: Doc, + Run: checkNoGlobals, + Flags: flags(), + RunDespiteErrors: true, + } +} + +func flags() flag.FlagSet { + flags := flag.NewFlagSet("", flag.ExitOnError) + flags.Bool("t", false, "Include tests") + + return *flags +} + +func isAllowed(v ast.Node) bool { + switch i := v.(type) { + case *ast.Ident: + return i.Name == "_" || i.Name == "version" || looksLikeError(i) + case *ast.CallExpr: + if expr, ok := i.Fun.(*ast.SelectorExpr); ok { + return isAllowedSelectorExpression(expr) + } + case *ast.CompositeLit: + if expr, ok := i.Type.(*ast.SelectorExpr); ok { + return isAllowedSelectorExpression(expr) + } + } + + return false +} + +func isAllowedSelectorExpression(v *ast.SelectorExpr) bool { + x, ok := v.X.(*ast.Ident) + if !ok { + return false + } + + allowList := []allowedExpression{ + {Name: "regexp", SelName: "MustCompile"}, + } + + for _, i := range allowList { + if x.Name == i.Name && v.Sel.Name == i.SelName { + return true + } + } + + return false +} + +// looksLikeError returns true if the AST identifier starts +// with 'err' or 'Err', or false otherwise. 
+// +// TODO: https://github.com/leighmcculloch/gochecknoglobals/issues/5 +func looksLikeError(i *ast.Ident) bool { + prefix := "err" + if i.IsExported() { + prefix = "Err" + } + return strings.HasPrefix(i.Name, prefix) +} + +func checkNoGlobals(pass *analysis.Pass) (interface{}, error) { + includeTests := pass.Analyzer.Flags.Lookup("t").Value.(flag.Getter).Get().(bool) + + for _, file := range pass.Files { + filename := pass.Fset.Position(file.Pos()).Filename + if !strings.HasSuffix(filename, ".go") { + continue + } + if !includeTests && strings.HasSuffix(filename, "_test.go") { + continue + } + + for _, decl := range file.Decls { + genDecl, ok := decl.(*ast.GenDecl) + if !ok { + continue + } + if genDecl.Tok != token.VAR { + continue + } + for _, spec := range genDecl.Specs { + valueSpec := spec.(*ast.ValueSpec) + onlyAllowedValues := false + + for _, vn := range valueSpec.Values { + if isAllowed(vn) { + onlyAllowedValues = true + continue + } + + onlyAllowedValues = false + break + } + + if onlyAllowedValues { + continue + } + + for _, vn := range valueSpec.Names { + if isAllowed(vn) { + continue + } + + message := fmt.Sprintf("%s is a global variable", vn.Name) + pass.Report(analysis.Diagnostic{ + Pos: vn.Pos(), + Category: "global", + Message: message, + }) + } + } + } + } + + return nil, nil +} diff --git a/vendor/github.com/Djarvur/go-err113/README.adoc b/vendor/github.com/Djarvur/go-err113/README.adoc index b4c1f43..b26af40 100644 --- a/vendor/github.com/Djarvur/go-err113/README.adoc +++ b/vendor/github.com/Djarvur/go-err113/README.adoc @@ -23,7 +23,7 @@ So, `err113` reports every `==` and `!=` comparison for exact `error` type varia Also, any call of `errors.New()` and `fmt.Errorf()` methods are reported except the calls used to initialise package-level variables and the `fmt.Errorf()` calls wrapping the other errors. -Note: non-standard packages, like `github.com/pkg/errors` are ignored complitely. +Note: non-standard packages, like `github.com/pkg/errors` are ignored completely. == Install @@ -71,3 +71,5 @@ Flags: == Thanks To link:https://github.com/quasilyte[Iskander (Alex) Sharipov] for the really useful advices. + +To link:https://github.com/jackwhelpton[Jack Whelpton] for the bugfix provided. \ No newline at end of file diff --git a/vendor/github.com/Djarvur/go-err113/comparison.go b/vendor/github.com/Djarvur/go-err113/comparison.go index 7e7777d..777d752 100644 --- a/vendor/github.com/Djarvur/go-err113/comparison.go +++ b/vendor/github.com/Djarvur/go-err113/comparison.go @@ -99,5 +99,5 @@ func asImportedName(ex ast.Expr, info *types.Info) (string, bool) { return "", false } - return ep.Imported().Name(), true + return ep.Imported().Path(), true } diff --git a/vendor/github.com/Masterminds/semver/.travis.yml b/vendor/github.com/Masterminds/semver/.travis.yml new file mode 100644 index 0000000..096369d --- /dev/null +++ b/vendor/github.com/Masterminds/semver/.travis.yml @@ -0,0 +1,29 @@ +language: go + +go: + - 1.6.x + - 1.7.x + - 1.8.x + - 1.9.x + - 1.10.x + - 1.11.x + - 1.12.x + - tip + +# Setting sudo access to false will let Travis CI use containers rather than +# VMs to run the tests. 
For more details see: +# - http://docs.travis-ci.com/user/workers/container-based-infrastructure/ +# - http://docs.travis-ci.com/user/workers/standard-infrastructure/ +sudo: false + +script: + - make setup + - make test + +notifications: + webhooks: + urls: + - https://webhooks.gitter.im/e/06e3328629952dabe3e0 + on_success: change # options: [always|never|change] default: always + on_failure: always # options: [always|never|change] default: always + on_start: never # options: [always|never|change] default: always diff --git a/vendor/github.com/Masterminds/semver/CHANGELOG.md b/vendor/github.com/Masterminds/semver/CHANGELOG.md new file mode 100644 index 0000000..e405c9a --- /dev/null +++ b/vendor/github.com/Masterminds/semver/CHANGELOG.md @@ -0,0 +1,109 @@ +# 1.5.0 (2019-09-11) + +## Added + +- #103: Add basic fuzzing for `NewVersion()` (thanks @jesse-c) + +## Changed + +- #82: Clarify wildcard meaning in range constraints and update tests for it (thanks @greysteil) +- #83: Clarify caret operator range for pre-1.0.0 dependencies (thanks @greysteil) +- #72: Adding docs comment pointing to vert for a cli +- #71: Update the docs on pre-release comparator handling +- #89: Test with new go versions (thanks @thedevsaddam) +- #87: Added $ to ValidPrerelease for better validation (thanks @jeremycarroll) + +## Fixed + +- #78: Fix unchecked error in example code (thanks @ravron) +- #70: Fix the handling of pre-releases and the 0.0.0 release edge case +- #97: Fixed copyright file for proper display on GitHub +- #107: Fix handling prerelease when sorting alphanum and num +- #109: Fixed where Validate sometimes returns wrong message on error + +# 1.4.2 (2018-04-10) + +## Changed +- #72: Updated the docs to point to vert for a console appliaction +- #71: Update the docs on pre-release comparator handling + +## Fixed +- #70: Fix the handling of pre-releases and the 0.0.0 release edge case + +# 1.4.1 (2018-04-02) + +## Fixed +- Fixed #64: Fix pre-release precedence issue (thanks @uudashr) + +# 1.4.0 (2017-10-04) + +## Changed +- #61: Update NewVersion to parse ints with a 64bit int size (thanks @zknill) + +# 1.3.1 (2017-07-10) + +## Fixed +- Fixed #57: number comparisons in prerelease sometimes inaccurate + +# 1.3.0 (2017-05-02) + +## Added +- #45: Added json (un)marshaling support (thanks @mh-cbon) +- Stability marker. See https://masterminds.github.io/stability/ + +## Fixed +- #51: Fix handling of single digit tilde constraint (thanks @dgodd) + +## Changed +- #55: The godoc icon moved from png to svg + +# 1.2.3 (2017-04-03) + +## Fixed +- #46: Fixed 0.x.x and 0.0.x in constraints being treated as * + +# Release 1.2.2 (2016-12-13) + +## Fixed +- #34: Fixed issue where hyphen range was not working with pre-release parsing. + +# Release 1.2.1 (2016-11-28) + +## Fixed +- #24: Fixed edge case issue where constraint "> 0" does not handle "0.0.1-alpha" + properly. + +# Release 1.2.0 (2016-11-04) + +## Added +- #20: Added MustParse function for versions (thanks @adamreese) +- #15: Added increment methods on versions (thanks @mh-cbon) + +## Fixed +- Issue #21: Per the SemVer spec (section 9) a pre-release is unstable and + might not satisfy the intended compatibility. The change here ignores pre-releases + on constraint checks (e.g., ~ or ^) when a pre-release is not part of the + constraint. For example, `^1.2.3` will ignore pre-releases while + `^1.2.3-alpha` will include them. 
+ +# Release 1.1.1 (2016-06-30) + +## Changed +- Issue #9: Speed up version comparison performance (thanks @sdboyer) +- Issue #8: Added benchmarks (thanks @sdboyer) +- Updated Go Report Card URL to new location +- Updated Readme to add code snippet formatting (thanks @mh-cbon) +- Updating tagging to v[SemVer] structure for compatibility with other tools. + +# Release 1.1.0 (2016-03-11) + +- Issue #2: Implemented validation to provide reasons a versions failed a + constraint. + +# Release 1.0.1 (2015-12-31) + +- Fixed #1: * constraint failing on valid versions. + +# Release 1.0.0 (2015-10-20) + +- Initial release diff --git a/vendor/github.com/Masterminds/semver/LICENSE.txt b/vendor/github.com/Masterminds/semver/LICENSE.txt new file mode 100644 index 0000000..9ff7da9 --- /dev/null +++ b/vendor/github.com/Masterminds/semver/LICENSE.txt @@ -0,0 +1,19 @@ +Copyright (C) 2014-2019, Matt Butcher and Matt Farina + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/vendor/github.com/Masterminds/semver/Makefile b/vendor/github.com/Masterminds/semver/Makefile new file mode 100644 index 0000000..a7a1b4e --- /dev/null +++ b/vendor/github.com/Masterminds/semver/Makefile @@ -0,0 +1,36 @@ +.PHONY: setup +setup: + go get -u gopkg.in/alecthomas/gometalinter.v1 + gometalinter.v1 --install + +.PHONY: test +test: validate lint + @echo "==> Running tests" + go test -v + +.PHONY: validate +validate: + @echo "==> Running static validations" + @gometalinter.v1 \ + --disable-all \ + --enable deadcode \ + --severity deadcode:error \ + --enable gofmt \ + --enable gosimple \ + --enable ineffassign \ + --enable misspell \ + --enable vet \ + --tests \ + --vendor \ + --deadline 60s \ + ./... || exit_code=1 + +.PHONY: lint +lint: + @echo "==> Running linters" + @gometalinter.v1 \ + --disable-all \ + --enable golint \ + --vendor \ + --deadline 60s \ + ./... || : diff --git a/vendor/github.com/Masterminds/semver/README.md b/vendor/github.com/Masterminds/semver/README.md new file mode 100644 index 0000000..1b52d2f --- /dev/null +++ b/vendor/github.com/Masterminds/semver/README.md @@ -0,0 +1,194 @@ +# SemVer + +The `semver` package provides the ability to work with [Semantic Versions](http://semver.org) in Go. 
Specifically it provides the ability to: + +* Parse semantic versions +* Sort semantic versions +* Check if a semantic version fits within a set of constraints +* Optionally work with a `v` prefix + +[![Stability: +Active](https://masterminds.github.io/stability/active.svg)](https://masterminds.github.io/stability/active.html) +[![Build Status](https://travis-ci.org/Masterminds/semver.svg)](https://travis-ci.org/Masterminds/semver) [![Build status](https://ci.appveyor.com/api/projects/status/jfk66lib7hb985k8/branch/master?svg=true&passingText=windows%20build%20passing&failingText=windows%20build%20failing)](https://ci.appveyor.com/project/mattfarina/semver/branch/master) [![GoDoc](https://godoc.org/github.com/Masterminds/semver?status.svg)](https://godoc.org/github.com/Masterminds/semver) [![Go Report Card](https://goreportcard.com/badge/github.com/Masterminds/semver)](https://goreportcard.com/report/github.com/Masterminds/semver) + +If you are looking for a command line tool for version comparisons please see +[vert](https://github.com/Masterminds/vert) which uses this library. + +## Parsing Semantic Versions + +To parse a semantic version use the `NewVersion` function. For example, + +```go + v, err := semver.NewVersion("1.2.3-beta.1+build345") +``` + +If there is an error the version wasn't parseable. The version object has methods +to get the parts of the version, compare it to other versions, convert the +version back into a string, and get the original string. For more details +please see the [documentation](https://godoc.org/github.com/Masterminds/semver). + +## Sorting Semantic Versions + +A set of versions can be sorted using the [`sort`](https://golang.org/pkg/sort/) +package from the standard library. For example, + +```go + raw := []string{"1.2.3", "1.0", "1.3", "2", "0.4.2",} + vs := make([]*semver.Version, len(raw)) + for i, r := range raw { + v, err := semver.NewVersion(r) + if err != nil { + t.Errorf("Error parsing version: %s", err) + } + + vs[i] = v + } + + sort.Sort(semver.Collection(vs)) +``` + +## Checking Version Constraints + +Checking a version against version constraints is one of the most featureful +parts of the package. + +```go + c, err := semver.NewConstraint(">= 1.2.3") + if err != nil { + // Handle constraint not being parseable. + } + + v, _ := semver.NewVersion("1.3") + if err != nil { + // Handle version not being parseable. + } + // Check if the version meets the constraints. The a variable will be true. + a := c.Check(v) +``` + +## Basic Comparisons + +There are two elements to the comparisons. First, a comparison string is a list +of comma separated and comparisons. These are then separated by || separated or +comparisons. For example, `">= 1.2, < 3.0.0 || >= 4.2.3"` is looking for a +comparison that's greater than or equal to 1.2 and less than 3.0.0 or is +greater than or equal to 4.2.3. + +The basic comparisons are: + +* `=`: equal (aliased to no operator) +* `!=`: not equal +* `>`: greater than +* `<`: less than +* `>=`: greater than or equal to +* `<=`: less than or equal to + +## Working With Pre-release Versions + +Pre-releases, for those not familiar with them, are used for software releases +prior to stable or generally available releases. Examples of pre-releases include +development, alpha, beta, and release candidate releases. A pre-release may be +a version such as `1.2.3-beta.1` while the stable release would be `1.2.3`. In the +order of precidence, pre-releases come before their associated releases. 
In this +example `1.2.3-beta.1 < 1.2.3`. + +According to the Semantic Version specification pre-releases may not be +API compliant with their release counterpart. It says, + +> A pre-release version indicates that the version is unstable and might not satisfy the intended compatibility requirements as denoted by its associated normal version. + +SemVer comparisons without a pre-release comparator will skip pre-release versions. +For example, `>=1.2.3` will skip pre-releases when looking at a list of releases +while `>=1.2.3-0` will evaluate and find pre-releases. + +The reason for the `0` as a pre-release version in the example comparison is +because pre-releases can only contain ASCII alphanumerics and hyphens (along with +`.` separators), per the spec. Sorting happens in ASCII sort order, again per the spec. The lowest character is a `0` in ASCII sort order (see an [ASCII Table](http://www.asciitable.com/)) + +Understanding ASCII sort ordering is important because A-Z comes before a-z. That +means `>=1.2.3-BETA` will return `1.2.3-alpha`. What you might expect from case +sensitivity doesn't apply here. This is due to ASCII sort ordering which is what +the spec specifies. + +## Hyphen Range Comparisons + +There are multiple methods to handle ranges and the first is hyphens ranges. +These look like: + +* `1.2 - 1.4.5` which is equivalent to `>= 1.2, <= 1.4.5` +* `2.3.4 - 4.5` which is equivalent to `>= 2.3.4, <= 4.5` + +## Wildcards In Comparisons + +The `x`, `X`, and `*` characters can be used as a wildcard character. This works +for all comparison operators. When used on the `=` operator it falls +back to the pack level comparison (see tilde below). For example, + +* `1.2.x` is equivalent to `>= 1.2.0, < 1.3.0` +* `>= 1.2.x` is equivalent to `>= 1.2.0` +* `<= 2.x` is equivalent to `< 3` +* `*` is equivalent to `>= 0.0.0` + +## Tilde Range Comparisons (Patch) + +The tilde (`~`) comparison operator is for patch level ranges when a minor +version is specified and major level changes when the minor number is missing. +For example, + +* `~1.2.3` is equivalent to `>= 1.2.3, < 1.3.0` +* `~1` is equivalent to `>= 1, < 2` +* `~2.3` is equivalent to `>= 2.3, < 2.4` +* `~1.2.x` is equivalent to `>= 1.2.0, < 1.3.0` +* `~1.x` is equivalent to `>= 1, < 2` + +## Caret Range Comparisons (Major) + +The caret (`^`) comparison operator is for major level changes. This is useful +when comparisons of API versions as a major change is API breaking. For example, + +* `^1.2.3` is equivalent to `>= 1.2.3, < 2.0.0` +* `^0.0.1` is equivalent to `>= 0.0.1, < 1.0.0` +* `^1.2.x` is equivalent to `>= 1.2.0, < 2.0.0` +* `^2.3` is equivalent to `>= 2.3, < 3` +* `^2.x` is equivalent to `>= 2.0.0, < 3` + +# Validation + +In addition to testing a version against a constraint, a version can be validated +against a constraint. When validation fails a slice of errors containing why a +version didn't meet the constraint is returned. For example, + +```go + c, err := semver.NewConstraint("<= 1.2.3, >= 1.4") + if err != nil { + // Handle constraint not being parseable. + } + + v, _ := semver.NewVersion("1.3") + if err != nil { + // Handle version not being parseable. + } + + // Validate a version against a constraint. + a, msgs := c.Validate(v) + // a is false + for _, m := range msgs { + fmt.Println(m) + + // Loops over the errors which would read + // "1.3 is greater than 1.2.3" + // "1.3 is less than 1.4" + } +``` + +# Fuzzing + + [dvyukov/go-fuzz](https://github.com/dvyukov/go-fuzz) is used for fuzzing. + +1. 
`go-fuzz-build` +2. `go-fuzz -workdir=fuzz` + +# Contribute + +If you find an issue or want to contribute please file an [issue](https://github.com/Masterminds/semver/issues) +or [create a pull request](https://github.com/Masterminds/semver/pulls). diff --git a/vendor/github.com/Masterminds/semver/appveyor.yml b/vendor/github.com/Masterminds/semver/appveyor.yml new file mode 100644 index 0000000..b2778df --- /dev/null +++ b/vendor/github.com/Masterminds/semver/appveyor.yml @@ -0,0 +1,44 @@ +version: build-{build}.{branch} + +clone_folder: C:\gopath\src\github.com\Masterminds\semver +shallow_clone: true + +environment: + GOPATH: C:\gopath + +platform: + - x64 + +install: + - go version + - go env + - go get -u gopkg.in/alecthomas/gometalinter.v1 + - set PATH=%PATH%;%GOPATH%\bin + - gometalinter.v1.exe --install + +build_script: + - go install -v ./... + +test_script: + - "gometalinter.v1 \ + --disable-all \ + --enable deadcode \ + --severity deadcode:error \ + --enable gofmt \ + --enable gosimple \ + --enable ineffassign \ + --enable misspell \ + --enable vet \ + --tests \ + --vendor \ + --deadline 60s \ + ./... || exit_code=1" + - "gometalinter.v1 \ + --disable-all \ + --enable golint \ + --vendor \ + --deadline 60s \ + ./... || :" + - go test -v + +deploy: off diff --git a/vendor/github.com/Masterminds/semver/collection.go b/vendor/github.com/Masterminds/semver/collection.go new file mode 100644 index 0000000..a782358 --- /dev/null +++ b/vendor/github.com/Masterminds/semver/collection.go @@ -0,0 +1,24 @@ +package semver + +// Collection is a collection of Version instances and implements the sort +// interface. See the sort package for more details. +// https://golang.org/pkg/sort/ +type Collection []*Version + +// Len returns the length of a collection. The number of Version instances +// on the slice. +func (c Collection) Len() int { + return len(c) +} + +// Less is needed for the sort interface to compare two Version objects on the +// slice. If checks if one is less than the other. +func (c Collection) Less(i, j int) bool { + return c[i].LessThan(c[j]) +} + +// Swap is needed for the sort interface to replace the Version objects +// at two different positions in the slice. +func (c Collection) Swap(i, j int) { + c[i], c[j] = c[j], c[i] +} diff --git a/vendor/github.com/Masterminds/semver/constraints.go b/vendor/github.com/Masterminds/semver/constraints.go new file mode 100644 index 0000000..b94b934 --- /dev/null +++ b/vendor/github.com/Masterminds/semver/constraints.go @@ -0,0 +1,423 @@ +package semver + +import ( + "errors" + "fmt" + "regexp" + "strings" +) + +// Constraints is one or more constraint that a semantic version can be +// checked against. +type Constraints struct { + constraints [][]*constraint +} + +// NewConstraint returns a Constraints instance that a Version instance can +// be checked against. If there is a parse error it will be returned. +func NewConstraint(c string) (*Constraints, error) { + + // Rewrite - ranges into a comparison operation. + c = rewriteRange(c) + + ors := strings.Split(c, "||") + or := make([][]*constraint, len(ors)) + for k, v := range ors { + cs := strings.Split(v, ",") + result := make([]*constraint, len(cs)) + for i, s := range cs { + pc, err := parseConstraint(s) + if err != nil { + return nil, err + } + + result[i] = pc + } + or[k] = result + } + + o := &Constraints{constraints: or} + return o, nil +} + +// Check tests if a version satisfies the constraints. 
+func (cs Constraints) Check(v *Version) bool { + // loop over the ORs and check the inner ANDs + for _, o := range cs.constraints { + joy := true + for _, c := range o { + if !c.check(v) { + joy = false + break + } + } + + if joy { + return true + } + } + + return false +} + +// Validate checks if a version satisfies a constraint. If not a slice of +// reasons for the failure are returned in addition to a bool. +func (cs Constraints) Validate(v *Version) (bool, []error) { + // loop over the ORs and check the inner ANDs + var e []error + + // Capture the prerelease message only once. When it happens the first time + // this var is marked + var prerelesase bool + for _, o := range cs.constraints { + joy := true + for _, c := range o { + // Before running the check handle the case there the version is + // a prerelease and the check is not searching for prereleases. + if c.con.pre == "" && v.pre != "" { + if !prerelesase { + em := fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v) + e = append(e, em) + prerelesase = true + } + joy = false + + } else { + + if !c.check(v) { + em := fmt.Errorf(c.msg, v, c.orig) + e = append(e, em) + joy = false + } + } + } + + if joy { + return true, []error{} + } + } + + return false, e +} + +var constraintOps map[string]cfunc +var constraintMsg map[string]string +var constraintRegex *regexp.Regexp + +func init() { + constraintOps = map[string]cfunc{ + "": constraintTildeOrEqual, + "=": constraintTildeOrEqual, + "!=": constraintNotEqual, + ">": constraintGreaterThan, + "<": constraintLessThan, + ">=": constraintGreaterThanEqual, + "=>": constraintGreaterThanEqual, + "<=": constraintLessThanEqual, + "=<": constraintLessThanEqual, + "~": constraintTilde, + "~>": constraintTilde, + "^": constraintCaret, + } + + constraintMsg = map[string]string{ + "": "%s is not equal to %s", + "=": "%s is not equal to %s", + "!=": "%s is equal to %s", + ">": "%s is less than or equal to %s", + "<": "%s is greater than or equal to %s", + ">=": "%s is less than %s", + "=>": "%s is less than %s", + "<=": "%s is greater than %s", + "=<": "%s is greater than %s", + "~": "%s does not have same major and minor version as %s", + "~>": "%s does not have same major and minor version as %s", + "^": "%s does not have same major version as %s", + } + + ops := make([]string, 0, len(constraintOps)) + for k := range constraintOps { + ops = append(ops, regexp.QuoteMeta(k)) + } + + constraintRegex = regexp.MustCompile(fmt.Sprintf( + `^\s*(%s)\s*(%s)\s*$`, + strings.Join(ops, "|"), + cvRegex)) + + constraintRangeRegex = regexp.MustCompile(fmt.Sprintf( + `\s*(%s)\s+-\s+(%s)\s*`, + cvRegex, cvRegex)) +} + +// An individual constraint +type constraint struct { + // The callback function for the restraint. It performs the logic for + // the constraint. + function cfunc + + msg string + + // The version used in the constraint check. For example, if a constraint + // is '<= 2.0.0' the con a version instance representing 2.0.0. 
+ con *Version + + // The original parsed version (e.g., 4.x from != 4.x) + orig string + + // When an x is used as part of the version (e.g., 1.x) + minorDirty bool + dirty bool + patchDirty bool +} + +// Check if a version meets the constraint +func (c *constraint) check(v *Version) bool { + return c.function(v, c) +} + +type cfunc func(v *Version, c *constraint) bool + +func parseConstraint(c string) (*constraint, error) { + m := constraintRegex.FindStringSubmatch(c) + if m == nil { + return nil, fmt.Errorf("improper constraint: %s", c) + } + + ver := m[2] + orig := ver + minorDirty := false + patchDirty := false + dirty := false + if isX(m[3]) { + ver = "0.0.0" + dirty = true + } else if isX(strings.TrimPrefix(m[4], ".")) || m[4] == "" { + minorDirty = true + dirty = true + ver = fmt.Sprintf("%s.0.0%s", m[3], m[6]) + } else if isX(strings.TrimPrefix(m[5], ".")) { + dirty = true + patchDirty = true + ver = fmt.Sprintf("%s%s.0%s", m[3], m[4], m[6]) + } + + con, err := NewVersion(ver) + if err != nil { + + // The constraintRegex should catch any regex parsing errors. So, + // we should never get here. + return nil, errors.New("constraint Parser Error") + } + + cs := &constraint{ + function: constraintOps[m[1]], + msg: constraintMsg[m[1]], + con: con, + orig: orig, + minorDirty: minorDirty, + patchDirty: patchDirty, + dirty: dirty, + } + return cs, nil +} + +// Constraint functions +func constraintNotEqual(v *Version, c *constraint) bool { + if c.dirty { + + // If there is a pre-release on the version but the constraint isn't looking + // for them assume that pre-releases are not compatible. See issue 21 for + // more details. + if v.Prerelease() != "" && c.con.Prerelease() == "" { + return false + } + + if c.con.Major() != v.Major() { + return true + } + if c.con.Minor() != v.Minor() && !c.minorDirty { + return true + } else if c.minorDirty { + return false + } + + return false + } + + return !v.Equal(c.con) +} + +func constraintGreaterThan(v *Version, c *constraint) bool { + + // If there is a pre-release on the version but the constraint isn't looking + // for them assume that pre-releases are not compatible. See issue 21 for + // more details. + if v.Prerelease() != "" && c.con.Prerelease() == "" { + return false + } + + return v.Compare(c.con) == 1 +} + +func constraintLessThan(v *Version, c *constraint) bool { + // If there is a pre-release on the version but the constraint isn't looking + // for them assume that pre-releases are not compatible. See issue 21 for + // more details. + if v.Prerelease() != "" && c.con.Prerelease() == "" { + return false + } + + if !c.dirty { + return v.Compare(c.con) < 0 + } + + if v.Major() > c.con.Major() { + return false + } else if v.Minor() > c.con.Minor() && !c.minorDirty { + return false + } + + return true +} + +func constraintGreaterThanEqual(v *Version, c *constraint) bool { + + // If there is a pre-release on the version but the constraint isn't looking + // for them assume that pre-releases are not compatible. See issue 21 for + // more details. + if v.Prerelease() != "" && c.con.Prerelease() == "" { + return false + } + + return v.Compare(c.con) >= 0 +} + +func constraintLessThanEqual(v *Version, c *constraint) bool { + // If there is a pre-release on the version but the constraint isn't looking + // for them assume that pre-releases are not compatible. See issue 21 for + // more details. 
+ if v.Prerelease() != "" && c.con.Prerelease() == "" { + return false + } + + if !c.dirty { + return v.Compare(c.con) <= 0 + } + + if v.Major() > c.con.Major() { + return false + } else if v.Minor() > c.con.Minor() && !c.minorDirty { + return false + } + + return true +} + +// ~*, ~>* --> >= 0.0.0 (any) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0, <3.0.0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0, <2.1.0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0, <1.3.0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3, <1.3.0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0, <1.3.0 +func constraintTilde(v *Version, c *constraint) bool { + // If there is a pre-release on the version but the constraint isn't looking + // for them assume that pre-releases are not compatible. See issue 21 for + // more details. + if v.Prerelease() != "" && c.con.Prerelease() == "" { + return false + } + + if v.LessThan(c.con) { + return false + } + + // ~0.0.0 is a special case where all constraints are accepted. It's + // equivalent to >= 0.0.0. + if c.con.Major() == 0 && c.con.Minor() == 0 && c.con.Patch() == 0 && + !c.minorDirty && !c.patchDirty { + return true + } + + if v.Major() != c.con.Major() { + return false + } + + if v.Minor() != c.con.Minor() && !c.minorDirty { + return false + } + + return true +} + +// When there is a .x (dirty) status it automatically opts in to ~. Otherwise +// it's a straight = +func constraintTildeOrEqual(v *Version, c *constraint) bool { + // If there is a pre-release on the version but the constraint isn't looking + // for them assume that pre-releases are not compatible. See issue 21 for + // more details. + if v.Prerelease() != "" && c.con.Prerelease() == "" { + return false + } + + if c.dirty { + c.msg = constraintMsg["~"] + return constraintTilde(v, c) + } + + return v.Equal(c.con) +} + +// ^* --> (any) +// ^2, ^2.x, ^2.x.x --> >=2.0.0, <3.0.0 +// ^2.0, ^2.0.x --> >=2.0.0, <3.0.0 +// ^1.2, ^1.2.x --> >=1.2.0, <2.0.0 +// ^1.2.3 --> >=1.2.3, <2.0.0 +// ^1.2.0 --> >=1.2.0, <2.0.0 +func constraintCaret(v *Version, c *constraint) bool { + // If there is a pre-release on the version but the constraint isn't looking + // for them assume that pre-releases are not compatible. See issue 21 for + // more details. + if v.Prerelease() != "" && c.con.Prerelease() == "" { + return false + } + + if v.LessThan(c.con) { + return false + } + + if v.Major() != c.con.Major() { + return false + } + + return true +} + +var constraintRangeRegex *regexp.Regexp + +const cvRegex string = `v?([0-9|x|X|\*]+)(\.[0-9|x|X|\*]+)?(\.[0-9|x|X|\*]+)?` + + `(-([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` + + `(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` + +func isX(x string) bool { + switch x { + case "x", "*", "X": + return true + default: + return false + } +} + +func rewriteRange(i string) string { + m := constraintRangeRegex.FindAllStringSubmatch(i, -1) + if m == nil { + return i + } + o := i + for _, v := range m { + t := fmt.Sprintf(">= %s, <= %s", v[1], v[11]) + o = strings.Replace(o, v[0], t, 1) + } + + return o +} diff --git a/vendor/github.com/Masterminds/semver/doc.go b/vendor/github.com/Masterminds/semver/doc.go new file mode 100644 index 0000000..6a6c24c --- /dev/null +++ b/vendor/github.com/Masterminds/semver/doc.go @@ -0,0 +1,115 @@ +/* +Package semver provides the ability to work with Semantic Versions (http://semver.org) in Go. 
+ +Specifically it provides the ability to: + + * Parse semantic versions + * Sort semantic versions + * Check if a semantic version fits within a set of constraints + * Optionally work with a `v` prefix + +Parsing Semantic Versions + +To parse a semantic version use the `NewVersion` function. For example, + + v, err := semver.NewVersion("1.2.3-beta.1+build345") + +If there is an error the version wasn't parseable. The version object has methods +to get the parts of the version, compare it to other versions, convert the +version back into a string, and get the original string. For more details +please see the documentation at https://godoc.org/github.com/Masterminds/semver. + +Sorting Semantic Versions + +A set of versions can be sorted using the `sort` package from the standard library. +For example, + + raw := []string{"1.2.3", "1.0", "1.3", "2", "0.4.2",} + vs := make([]*semver.Version, len(raw)) + for i, r := range raw { + v, err := semver.NewVersion(r) + if err != nil { + t.Errorf("Error parsing version: %s", err) + } + + vs[i] = v + } + + sort.Sort(semver.Collection(vs)) + +Checking Version Constraints + +Checking a version against version constraints is one of the most featureful +parts of the package. + + c, err := semver.NewConstraint(">= 1.2.3") + if err != nil { + // Handle constraint not being parseable. + } + + v, err := semver.NewVersion("1.3") + if err != nil { + // Handle version not being parseable. + } + // Check if the version meets the constraints. The a variable will be true. + a := c.Check(v) + +Basic Comparisons + +There are two elements to the comparisons. First, a comparison string is a list +of comma separated and comparisons. These are then separated by || separated or +comparisons. For example, `">= 1.2, < 3.0.0 || >= 4.2.3"` is looking for a +comparison that's greater than or equal to 1.2 and less than 3.0.0 or is +greater than or equal to 4.2.3. + +The basic comparisons are: + + * `=`: equal (aliased to no operator) + * `!=`: not equal + * `>`: greater than + * `<`: less than + * `>=`: greater than or equal to + * `<=`: less than or equal to + +Hyphen Range Comparisons + +There are multiple methods to handle ranges and the first is hyphens ranges. +These look like: + + * `1.2 - 1.4.5` which is equivalent to `>= 1.2, <= 1.4.5` + * `2.3.4 - 4.5` which is equivalent to `>= 2.3.4, <= 4.5` + +Wildcards In Comparisons + +The `x`, `X`, and `*` characters can be used as a wildcard character. This works +for all comparison operators. When used on the `=` operator it falls +back to the pack level comparison (see tilde below). For example, + + * `1.2.x` is equivalent to `>= 1.2.0, < 1.3.0` + * `>= 1.2.x` is equivalent to `>= 1.2.0` + * `<= 2.x` is equivalent to `<= 3` + * `*` is equivalent to `>= 0.0.0` + +Tilde Range Comparisons (Patch) + +The tilde (`~`) comparison operator is for patch level ranges when a minor +version is specified and major level changes when the minor number is missing. +For example, + + * `~1.2.3` is equivalent to `>= 1.2.3, < 1.3.0` + * `~1` is equivalent to `>= 1, < 2` + * `~2.3` is equivalent to `>= 2.3, < 2.4` + * `~1.2.x` is equivalent to `>= 1.2.0, < 1.3.0` + * `~1.x` is equivalent to `>= 1, < 2` + +Caret Range Comparisons (Major) + +The caret (`^`) comparison operator is for major level changes. This is useful +when comparisons of API versions as a major change is API breaking. 
For example, + + * `^1.2.3` is equivalent to `>= 1.2.3, < 2.0.0` + * `^1.2.x` is equivalent to `>= 1.2.0, < 2.0.0` + * `^2.3` is equivalent to `>= 2.3, < 3` + * `^2.x` is equivalent to `>= 2.0.0, < 3` +*/ +package semver diff --git a/vendor/github.com/Masterminds/semver/version.go b/vendor/github.com/Masterminds/semver/version.go new file mode 100644 index 0000000..400d4f9 --- /dev/null +++ b/vendor/github.com/Masterminds/semver/version.go @@ -0,0 +1,425 @@ +package semver + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "regexp" + "strconv" + "strings" +) + +// The compiled version of the regex created at init() is cached here so it +// only needs to be created once. +var versionRegex *regexp.Regexp +var validPrereleaseRegex *regexp.Regexp + +var ( + // ErrInvalidSemVer is returned a version is found to be invalid when + // being parsed. + ErrInvalidSemVer = errors.New("Invalid Semantic Version") + + // ErrInvalidMetadata is returned when the metadata is an invalid format + ErrInvalidMetadata = errors.New("Invalid Metadata string") + + // ErrInvalidPrerelease is returned when the pre-release is an invalid format + ErrInvalidPrerelease = errors.New("Invalid Prerelease string") +) + +// SemVerRegex is the regular expression used to parse a semantic version. +const SemVerRegex string = `v?([0-9]+)(\.[0-9]+)?(\.[0-9]+)?` + + `(-([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` + + `(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` + +// ValidPrerelease is the regular expression which validates +// both prerelease and metadata values. +const ValidPrerelease string = `^([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*)$` + +// Version represents a single semantic version. +type Version struct { + major, minor, patch int64 + pre string + metadata string + original string +} + +func init() { + versionRegex = regexp.MustCompile("^" + SemVerRegex + "$") + validPrereleaseRegex = regexp.MustCompile(ValidPrerelease) +} + +// NewVersion parses a given version and returns an instance of Version or +// an error if unable to parse the version. +func NewVersion(v string) (*Version, error) { + m := versionRegex.FindStringSubmatch(v) + if m == nil { + return nil, ErrInvalidSemVer + } + + sv := &Version{ + metadata: m[8], + pre: m[5], + original: v, + } + + var temp int64 + temp, err := strconv.ParseInt(m[1], 10, 64) + if err != nil { + return nil, fmt.Errorf("Error parsing version segment: %s", err) + } + sv.major = temp + + if m[2] != "" { + temp, err = strconv.ParseInt(strings.TrimPrefix(m[2], "."), 10, 64) + if err != nil { + return nil, fmt.Errorf("Error parsing version segment: %s", err) + } + sv.minor = temp + } else { + sv.minor = 0 + } + + if m[3] != "" { + temp, err = strconv.ParseInt(strings.TrimPrefix(m[3], "."), 10, 64) + if err != nil { + return nil, fmt.Errorf("Error parsing version segment: %s", err) + } + sv.patch = temp + } else { + sv.patch = 0 + } + + return sv, nil +} + +// MustParse parses a given version and panics on error. +func MustParse(v string) *Version { + sv, err := NewVersion(v) + if err != nil { + panic(err) + } + return sv +} + +// String converts a Version object to a string. +// Note, if the original version contained a leading v this version will not. +// See the Original() method to retrieve the original value. Semantic Versions +// don't contain a leading v per the spec. Instead it's optional on +// implementation. 
+func (v *Version) String() string { + var buf bytes.Buffer + + fmt.Fprintf(&buf, "%d.%d.%d", v.major, v.minor, v.patch) + if v.pre != "" { + fmt.Fprintf(&buf, "-%s", v.pre) + } + if v.metadata != "" { + fmt.Fprintf(&buf, "+%s", v.metadata) + } + + return buf.String() +} + +// Original returns the original value passed in to be parsed. +func (v *Version) Original() string { + return v.original +} + +// Major returns the major version. +func (v *Version) Major() int64 { + return v.major +} + +// Minor returns the minor version. +func (v *Version) Minor() int64 { + return v.minor +} + +// Patch returns the patch version. +func (v *Version) Patch() int64 { + return v.patch +} + +// Prerelease returns the pre-release version. +func (v *Version) Prerelease() string { + return v.pre +} + +// Metadata returns the metadata on the version. +func (v *Version) Metadata() string { + return v.metadata +} + +// originalVPrefix returns the original 'v' prefix if any. +func (v *Version) originalVPrefix() string { + + // Note, only lowercase v is supported as a prefix by the parser. + if v.original != "" && v.original[:1] == "v" { + return v.original[:1] + } + return "" +} + +// IncPatch produces the next patch version. +// If the current version does not have prerelease/metadata information, +// it unsets metadata and prerelease values, increments patch number. +// If the current version has any of prerelease or metadata information, +// it unsets both values and keeps curent patch value +func (v Version) IncPatch() Version { + vNext := v + // according to http://semver.org/#spec-item-9 + // Pre-release versions have a lower precedence than the associated normal version. + // according to http://semver.org/#spec-item-10 + // Build metadata SHOULD be ignored when determining version precedence. + if v.pre != "" { + vNext.metadata = "" + vNext.pre = "" + } else { + vNext.metadata = "" + vNext.pre = "" + vNext.patch = v.patch + 1 + } + vNext.original = v.originalVPrefix() + "" + vNext.String() + return vNext +} + +// IncMinor produces the next minor version. +// Sets patch to 0. +// Increments minor number. +// Unsets metadata. +// Unsets prerelease status. +func (v Version) IncMinor() Version { + vNext := v + vNext.metadata = "" + vNext.pre = "" + vNext.patch = 0 + vNext.minor = v.minor + 1 + vNext.original = v.originalVPrefix() + "" + vNext.String() + return vNext +} + +// IncMajor produces the next major version. +// Sets patch to 0. +// Sets minor to 0. +// Increments major number. +// Unsets metadata. +// Unsets prerelease status. +func (v Version) IncMajor() Version { + vNext := v + vNext.metadata = "" + vNext.pre = "" + vNext.patch = 0 + vNext.minor = 0 + vNext.major = v.major + 1 + vNext.original = v.originalVPrefix() + "" + vNext.String() + return vNext +} + +// SetPrerelease defines the prerelease value. +// Value must not include the required 'hypen' prefix. +func (v Version) SetPrerelease(prerelease string) (Version, error) { + vNext := v + if len(prerelease) > 0 && !validPrereleaseRegex.MatchString(prerelease) { + return vNext, ErrInvalidPrerelease + } + vNext.pre = prerelease + vNext.original = v.originalVPrefix() + "" + vNext.String() + return vNext, nil +} + +// SetMetadata defines metadata value. +// Value must not include the required 'plus' prefix. 
+func (v Version) SetMetadata(metadata string) (Version, error) { + vNext := v + if len(metadata) > 0 && !validPrereleaseRegex.MatchString(metadata) { + return vNext, ErrInvalidMetadata + } + vNext.metadata = metadata + vNext.original = v.originalVPrefix() + "" + vNext.String() + return vNext, nil +} + +// LessThan tests if one version is less than another one. +func (v *Version) LessThan(o *Version) bool { + return v.Compare(o) < 0 +} + +// GreaterThan tests if one version is greater than another one. +func (v *Version) GreaterThan(o *Version) bool { + return v.Compare(o) > 0 +} + +// Equal tests if two versions are equal to each other. +// Note, versions can be equal with different metadata since metadata +// is not considered part of the comparable version. +func (v *Version) Equal(o *Version) bool { + return v.Compare(o) == 0 +} + +// Compare compares this version to another one. It returns -1, 0, or 1 if +// the version smaller, equal, or larger than the other version. +// +// Versions are compared by X.Y.Z. Build metadata is ignored. Prerelease is +// lower than the version without a prerelease. +func (v *Version) Compare(o *Version) int { + // Compare the major, minor, and patch version for differences. If a + // difference is found return the comparison. + if d := compareSegment(v.Major(), o.Major()); d != 0 { + return d + } + if d := compareSegment(v.Minor(), o.Minor()); d != 0 { + return d + } + if d := compareSegment(v.Patch(), o.Patch()); d != 0 { + return d + } + + // At this point the major, minor, and patch versions are the same. + ps := v.pre + po := o.Prerelease() + + if ps == "" && po == "" { + return 0 + } + if ps == "" { + return 1 + } + if po == "" { + return -1 + } + + return comparePrerelease(ps, po) +} + +// UnmarshalJSON implements JSON.Unmarshaler interface. +func (v *Version) UnmarshalJSON(b []byte) error { + var s string + if err := json.Unmarshal(b, &s); err != nil { + return err + } + temp, err := NewVersion(s) + if err != nil { + return err + } + v.major = temp.major + v.minor = temp.minor + v.patch = temp.patch + v.pre = temp.pre + v.metadata = temp.metadata + v.original = temp.original + temp = nil + return nil +} + +// MarshalJSON implements JSON.Marshaler interface. +func (v *Version) MarshalJSON() ([]byte, error) { + return json.Marshal(v.String()) +} + +func compareSegment(v, o int64) int { + if v < o { + return -1 + } + if v > o { + return 1 + } + + return 0 +} + +func comparePrerelease(v, o string) int { + + // split the prelease versions by their part. The separator, per the spec, + // is a . + sparts := strings.Split(v, ".") + oparts := strings.Split(o, ".") + + // Find the longer length of the parts to know how many loop iterations to + // go through. + slen := len(sparts) + olen := len(oparts) + + l := slen + if olen > slen { + l = olen + } + + // Iterate over each part of the prereleases to compare the differences. + for i := 0; i < l; i++ { + // Since the lentgh of the parts can be different we need to create + // a placeholder. This is to avoid out of bounds issues. + stemp := "" + if i < slen { + stemp = sparts[i] + } + + otemp := "" + if i < olen { + otemp = oparts[i] + } + + d := comparePrePart(stemp, otemp) + if d != 0 { + return d + } + } + + // Reaching here means two versions are of equal value but have different + // metadata (the part following a +). They are not identical in string form + // but the version comparison finds them to be equal. 
+ return 0 +} + +func comparePrePart(s, o string) int { + // Fastpath if they are equal + if s == o { + return 0 + } + + // When s or o are empty we can use the other in an attempt to determine + // the response. + if s == "" { + if o != "" { + return -1 + } + return 1 + } + + if o == "" { + if s != "" { + return 1 + } + return -1 + } + + // When comparing strings "99" is greater than "103". To handle + // cases like this we need to detect numbers and compare them. According + // to the semver spec, numbers are always positive. If there is a - at the + // start like -99 this is to be evaluated as an alphanum. numbers always + // have precedence over alphanum. Parsing as Uints because negative numbers + // are ignored. + + oi, n1 := strconv.ParseUint(o, 10, 64) + si, n2 := strconv.ParseUint(s, 10, 64) + + // The case where both are strings compare the strings + if n1 != nil && n2 != nil { + if s > o { + return 1 + } + return -1 + } else if n1 != nil { + // o is a string and s is a number + return -1 + } else if n2 != nil { + // s is a string and o is a number + return 1 + } + // Both are numbers + if si > oi { + return 1 + } + return -1 + +} diff --git a/vendor/github.com/Masterminds/semver/version_fuzz.go b/vendor/github.com/Masterminds/semver/version_fuzz.go new file mode 100644 index 0000000..b42bcd6 --- /dev/null +++ b/vendor/github.com/Masterminds/semver/version_fuzz.go @@ -0,0 +1,10 @@ +// +build gofuzz + +package semver + +func Fuzz(data []byte) int { + if _, err := NewVersion(string(data)); err != nil { + return 0 + } + return 1 +} diff --git a/vendor/github.com/ashanbrown/forbidigo/LICENSE b/vendor/github.com/ashanbrown/forbidigo/LICENSE new file mode 100644 index 0000000..dc1d47a --- /dev/null +++ b/vendor/github.com/ashanbrown/forbidigo/LICENSE @@ -0,0 +1,13 @@ +Copyright 2019 Andrew Shannon Brown + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
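Editor's note, not part of the vendored diff: before the forbidigo sources below, a minimal sketch of how the Masterminds/semver Version API vendored above is typically exercised. The import path and the sample version strings are assumptions for illustration; the behaviour shown (optional leading "v", prerelease precedence, ignored build metadata, IncMinor semantics) is taken directly from version.go above.

package main

import (
	"fmt"

	"github.com/Masterminds/semver" // assumed vendored import path
)

func main() {
	// MustParse/NewVersion accept an optional leading "v" and default missing
	// minor/patch segments to zero; String() never re-emits the "v".
	v1 := semver.MustParse("v1.2.3-beta.1+build.7")
	v2 := semver.MustParse("1.2.3")

	fmt.Println(v1.String())   // 1.2.3-beta.1+build.7
	fmt.Println(v1.Original()) // v1.2.3-beta.1+build.7

	// Build metadata is ignored for precedence and a prerelease sorts below
	// the corresponding release, so this prints -1.
	fmt.Println(v1.Compare(v2))

	// IncMinor bumps minor, resets patch and drops prerelease/metadata: 1.3.0.
	next := v2.IncMinor()
	fmt.Println(next.String())
}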
diff --git a/vendor/github.com/ashanbrown/forbidigo/forbidigo/forbidigo.go b/vendor/github.com/ashanbrown/forbidigo/forbidigo/forbidigo.go new file mode 100644 index 0000000..aa93df6 --- /dev/null +++ b/vendor/github.com/ashanbrown/forbidigo/forbidigo/forbidigo.go @@ -0,0 +1,128 @@ +// nouse provides a linter for forbidding the use of specific identifiers +package forbidigo + +import ( + "bytes" + "fmt" + "go/ast" + "go/printer" + "go/token" + "log" + "regexp" +) + +type Issue interface { + Details() string + Position() token.Position + String() string +} + +type UsedIssue struct { + identifier string + pattern string + position token.Position +} + +func (a UsedIssue) Details() string { + return fmt.Sprintf("use of `%s` forbidden by pattern `%s`", a.identifier, a.pattern) +} + +func (a UsedIssue) Position() token.Position { + return a.position +} + +func (a UsedIssue) String() string { return toString(a) } + +func toString(i Issue) string { + return fmt.Sprintf("%s at %s", i.Details(), i.Position()) +} + +type Linter struct { + patterns []*regexp.Regexp +} + +func DefaultPatterns() []string { + return []string{`^fmt\.Print(|f|ln)$`} +} + +func NewLinter(patterns []string) (*Linter, error) { + if len(patterns) == 0 { + patterns = DefaultPatterns() + } + compiledPatterns := make([]*regexp.Regexp, 0, len(patterns)) + for _, p := range patterns { + re, err := regexp.Compile(p) + if err != nil { + return nil, fmt.Errorf("unable to compile pattern `%s`: %s", p, err) + } + compiledPatterns = append(compiledPatterns, re) + } + return &Linter{ + patterns: compiledPatterns, + }, nil +} + +type visitor struct { + linter *Linter + comments []*ast.CommentGroup + + fset *token.FileSet + issues []Issue +} + +func (l *Linter) Run(fset *token.FileSet, nodes ...ast.Node) ([]Issue, error) { + var issues []Issue // nolint:prealloc // we don't know how many there will be + for _, node := range nodes { + var comments []*ast.CommentGroup + if file, ok := node.(*ast.File); ok { + comments = file.Comments + } + visitor := visitor{ + linter: l, + fset: fset, + comments: comments, + } + ast.Walk(&visitor, node) + issues = append(issues, visitor.issues...) 
+ } + return issues, nil +} + +func (v *visitor) Visit(node ast.Node) ast.Visitor { + switch node.(type) { + case *ast.SelectorExpr: + case *ast.Ident: + default: + return v + } + for _, p := range v.linter.patterns { + if p.MatchString(v.textFor(node)) && !v.permit(node) { + v.issues = append(v.issues, UsedIssue{ + identifier: v.textFor(node), + pattern: p.String(), + position: v.fset.Position(node.Pos()), + }) + } + } + return nil +} + +func (v *visitor) textFor(node ast.Node) string { + buf := new(bytes.Buffer) + if err := printer.Fprint(buf, v.fset, node); err != nil { + log.Fatalf("ERROR: unable to print node at %s: %s", v.fset.Position(node.Pos()), err) + } + return buf.String() +} + +func (v *visitor) permit(node ast.Node) bool { + nodePos := v.fset.Position(node.Pos()) + var nolint = regexp.MustCompile(fmt.Sprintf(`^permit:%s\b`, regexp.QuoteMeta(v.textFor(node)))) + for _, c := range v.comments { + commentPos := v.fset.Position(c.Pos()) + if commentPos.Line == nodePos.Line && nolint.MatchString(c.Text()) { + return true + } + } + return false +} diff --git a/vendor/github.com/ashanbrown/makezero/LICENSE b/vendor/github.com/ashanbrown/makezero/LICENSE new file mode 100644 index 0000000..dc1d47a --- /dev/null +++ b/vendor/github.com/ashanbrown/makezero/LICENSE @@ -0,0 +1,13 @@ +Copyright 2019 Andrew Shannon Brown + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/vendor/github.com/ashanbrown/makezero/makezero/makezero.go b/vendor/github.com/ashanbrown/makezero/makezero/makezero.go new file mode 100644 index 0000000..6bf230b --- /dev/null +++ b/vendor/github.com/ashanbrown/makezero/makezero/makezero.go @@ -0,0 +1,197 @@ +// makezero provides a linter for appends to slices initialized with non-zero length. 
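Editor's note, not part of the vendored diff (and unrelated to the makezero package whose sources continue below): a minimal sketch of driving the forbidigo linter defined above over a parsed file. The file contents and names are made up for illustration and the import path assumes the vendored layout; the default pattern ^fmt\.Print(|f|ln)$ and the permit: escape comment are exactly as implemented in forbidigo.go above.

package main

import (
	"fmt"
	"go/parser"
	"go/token"

	"github.com/ashanbrown/forbidigo/forbidigo" // assumed vendored import path
)

// demoSrc is a hypothetical file: the first call carries a permit: comment
// on the same line, the second does not and should be reported.
const demoSrc = `package demo

import "fmt"

func greet() {
	fmt.Println("hello") // permit:fmt.Println reviewed and allowed
	fmt.Printf("%d\n", 42)
}
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "demo.go", demoSrc, parser.ParseComments)
	if err != nil {
		panic(err)
	}

	// An empty pattern list falls back to DefaultPatterns().
	linter, err := forbidigo.NewLinter(nil)
	if err != nil {
		panic(err)
	}

	issues, err := linter.Run(fset, file)
	if err != nil {
		panic(err)
	}
	for _, issue := range issues {
		fmt.Println(issue) // only the fmt.Printf call is reported
	}
}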
+package makezero + +import ( + "bytes" + "fmt" + "go/ast" + "go/printer" + "go/token" + "go/types" + "log" + "regexp" +) + +type Issue interface { + Details() string + Position() token.Position + String() string +} + +type AppendIssue struct { + name string + position token.Position +} + +func (a AppendIssue) Details() string { + return fmt.Sprintf("append to slice `%s` with non-zero initialized length", a.name) +} + +func (a AppendIssue) Position() token.Position { + return a.position +} + +func (a AppendIssue) String() string { return toString(a) } + +type MustHaveNonZeroInitLenIssue struct { + name string + position token.Position +} + +func (i MustHaveNonZeroInitLenIssue) Details() string { + return fmt.Sprintf("slice `%s` does not have non-zero initial length", i.name) +} + +func (i MustHaveNonZeroInitLenIssue) Position() token.Position { + return i.position +} + +func (i MustHaveNonZeroInitLenIssue) String() string { return toString(i) } + +func toString(i Issue) string { + return fmt.Sprintf("%s at %s", i.Details(), i.Position()) +} + +type visitor struct { + initLenMustBeZero bool + + comments []*ast.CommentGroup // comments to apply during this visit + info *types.Info + + nonZeroLengthSliceDecls map[interface{}]struct{} + fset *token.FileSet + issues []Issue +} + +type Linter struct { + initLenMustBeZero bool +} + +func NewLinter(initialLengthMustBeZero bool) *Linter { + return &Linter{ + initLenMustBeZero: initialLengthMustBeZero, + } +} + +func (l Linter) Run(fset *token.FileSet, info *types.Info, nodes ...ast.Node) ([]Issue, error) { + var issues []Issue // nolint:prealloc // don't know how many there will be + for _, node := range nodes { + var comments []*ast.CommentGroup + if file, ok := node.(*ast.File); ok { + comments = file.Comments + } + visitor := visitor{ + nonZeroLengthSliceDecls: make(map[interface{}]struct{}), + initLenMustBeZero: l.initLenMustBeZero, + info: info, + fset: fset, + comments: comments, + } + ast.Walk(&visitor, node) + issues = append(issues, visitor.issues...) 
+ } + return issues, nil +} + +func (v *visitor) Visit(node ast.Node) ast.Visitor { + switch node := node.(type) { + case *ast.CallExpr: + fun, ok := node.Fun.(*ast.Ident) + if !ok || fun.Name != "append" { + break + } + if sliceIdent, ok := node.Args[0].(*ast.Ident); ok && + v.hasNonZeroInitialLength(sliceIdent) && + !v.hasNoLintOnSameLine(fun) { + v.issues = append(v.issues, AppendIssue{name: sliceIdent.Name, position: v.fset.Position(fun.Pos())}) + } + case *ast.AssignStmt: + for i, right := range node.Rhs { + if right, ok := right.(*ast.CallExpr); ok { + fun, ok := right.Fun.(*ast.Ident) + if !ok || fun.Name != "make" { + continue + } + left := node.Lhs[i] + if len(right.Args) == 2 { + // ignore if not a slice or it has explicit zero length + if !v.isSlice(right.Args[0]) { + break + } else if lit, ok := right.Args[1].(*ast.BasicLit); ok && lit.Kind == token.INT && lit.Value == "0" { + break + } + if v.initLenMustBeZero && !v.hasNoLintOnSameLine(fun) { + v.issues = append(v.issues, MustHaveNonZeroInitLenIssue{ + name: v.textFor(left), + position: v.fset.Position(node.Pos()), + }) + } + v.recordNonZeroLengthSlices(left) + } + } + } + } + return v +} + +func (v *visitor) textFor(node ast.Node) string { + typeBuf := new(bytes.Buffer) + if err := printer.Fprint(typeBuf, v.fset, node); err != nil { + log.Fatalf("ERROR: unable to print type: %s", err) + } + return typeBuf.String() +} + +func (v *visitor) hasNonZeroInitialLength(ident *ast.Ident) bool { + if ident.Obj == nil { + log.Printf("WARNING: could not determine with %q at %s is a slice (missing object type)", + ident.Name, v.fset.Position(ident.Pos()).String()) + return false + } + _, exists := v.nonZeroLengthSliceDecls[ident.Obj.Decl] + return exists +} + +func (v *visitor) recordNonZeroLengthSlices(node ast.Node) { + ident, ok := node.(*ast.Ident) + if !ok { + return + } + v.nonZeroLengthSliceDecls[ident.Obj.Decl] = struct{}{} +} + +func (v *visitor) isSlice(node ast.Node) bool { + // determine type if this is a user-defined type + if ident, ok := node.(*ast.Ident); ok { + obj := ident.Obj + if obj == nil { + if v.info != nil { + _, ok := v.info.ObjectOf(ident).Type().(*types.Slice) + return ok + } + return false + } + spec, ok := obj.Decl.(*ast.TypeSpec) + if !ok { + return false + } + node = spec.Type + } + + if node, ok := node.(*ast.ArrayType); ok { + return node.Len == nil // only slices have zero length + } + return false +} + +func (v *visitor) hasNoLintOnSameLine(node ast.Node) bool { + var nolint = regexp.MustCompile(`^\s*nozero\b`) + nodePos := v.fset.Position(node.Pos()) + for _, c := range v.comments { + commentPos := v.fset.Position(c.Pos()) + if commentPos.Line == nodePos.Line && nolint.MatchString(c.Text()) { + return true + } + } + return false +} diff --git a/vendor/github.com/bombsimon/wsl/v3/wsl.go b/vendor/github.com/bombsimon/wsl/v3/wsl.go index 3b4a4e9..31520fb 100644 --- a/vendor/github.com/bombsimon/wsl/v3/wsl.go +++ b/vendor/github.com/bombsimon/wsl/v3/wsl.go @@ -398,6 +398,18 @@ func (p *Processor) parseBlockStatements(statements []ast.Stmt) { if !cuddledWithLastStmt { checkingErr := atLeastOneInListsMatch(rightAndLeftHandSide, p.config.ErrorVariableNames) if checkingErr { + // We only want to enforce cuddling error checks if the + // error was assigned on the line above. See + // https://github.com/bombsimon/wsl/issues/78. + // This is needed since `assignedOnLineAbove` is not + // actually just assignments but everything from LHS in the + // previous statement. 
This means that if previous line was + // `if err ...`, `err` will now be in the list + // `assignedOnLineAbove`. + if _, ok := previousStatement.(*ast.AssignStmt); !ok { + continue + } + if checkingErrInitializedInline() { continue } diff --git a/vendor/github.com/daixiang0/gci/LICENSE b/vendor/github.com/daixiang0/gci/LICENSE new file mode 100644 index 0000000..e1292f7 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2020, Xiang Dai +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/gci.go b/vendor/github.com/daixiang0/gci/pkg/gci/gci.go new file mode 100644 index 0000000..7e31b87 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/gci/gci.go @@ -0,0 +1,368 @@ +package gci + +import ( + "bytes" + "fmt" + "io" + "io/ioutil" + "os" + "os/exec" + "path/filepath" + "sort" + "strings" +) + +const ( + // pkg type: standard, remote, local + standard int = iota + // 3rd-party packages + remote + local + + commentFlag = "//" +) + +var ( + importStartFlag = []byte(` +import ( +`) + importEndFlag = []byte(` +) +`) +) + +type FlagSet struct { + LocalFlag string + DoWrite, DoDiff *bool +} + +type pkg struct { + list map[int][]string + comment map[string]string + alias map[string]string +} + +func newPkg(data [][]byte, localFlag string) *pkg { + listMap := make(map[int][]string) + commentMap := make(map[string]string) + aliasMap := make(map[string]string) + p := &pkg{ + list: listMap, + comment: commentMap, + alias: aliasMap, + } + + formatData := make([]string, 0) + // remove all empty lines + for _, v := range data { + if len(v) > 0 { + formatData = append(formatData, strings.TrimSpace(string(v))) + } + } + + n := len(formatData) + for i := n - 1; i >= 0; i-- { + line := formatData[i] + + // check commentFlag: + // 1. one line commentFlag + // 2. 
commentFlag after import path + commentIndex := strings.Index(line, commentFlag) + if commentIndex == 0 { + // comment in the last line is useless, ignore it + if i+1 >= n { + continue + } + pkg, _, _ := getPkgInfo(formatData[i+1], strings.Index(formatData[i+1], commentFlag) >= 0) + p.comment[pkg] = line + continue + } else if commentIndex > 0 { + pkg, alias, comment := getPkgInfo(line, true) + if alias != "" { + p.alias[pkg] = alias + } + + p.comment[pkg] = comment + pkgType := getPkgType(pkg, localFlag) + p.list[pkgType] = append(p.list[pkgType], pkg) + continue + } + + pkg, alias, _ := getPkgInfo(line, false) + + if alias != "" { + p.alias[pkg] = alias + } + + pkgType := getPkgType(pkg, localFlag) + p.list[pkgType] = append(p.list[pkgType], pkg) + } + + return p +} + +// fmt format import pkgs as expected +func (p *pkg) fmt() []byte { + ret := make([]string, 0, 100) + + for pkgType := range []int{standard, remote, local} { + sort.Strings(p.list[pkgType]) + for _, s := range p.list[pkgType] { + if p.comment[s] != "" { + l := fmt.Sprintf("%s%s%s%s", linebreak, indent, p.comment[s], linebreak) + ret = append(ret, l) + } + + if p.alias[s] != "" { + s = fmt.Sprintf("%s%s%s%s%s", indent, p.alias[s], blank, s, linebreak) + } else { + s = fmt.Sprintf("%s%s%s", indent, s, linebreak) + } + + ret = append(ret, s) + } + + if len(p.list[pkgType]) > 0 { + ret = append(ret, linebreak) + } + } + if ret[len(ret)-1] == linebreak { + ret = ret[:len(ret)-1] + } + + // remove duplicate empty lines + s1 := fmt.Sprintf("%s%s%s%s", linebreak, linebreak, linebreak, indent) + s2 := fmt.Sprintf("%s%s%s", linebreak, linebreak, indent) + return []byte(strings.ReplaceAll(strings.Join(ret, ""), s1, s2)) +} + +// getPkgInfo assume line is a import path, and return (path, alias, comment) +func getPkgInfo(line string, comment bool) (string, string, string) { + if comment { + s := strings.Split(line, commentFlag) + pkgArray := strings.Split(s[0], blank) + if len(pkgArray) > 1 { + return pkgArray[1], pkgArray[0], fmt.Sprintf("%s%s%s", commentFlag, blank, strings.TrimSpace(s[1])) + } else { + return strings.TrimSpace(pkgArray[0]), "", fmt.Sprintf("%s%s%s", commentFlag, blank, strings.TrimSpace(s[1])) + } + } else { + pkgArray := strings.Split(line, blank) + if len(pkgArray) > 1 { + return pkgArray[1], pkgArray[0], "" + } else { + return pkgArray[0], "", "" + } + } +} + +func getPkgType(line, localFlag string) int { + pkgName := strings.Trim(line, "\"\\`") + + if localFlag != "" && strings.HasPrefix(pkgName, localFlag) { + return local + } + + if isStandardPackage(pkgName) { + return standard + } + + return remote +} + +const ( + blank = " " + indent = "\t" + linebreak = "\n" +) + +func diff(b1, b2 []byte, filename string) (data []byte, err error) { + f1, err := writeTempFile("", "gci", b1) + if err != nil { + return + } + defer os.Remove(f1) + + f2, err := writeTempFile("", "gci", b2) + if err != nil { + return + } + defer os.Remove(f2) + + cmd := "diff" + + data, err = exec.Command(cmd, "-u", f1, f2).CombinedOutput() + if len(data) > 0 { + // diff exits with a non-zero status when the files don't match. + // Ignore that failure as long as we get output. 
+ return replaceTempFilename(data, filename) + } + return +} + +func writeTempFile(dir, prefix string, data []byte) (string, error) { + file, err := ioutil.TempFile(dir, prefix) + if err != nil { + return "", err + } + _, err = file.Write(data) + if err1 := file.Close(); err == nil { + err = err1 + } + if err != nil { + os.Remove(file.Name()) + return "", err + } + return file.Name(), nil +} + +// replaceTempFilename replaces temporary filenames in diff with actual one. +// +// --- /tmp/gofmt316145376 2017-02-03 19:13:00.280468375 -0500 +// +++ /tmp/gofmt617882815 2017-02-03 19:13:00.280468375 -0500 +// ... +// -> +// --- path/to/file.go.orig 2017-02-03 19:13:00.280468375 -0500 +// +++ path/to/file.go 2017-02-03 19:13:00.280468375 -0500 +// ... +func replaceTempFilename(diff []byte, filename string) ([]byte, error) { + bs := bytes.SplitN(diff, []byte{'\n'}, 3) + if len(bs) < 3 { + return nil, fmt.Errorf("got unexpected diff for %s", filename) + } + // Preserve timestamps. + var t0, t1 []byte + if i := bytes.LastIndexByte(bs[0], '\t'); i != -1 { + t0 = bs[0][i:] + } + if i := bytes.LastIndexByte(bs[1], '\t'); i != -1 { + t1 = bs[1][i:] + } + // Always print filepath with slash separator. + f := filepath.ToSlash(filename) + bs[0] = []byte(fmt.Sprintf("--- %s%s", f+".orig", t0)) + bs[1] = []byte(fmt.Sprintf("+++ %s%s", f, t1)) + return bytes.Join(bs, []byte{'\n'}), nil +} + +func visitFile(set *FlagSet) filepath.WalkFunc { + return func(path string, f os.FileInfo, err error) error { + if err == nil && isGoFile(f) { + err = processFile(path, os.Stdout, set) + } + return err + } +} + +func WalkDir(path string, set *FlagSet) error { + return filepath.Walk(path, visitFile(set)) +} + +func isGoFile(f os.FileInfo) bool { + // ignore non-Go files + name := f.Name() + return !f.IsDir() && !strings.HasPrefix(name, ".") && strings.HasSuffix(name, ".go") +} + +func ProcessFile(filename string, out io.Writer, set *FlagSet) error { + return processFile(filename, out, set) +} + +func processFile(filename string, out io.Writer, set *FlagSet) error { + var err error + + f, err := os.Open(filename) + if err != nil { + return err + } + defer f.Close() + + src, err := ioutil.ReadAll(f) + if err != nil { + return err + } + + ori := make([]byte, len(src)) + copy(ori, src) + start := bytes.Index(src, importStartFlag) + // in case no importStartFlag or importStartFlag exist in the commentFlag + if start < 0 { + fmt.Printf("skip file %s since no import\n", filename) + return nil + } + end := bytes.Index(src[start:], importEndFlag) + start + + ret := bytes.Split(src[start+len(importStartFlag):end], []byte(linebreak)) + + p := newPkg(ret, set.LocalFlag) + + res := append(src[:start+len(importStartFlag)], append(p.fmt(), src[end+1:]...)...) + + if !bytes.Equal(ori, res) { + if *set.DoWrite { + // On Windows, we need to re-set the permissions from the file. See golang/go#38225. 
+ var perms os.FileMode + if fi, err := os.Stat(filename); err == nil { + perms = fi.Mode() & os.ModePerm + } + err = ioutil.WriteFile(filename, res, perms) + if err != nil { + return err + } + } + if *set.DoDiff { + data, err := diff(ori, res, filename) + if err != nil { + return fmt.Errorf("failed to diff: %v", err) + } + fmt.Printf("diff -u %s %s\n", filepath.ToSlash(filename+".orig"), filepath.ToSlash(filename)) + if _, err := out.Write(data); err != nil { + return fmt.Errorf("failed to write: %v", err) + } + } + } + if !*set.DoWrite && !*set.DoDiff { + if _, err = out.Write(res); err != nil { + return fmt.Errorf("failed to write: %v", err) + } + } + + return err +} + +// Run return source and result in []byte if succeed +func Run(filename string, set *FlagSet) ([]byte, []byte, error) { + var err error + + f, err := os.Open(filename) + if err != nil { + return nil, nil, err + } + defer f.Close() + + src, err := ioutil.ReadAll(f) + if err != nil { + return nil, nil, err + } + + ori := make([]byte, len(src)) + copy(ori, src) + start := bytes.Index(src, importStartFlag) + // in case no importStartFlag or importStartFlag exist in the commentFlag + if start < 0 { + return nil, nil, nil + } + end := bytes.Index(src[start:], importEndFlag) + start + + ret := bytes.Split(src[start+len(importStartFlag):end], []byte(linebreak)) + + p := newPkg(ret, set.LocalFlag) + + res := append(src[:start+len(importStartFlag)], append(p.fmt(), src[end+1:]...)...) + + if bytes.Equal(ori, res) { + return ori, nil, nil + } + + return ori, res, nil +} diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/std.go b/vendor/github.com/daixiang0/gci/pkg/gci/std.go new file mode 100644 index 0000000..ac96b55 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/gci/std.go @@ -0,0 +1,161 @@ +package gci + +// Code generated based on go1.16beta1. DO NOT EDIT. 
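Editor's note, not part of the vendored diff: a small sketch of invoking the gci package defined above to regroup an import block into standard, third-party and local sections. The temporary file, its contents and the module prefix passed as LocalFlag are assumptions for illustration; Run only computes the rewrite and never consults DoWrite/DoDiff, as can be seen in gci.go above.

package main

import (
	"fmt"
	"io/ioutil"
	"os"

	"github.com/daixiang0/gci/pkg/gci" // assumed vendored import path
)

// demoSrc mixes the three import groups deliberately; the paths are hypothetical.
const demoSrc = `package demo

import (
	"github.com/example/project/internal/util"
	"fmt"
	"github.com/pkg/errors"
)
`

func main() {
	f, err := ioutil.TempFile("", "gci-demo-*.go")
	if err != nil {
		panic(err)
	}
	defer os.Remove(f.Name())
	if _, err := f.WriteString(demoSrc); err != nil {
		panic(err)
	}
	f.Close()

	// LocalFlag marks the module's own import prefix as the "local" group.
	set := &gci.FlagSet{LocalFlag: "github.com/example/project"}
	_, formatted, err := gci.Run(f.Name(), set)
	if err != nil {
		panic(err)
	}
	if formatted != nil {
		fmt.Printf("%s", formatted) // fmt, then errors, then util, separated by blank lines
	}
}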
+ +var standardPackages = map[string]struct{}{ + "archive/tar": {}, + "archive/zip": {}, + "bufio": {}, + "bytes": {}, + "compress/bzip2": {}, + "compress/flate": {}, + "compress/gzip": {}, + "compress/lzw": {}, + "compress/zlib": {}, + "container/heap": {}, + "container/list": {}, + "container/ring": {}, + "context": {}, + "crypto": {}, + "crypto/aes": {}, + "crypto/cipher": {}, + "crypto/des": {}, + "crypto/dsa": {}, + "crypto/ecdsa": {}, + "crypto/ed25519": {}, + "crypto/elliptic": {}, + "crypto/hmac": {}, + "crypto/md5": {}, + "crypto/rand": {}, + "crypto/rc4": {}, + "crypto/rsa": {}, + "crypto/sha1": {}, + "crypto/sha256": {}, + "crypto/sha512": {}, + "crypto/subtle": {}, + "crypto/tls": {}, + "crypto/x509": {}, + "crypto/x509/pkix": {}, + "database/sql": {}, + "database/sql/driver": {}, + "debug/dwarf": {}, + "debug/elf": {}, + "debug/gosym": {}, + "debug/macho": {}, + "debug/pe": {}, + "debug/plan9obj": {}, + "embed": {}, + "encoding": {}, + "encoding/ascii85": {}, + "encoding/asn1": {}, + "encoding/base32": {}, + "encoding/base64": {}, + "encoding/binary": {}, + "encoding/csv": {}, + "encoding/gob": {}, + "encoding/hex": {}, + "encoding/json": {}, + "encoding/pem": {}, + "encoding/xml": {}, + "errors": {}, + "expvar": {}, + "flag": {}, + "fmt": {}, + "go/ast": {}, + "go/build": {}, + "go/constant": {}, + "go/doc": {}, + "go/format": {}, + "go/importer": {}, + "go/parser": {}, + "go/printer": {}, + "go/scanner": {}, + "go/token": {}, + "go/types": {}, + "hash": {}, + "hash/adler32": {}, + "hash/crc32": {}, + "hash/crc64": {}, + "hash/fnv": {}, + "hash/maphash": {}, + "html": {}, + "html/template": {}, + "image": {}, + "image/color": {}, + "image/color/palette": {}, + "image/draw": {}, + "image/gif": {}, + "image/jpeg": {}, + "image/png": {}, + "index/suffixarray": {}, + "io": {}, + "io/fs": {}, + "io/ioutil": {}, + "log": {}, + "log/syslog": {}, + "math": {}, + "math/big": {}, + "math/bits": {}, + "math/cmplx": {}, + "math/rand": {}, + "mime": {}, + "mime/multipart": {}, + "mime/quotedprintable": {}, + "net": {}, + "net/http": {}, + "net/http/cgi": {}, + "net/http/cookiejar": {}, + "net/http/fcgi": {}, + "net/http/httptest": {}, + "net/http/httptrace": {}, + "net/http/httputil": {}, + "net/http/pprof": {}, + "net/mail": {}, + "net/rpc": {}, + "net/rpc/jsonrpc": {}, + "net/smtp": {}, + "net/textproto": {}, + "net/url": {}, + "os": {}, + "os/exec": {}, + "os/signal": {}, + "os/user": {}, + "path": {}, + "path/filepath": {}, + "plugin": {}, + "reflect": {}, + "regexp": {}, + "regexp/syntax": {}, + "runtime": {}, + "runtime/cgo": {}, + "runtime/debug": {}, + "runtime/metrics": {}, + "runtime/pprof": {}, + "runtime/race": {}, + "runtime/trace": {}, + "sort": {}, + "strconv": {}, + "strings": {}, + "sync": {}, + "sync/atomic": {}, + "syscall": {}, + "testing": {}, + "testing/fstest": {}, + "testing/iotest": {}, + "testing/quick": {}, + "text/scanner": {}, + "text/tabwriter": {}, + "text/template": {}, + "text/template/parse": {}, + "time": {}, + "time/tzdata": {}, + "unicode": {}, + "unicode/utf16": {}, + "unicode/utf8": {}, + "unsafe": {}, +} + +func isStandardPackage(pkg string) bool { + _, ok := standardPackages[pkg] + return ok +} diff --git a/vendor/github.com/denis-tingajkin/go-header/.gitignore b/vendor/github.com/denis-tingajkin/go-header/.gitignore new file mode 100644 index 0000000..62c8935 --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/.gitignore @@ -0,0 +1 @@ +.idea/ \ No newline at end of file diff --git 
a/vendor/github.com/denis-tingajkin/go-header/.go-header.yml b/vendor/github.com/denis-tingajkin/go-header/.go-header.yml new file mode 100644 index 0000000..446d731 --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/.go-header.yml @@ -0,0 +1,19 @@ +values: + regexp: + copyright-holder: Copyright \(c\) {{year-range}} Denis Tingajkin +template: | + {{copyright-holder}} + + SPDX-License-Identifier: Apache-2.0 + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at: + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/vendor/github.com/denis-tingajkin/go-header/LICENSE b/vendor/github.com/denis-tingajkin/go-header/LICENSE new file mode 100644 index 0000000..a2c9fda --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. 
For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. 
+ + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 
+ + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. 
+ + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. diff --git a/vendor/github.com/denis-tingajkin/go-header/README.md b/vendor/github.com/denis-tingajkin/go-header/README.md new file mode 100644 index 0000000..1a2a3d9 --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/README.md @@ -0,0 +1,81 @@ +# go-header +[![Actions Status](https://github.com/denis-tingajkin/go-header/workflows/ci/badge.svg)](https://github.com/denis-tingajkin/go-header/actions) + +Go source code linter providing checks for license headers. + +## Installation + +For installation you can simply use `go get`. + +```bash +go get github.com/denis-tingajkin/go-header/cmd/go-header +``` + +## Configuration + +To configuring `.go-header.yml` linter you simply need to fill the next fields: + +```yaml +--- +temaplte: # expects header template string. +tempalte-path: # expects path to file with license header string. +values: # expects `const` or `regexp` node with values where values is a map string to string. + const: + key1: value1 # const value just checks equality. Note `key1` should be used in template string as {{ key1 }} or {{ KEY1 }}. 
+ regexp: + key2: value2 # regexp value just checks regex match. The value should be a valid regexp pattern. Note `key2` should be used in template string as {{ key2 }} or {{ KEY2 }}. +``` + +Where `values` also can be used recursively. Example: + +```yaml +values: + const: + key1: "value" + regexp: + key2: "{{key1}} value1" # Reads as regex pattern "value value1" +``` + +## Bult-in values + +- **YEAR** - Expects current year. Example header value: `2020`. Example of template using: `{{YEAR}}` or `{{year}}`. +- **YEAR-RANGE** - Expects any valid year interval or current year. Example header value: `2020` or `2000-2020`. Example of template using: `{{year-range}}` or `{{YEAR-RANGE}}`. + +## Execution + +`go-header` linter expects file paths on input. If you want to run `go-header` only on diff files, then you can use this command: + +```bash +go-header $(git diff --name-only | grep -E '.*\.go') +``` + +## Setup example + +### Step 1 + +Create configuration file `.go-header.yml` in the root of project. + +```yaml +--- +values: + const: + MY COMPANY: mycompany.com +template: | + {{ MY COMPANY }} + SPDX-License-Identifier: Apache-2.0 + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at: + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +``` + +### Step 2 +You are ready! Execute `go-header ${PATH_TO_FILES}` from the root of the project. diff --git a/vendor/github.com/denis-tingajkin/go-header/analyzer.go b/vendor/github.com/denis-tingajkin/go-header/analyzer.go new file mode 100644 index 0000000..5707890 --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/analyzer.go @@ -0,0 +1,146 @@ +// Copyright (c) 2020 Denis Tingajkin +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at: +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package goheader + +import ( + "fmt" + "go/ast" + "os" + "os/exec" + "strings" + "time" +) + +type Target struct { + Path string + File *ast.File +} + +const iso = "2006-01-02 15:04:05 -0700" + +func (t *Target) ModTime() (time.Time, error) { + diff, err := exec.Command("git", "diff", t.Path).CombinedOutput() + if err == nil && len(diff) == 0 { + line, err := exec.Command("git", "log", "-1", "--pretty=format:%cd", "--date=iso", "--", t.Path).CombinedOutput() + if err == nil { + return time.Parse(iso, string(line)) + } + } + info, err := os.Stat(t.Path) + if err != nil { + return time.Time{}, err + } + return info.ModTime(), nil +} + +type Analyzer struct { + values map[string]Value + template string +} + +func (a *Analyzer) Analyze(target *Target) Issue { + if a.template == "" { + return NewIssue("Missed template for check") + } + if t, err := target.ModTime(); err == nil { + if t.Year() != time.Now().Year() { + return nil + } + } + file := target.File + var header string + var offset = Location{ + Position: 1, + } + if len(file.Comments) > 0 && file.Comments[0].Pos() < file.Package { + if strings.HasPrefix(file.Comments[0].List[0].Text, "/*") { + header = (&ast.CommentGroup{List: []*ast.Comment{file.Comments[0].List[0]}}).Text() + } else { + header = file.Comments[0].Text() + offset.Position += 3 + } + } + header = strings.TrimSpace(header) + if header == "" { + return NewIssue("Missed header for check") + } + s := NewReader(header) + s.SetOffset(offset) + t := NewReader(a.template) + for !s.Done() && !t.Done() { + templateCh := t.Peek() + if templateCh == '{' { + name := a.readField(t) + if a.values[name] == nil { + return NewIssue(fmt.Sprintf("Template has unknown value: %v", name)) + } + if i := a.values[name].Read(s); i != nil { + return i + } + continue + } + sourceCh := s.Peek() + if sourceCh != templateCh { + l := s.Location() + notNextLine := func(r rune) bool { + return r != '\n' + } + actual := s.ReadWhile(notNextLine) + expected := t.ReadWhile(notNextLine) + return NewIssueWithLocation(fmt.Sprintf("Actual: %v\nExpected:%v", actual, expected), l) + } + s.Next() + t.Next() + } + if !s.Done() { + l := s.Location() + return NewIssueWithLocation(fmt.Sprintf("Unexpected string: %v", s.Finish()), l) + } + if !t.Done() { + l := s.Location() + return NewIssueWithLocation(fmt.Sprintf("Missed string: %v", t.Finish()), l) + } + return nil +} + +func (a *Analyzer) readField(reader *Reader) string { + _ = reader.Next() + _ = reader.Next() + + r := reader.ReadWhile(func(r rune) bool { + return r != '}' + }) + + _ = reader.Next() + _ = reader.Next() + + return strings.ToLower(strings.TrimSpace(r)) +} + +func New(options ...Option) *Analyzer { + a := &Analyzer{} + for _, o := range options { + o.apply(a) + } + for _, v := range a.values { + err := v.Calculate(a.values) + if err != nil { + panic(err.Error()) + } + } + return a +} diff --git a/vendor/github.com/denis-tingajkin/go-header/config.go b/vendor/github.com/denis-tingajkin/go-header/config.go new file mode 100644 index 0000000..fa8b23c --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/config.go @@ -0,0 +1,99 @@ +// Copyright (c) 2020 Denis Tingajkin +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at: +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package goheader + +import ( + "errors" + "fmt" + "io/ioutil" + "strings" + "time" + + "gopkg.in/yaml.v2" +) + +// Configuration represents go-header linter setup parameters +type Configuration struct { + // Values is map of values. Supports two types 'const` and `regexp`. Values can be used recursively. + Values map[string]map[string]string `yaml:"values"'` + // Template is template for checking. Uses values. + Template string `yaml:"template"` + // TemplatePath path to the template file. Useful if need to load the template from a specific file. + TemplatePath string `yaml:"template-path"` +} + +func (c *Configuration) builtInValues() map[string]Value { + var result = make(map[string]Value) + year := fmt.Sprint(time.Now().Year()) + result["year-range"] = &RegexpValue{ + RawValue: strings.ReplaceAll(`(20\d\d\-YEAR)|(YEAR)`, "YEAR", year), + } + result["year"] = &ConstValue{ + RawValue: year, + } + return result +} + +func (c *Configuration) GetValues() (map[string]Value, error) { + var result = c.builtInValues() + createConst := func(raw string) Value { + return &ConstValue{RawValue: raw} + } + createRegexp := func(raw string) Value { + return &RegexpValue{RawValue: raw} + } + appendValues := func(m map[string]string, create func(string) Value) { + for k, v := range m { + key := strings.ToLower(k) + result[key] = create(v) + } + } + for k, v := range c.Values { + switch k { + case "const": + appendValues(v, createConst) + case "regexp": + appendValues(v, createRegexp) + default: + return nil, fmt.Errorf("unknown value type %v", k) + } + } + return result, nil +} + +func (c *Configuration) GetTemplate() (string, error) { + if c.Template != "" { + return c.Template, nil + } + if c.TemplatePath == "" { + return "", errors.New("template has not passed") + } + if b, err := ioutil.ReadFile(c.TemplatePath); err != nil { + return "", err + } else { + c.Template = strings.TrimSpace(string(b)) + return c.Template, nil + } +} + +func (c *Configuration) Parse(p string) error { + b, err := ioutil.ReadFile(p) + if err != nil { + return err + } + return yaml.Unmarshal(b, c) +} diff --git a/vendor/github.com/denis-tingajkin/go-header/go.mod b/vendor/github.com/denis-tingajkin/go-header/go.mod new file mode 100644 index 0000000..68984cb --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/go.mod @@ -0,0 +1,10 @@ +module github.com/denis-tingajkin/go-header + +go 1.15 + +require ( + github.com/fatih/color v1.9.0 + github.com/sirupsen/logrus v1.6.0 + github.com/stretchr/testify v1.5.1 + gopkg.in/yaml.v2 v2.2.2 +) diff --git a/vendor/github.com/denis-tingajkin/go-header/go.sum b/vendor/github.com/denis-tingajkin/go-header/go.sum new file mode 100644 index 0000000..4033b08 --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/go.sum @@ -0,0 +1,31 @@ +github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod 
h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/fatih/color v1.9.0 h1:8xPHl4/q1VyqGIPif1F+1V3Y3lSmrq01EabUW3CoW5s= +github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= +github.com/konsorten/go-windows-terminal-sequences v1.0.3 h1:CE8S1cTafDpPvMhIxNJKvHsGVBgn1xWYf1NbHQhywc8= +github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/mattn/go-colorable v0.1.4 h1:snbPLB8fVfU9iwbbo30TPtbLRzwWu6aJS6Xh4eaaviA= +github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.11 h1:FxPOTFNqGkuDUGi3H/qkUbQO4ZiBa2brKq5r0l8TGeM= +github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/sirupsen/logrus v1.6.0 h1:UBcNElsrwanuuMsnGSlYmtmgbb23qDR5dG+6X6Oo89I= +github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= +github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894 h1:Cz4ceDQGXuKRnVBDTS23GTn/pU5OE2C0WrNTOYK1Uuc= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 h1:YyJpGZS1sBuBCzLAR1VEpK193GlqGZbnPFnPV/5Rsb4= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/vendor/github.com/denis-tingajkin/go-header/issue.go b/vendor/github.com/denis-tingajkin/go-header/issue.go new file mode 100644 index 0000000..2ff7bfd --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/issue.go @@ -0,0 +1,48 @@ +// Copyright (c) 2020 Denis Tingajkin +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at: +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package goheader + +type Issue interface { + Location() Location + Message() string +} + +type issue struct { + msg string + location Location +} + +func (i *issue) Location() Location { + return i.location +} + +func (i *issue) Message() string { + return i.msg +} + +func NewIssueWithLocation(msg string, location Location) Issue { + return &issue{ + msg: msg, + location: location, + } +} + +func NewIssue(msg string) Issue { + return &issue{ + msg: msg, + } +} diff --git a/vendor/github.com/denis-tingajkin/go-header/location.go b/vendor/github.com/denis-tingajkin/go-header/location.go new file mode 100644 index 0000000..ba4d190 --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/location.go @@ -0,0 +1,35 @@ +// Copyright (c) 2020 Denis Tingajkin +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at: +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package goheader + +import "fmt" + +type Location struct { + Line int + Position int +} + +func (l Location) String() string { + return fmt.Sprintf("%v:%v", l.Line+1, l.Position) +} + +func (l Location) Add(other Location) Location { + return Location{ + Line: l.Line + other.Line, + Position: l.Position + other.Position, + } +} diff --git a/vendor/github.com/denis-tingajkin/go-header/option.go b/vendor/github.com/denis-tingajkin/go-header/option.go new file mode 100644 index 0000000..afbcb62 --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/option.go @@ -0,0 +1,44 @@ +// Copyright (c) 2020 Denis Tingajkin +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at: +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package goheader + +import "strings" + +type Option interface { + apply(*Analyzer) +} + +type applyAnalyzerOptionFunc func(*Analyzer) + +func (f applyAnalyzerOptionFunc) apply(a *Analyzer) { + f(a) +} + +func WithValues(values map[string]Value) Option { + return applyAnalyzerOptionFunc(func(a *Analyzer) { + a.values = make(map[string]Value) + for k, v := range values { + a.values[strings.ToLower(k)] = v + } + }) +} + +func WithTemplate(template string) Option { + return applyAnalyzerOptionFunc(func(a *Analyzer) { + a.template = template + }) +} diff --git a/vendor/github.com/denis-tingajkin/go-header/reader.go b/vendor/github.com/denis-tingajkin/go-header/reader.go new file mode 100644 index 0000000..2393c94 --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/reader.go @@ -0,0 +1,116 @@ +/* +Copyright (c) 2020 Denis Tingajkin + +SPDX-License-Identifier: Apache-2.0 + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at: + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +package goheader + +func NewReader(text string) *Reader { + return &Reader{source: text} +} + +type Reader struct { + source string + position int + location Location + offset Location +} + +func (r *Reader) SetOffset(offset Location) { + r.offset = offset +} + +func (r *Reader) Position() int { + return r.position +} + +func (r *Reader) Location() Location { + return r.location.Add(r.offset) +} + +func (r *Reader) Peek() rune { + if r.Done() { + return rune(0) + } + return rune(r.source[r.position]) +} + +func (r *Reader) Done() bool { + return r.position >= len(r.source) +} + +func (r *Reader) Next() rune { + if r.Done() { + return rune(0) + } + reuslt := r.Peek() + if reuslt == '\n' { + r.location.Line++ + r.location.Position = 0 + } else { + r.location.Position++ + } + r.position++ + return reuslt +} + +func (r *Reader) Finish() string { + if r.position >= len(r.source) { + return "" + } + defer r.till() + return r.source[r.position:] +} + +func (r *Reader) SetPosition(pos int) { + if pos < 0 { + r.position = 0 + } + r.position = pos + r.location = r.calculateLocation() +} + +func (r *Reader) ReadWhile(match func(rune) bool) string { + if match == nil { + return "" + } + start := r.position + for !r.Done() && match(r.Peek()) { + r.Next() + } + return r.source[start:r.position] +} + +func (r *Reader) till() { + r.position = len(r.source) + r.location = r.calculateLocation() +} + +func (r *Reader) calculateLocation() Location { + min := len(r.source) + if min > r.position { + min = r.position + } + x, y := 0, 0 + for i := 0; i < min; i++ { + if r.source[i] == '\n' { + y++ + x = 0 + } else { + x++ + } + } + return Location{Line: y, Position: x} +} diff --git a/vendor/github.com/denis-tingajkin/go-header/value.go b/vendor/github.com/denis-tingajkin/go-header/value.go new file mode 100644 index 0000000..2a3adcd --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/value.go @@ -0,0 +1,128 @@ +// Copyright (c) 2020 Denis Tingajkin +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file 
except in compliance with the License. +// You may obtain a copy of the License at: +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package goheader + +import ( + "errors" + "fmt" + "regexp" + "strings" +) + +type Calculable interface { + Calculate(map[string]Value) error + Get() string +} + +type Value interface { + Calculable + Read(*Reader) Issue +} + +func calculateValue(calculable Calculable, values map[string]Value) (string, error) { + sb := strings.Builder{} + r := calculable.Get() + var endIndex int + var startIndex int + for startIndex = strings.Index(r, "{{"); startIndex >= 0; startIndex = strings.Index(r, "{{") { + _, _ = sb.WriteString(r[:startIndex]) + endIndex = strings.Index(r, "}}") + if endIndex < 0 { + return "", errors.New("missed value ending") + } + subVal := strings.ToLower(strings.TrimSpace(r[startIndex+2 : endIndex])) + if val := values[subVal]; val != nil { + if err := val.Calculate(values); err != nil { + return "", err + } + sb.WriteString(val.Get()) + } else { + return "", fmt.Errorf("unknown value name %v", subVal) + } + endIndex += 2 + r = r[endIndex:] + } + _, _ = sb.WriteString(r) + return sb.String(), nil +} + +type ConstValue struct { + RawValue string +} + +func (c *ConstValue) Calculate(values map[string]Value) error { + v, err := calculateValue(c, values) + if err != nil { + return err + } + c.RawValue = v + return nil +} + +func (c *ConstValue) Get() string { + return c.RawValue +} + +func (c *ConstValue) Read(s *Reader) Issue { + l := s.Location() + p := s.Position() + for _, ch := range c.Get() { + if ch != s.Peek() { + s.SetPosition(p) + f := s.ReadWhile(func(r rune) bool { + return r != '\n' + }) + return NewIssueWithLocation(fmt.Sprintf("Expected:%v, Actual: %v", c.Get(), f), l) + } + s.Next() + } + return nil +} + +type RegexpValue struct { + RawValue string +} + +func (r *RegexpValue) Calculate(values map[string]Value) error { + v, err := calculateValue(r, values) + if err != nil { + return err + } + r.RawValue = v + return nil +} + +func (r *RegexpValue) Get() string { + return r.RawValue +} + +func (r *RegexpValue) Read(s *Reader) Issue { + l := s.Location() + p := regexp.MustCompile(r.Get()) + pos := s.Position() + str := s.Finish() + s.SetPosition(pos) + indexes := p.FindAllIndex([]byte(str), -1) + if len(indexes) == 0 { + return NewIssueWithLocation(fmt.Sprintf("Pattern %v doesn't match.", p.String()), l) + } + s.SetPosition(pos + indexes[0][1]) + return nil +} + +var _ Value = &ConstValue{} +var _ Value = &RegexpValue{} diff --git a/vendor/github.com/esimonov/ifshort/LICENSE b/vendor/github.com/esimonov/ifshort/LICENSE new file mode 100644 index 0000000..a04e339 --- /dev/null +++ b/vendor/github.com/esimonov/ifshort/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Eugene Simonov + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following 
conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/esimonov/ifshort/pkg/analyzer/analyzer.go b/vendor/github.com/esimonov/ifshort/pkg/analyzer/analyzer.go new file mode 100644 index 0000000..93d470a --- /dev/null +++ b/vendor/github.com/esimonov/ifshort/pkg/analyzer/analyzer.go @@ -0,0 +1,238 @@ +package analyzer + +import ( + "go/ast" + "go/token" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +var maxDeclChars, maxDeclLines int + +const ( + maxDeclLinesUsage = `maximum length of variable declaration measured in number of lines, after which the linter won't suggest using short syntax. +Has precedence over max-decl-chars.` + maxDeclCharsUsage = `maximum length of variable declaration measured in number of characters, after which the linter won't suggest using short syntax.` +) + +func init() { + Analyzer.Flags.IntVar(&maxDeclLines, "max-decl-lines", 1, maxDeclLinesUsage) + Analyzer.Flags.IntVar(&maxDeclChars, "max-decl-chars", 30, maxDeclCharsUsage) +} + +// Analyzer is an analysis.Analyzer instance for ifshort linter. +var Analyzer = &analysis.Analyzer{ + Name: "ifshort", + Doc: "Checks that your code uses short syntax for if-statements whenever possible.", + Run: run, + Requires: []*analysis.Analyzer{inspect.Analyzer}, +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + nodeFilter := []ast.Node{ + (*ast.FuncDecl)(nil), + } + + inspector.Preorder(nodeFilter, func(node ast.Node) { + fdecl := node.(*ast.FuncDecl) + + /*if fdecl.Name.Name != "notUsed_BinaryExpressionInIndex_OK" { + return + }*/ + + if fdecl == nil || fdecl.Body == nil { + return + } + + candidates := getNamedOccurrenceMap(fdecl, pass) + + for _, stmt := range fdecl.Body.List { + candidates.checkStatement(stmt, token.NoPos) + } + + for varName := range candidates { + for marker, occ := range candidates[varName] { + // If two or more vars with the same scope marker - skip them. 
+ if candidates.isFoundByScopeMarker(marker) { + continue + } + + pass.Reportf(occ.declarationPos, + "variable '%s' is only used in the if-statement (%s); consider using short syntax", + varName, pass.Fset.Position(occ.ifStmtPos)) + } + } + }) + return nil, nil +} + +func (nom namedOccurrenceMap) checkStatement(stmt ast.Stmt, ifPos token.Pos) { + switch v := stmt.(type) { + case *ast.AssignStmt: + for _, el := range v.Rhs { + nom.checkExpression(el, ifPos) + } + if isAssign(v.Tok) { + for _, el := range v.Lhs { + nom.checkExpression(el, ifPos) + } + } + case *ast.DeferStmt: + for _, a := range v.Call.Args { + nom.checkExpression(a, ifPos) + } + case *ast.ExprStmt: + if callExpr, ok := v.X.(*ast.CallExpr); ok { + nom.checkExpression(callExpr, ifPos) + } + case *ast.ForStmt: + for _, el := range v.Body.List { + nom.checkStatement(el, ifPos) + } + + if bexpr, ok := v.Cond.(*ast.BinaryExpr); ok { + nom.checkExpression(bexpr.X, ifPos) + nom.checkExpression(bexpr.Y, ifPos) + } + + nom.checkStatement(v.Post, ifPos) + case *ast.GoStmt: + for _, a := range v.Call.Args { + nom.checkExpression(a, token.NoPos) + } + case *ast.IfStmt: + for _, el := range v.Body.List { + nom.checkStatement(el, v.If) + } + + switch cond := v.Cond.(type) { + case *ast.BinaryExpr: + nom.checkExpression(cond.X, v.If) + nom.checkExpression(cond.Y, v.If) + case *ast.CallExpr: + nom.checkExpression(cond, v.If) + } + + if init, ok := v.Init.(*ast.AssignStmt); ok { + for _, e := range init.Rhs { + nom.checkExpression(e, v.If) + } + } + case *ast.IncDecStmt: + nom.checkExpression(v.X, ifPos) + + case *ast.RangeStmt: + nom.checkExpression(v.X, token.NoPos) + case *ast.ReturnStmt: + for _, r := range v.Results { + nom.checkExpression(r, token.NoPos) + } + case *ast.SendStmt: + nom.checkExpression(v.Value, token.NoPos) + case *ast.SwitchStmt: + nom.checkExpression(v.Tag, token.NoPos) + + for _, el := range v.Body.List { + clauses, ok := el.(*ast.CaseClause) + if !ok { + continue + } + + for _, c := range clauses.List { + switch v := c.(type) { + case *ast.BinaryExpr: + nom.checkExpression(v.X, token.NoPos) + nom.checkExpression(v.Y, token.NoPos) + case *ast.Ident: + nom.checkExpression(v, token.NoPos) + } + } + + for _, c := range clauses.Body { + if est, ok := c.(*ast.ExprStmt); ok { + nom.checkExpression(est.X, token.NoPos) + } + + switch v := c.(type) { + case *ast.AssignStmt: + for _, el := range v.Rhs { + nom.checkExpression(el, token.NoPos) + } + case *ast.ExprStmt: + nom.checkExpression(v.X, token.NoPos) + } + } + } + } +} + +func (nom namedOccurrenceMap) checkExpression(candidate ast.Expr, ifPos token.Pos) { + switch v := candidate.(type) { + case *ast.BinaryExpr: + nom.checkExpression(v.X, ifPos) + case *ast.CallExpr: + for _, arg := range v.Args { + nom.checkExpression(arg, ifPos) + } + if fun, ok := v.Fun.(*ast.SelectorExpr); ok { + nom.checkExpression(fun.X, ifPos) + } + case *ast.CompositeLit: + for _, el := range v.Elts { + kv, ok := el.(*ast.KeyValueExpr) + if !ok { + continue + } + if ident, ok := kv.Key.(*ast.Ident); ok { + nom.checkExpression(ident, ifPos) + } + if ident, ok := kv.Value.(*ast.Ident); ok { + nom.checkExpression(ident, ifPos) + } + } + case *ast.FuncLit: + for _, el := range v.Body.List { + nom.checkStatement(el, ifPos) + } + case *ast.Ident: + if nom[v.Name].isEmponymousKey(ifPos) { + return + } + + scopeMarker1 := nom[v.Name].getScopeMarkerForPosition(v.Pos()) + + delete(nom[v.Name], scopeMarker1) + + for k := range nom { + for scopeMarker2 := range nom[k] { + if scopeMarker1 == scopeMarker2 { + 
delete(nom[k], scopeMarker2) + } + } + } + case *ast.IndexExpr: + nom.checkExpression(v.X, ifPos) + if index, ok := v.Index.(*ast.BinaryExpr); ok { + nom.checkExpression(index.X, ifPos) + } + case *ast.SelectorExpr: + nom.checkExpression(v.X, ifPos) + case *ast.SliceExpr: + nom.checkExpression(v.High, ifPos) + nom.checkExpression(v.Low, ifPos) + nom.checkExpression(v.X, ifPos) + case *ast.UnaryExpr: + nom.checkExpression(v.X, ifPos) + } +} + +func isAssign(tok token.Token) bool { + return (tok == token.ASSIGN || + tok == token.ADD_ASSIGN || tok == token.SUB_ASSIGN || + tok == token.MUL_ASSIGN || tok == token.QUO_ASSIGN || tok == token.REM_ASSIGN || + tok == token.AND_ASSIGN || tok == token.OR_ASSIGN || tok == token.XOR_ASSIGN || tok == token.AND_NOT_ASSIGN || + tok == token.SHL_ASSIGN || tok == token.SHR_ASSIGN) +} diff --git a/vendor/github.com/esimonov/ifshort/pkg/analyzer/occurrences.go b/vendor/github.com/esimonov/ifshort/pkg/analyzer/occurrences.go new file mode 100644 index 0000000..6eca613 --- /dev/null +++ b/vendor/github.com/esimonov/ifshort/pkg/analyzer/occurrences.go @@ -0,0 +1,236 @@ +package analyzer + +import ( + "go/ast" + "go/token" + "time" + + "golang.org/x/tools/go/analysis" +) + +// occurrence is a variable occurrence. +type occurrence struct { + declarationPos token.Pos + ifStmtPos token.Pos +} + +func (occ *occurrence) isComplete() bool { + return occ.ifStmtPos != token.NoPos && occ.declarationPos != token.NoPos +} + +// scopeMarkeredOccurences is a map of scope markers to variable occurrences. +type scopeMarkeredOccurences map[int64]occurrence + +func (smo scopeMarkeredOccurences) getGreatestMarker() int64 { + var maxScopeMarker int64 + + for marker := range smo { + if marker > maxScopeMarker { + maxScopeMarker = marker + } + } + return maxScopeMarker +} + +// find scope marker of the greatest token.Pos that is smaller than provided. +func (smo scopeMarkeredOccurences) getScopeMarkerForPosition(pos token.Pos) int64 { + var m int64 + var foundPos token.Pos + + for marker, occ := range smo { + if occ.declarationPos < pos && occ.declarationPos >= foundPos { + m = marker + foundPos = occ.declarationPos + } + } + return m +} + +func (smo scopeMarkeredOccurences) isEmponymousKey(pos token.Pos) bool { + if pos == token.NoPos { + return false + } + + for _, occ := range smo { + if occ.ifStmtPos == pos { + return true + } + } + return false +} + +// namedOccurrenceMap is a map of variable names to scopeMarkeredOccurences. 
+type namedOccurrenceMap map[string]scopeMarkeredOccurences + +func getNamedOccurrenceMap(fdecl *ast.FuncDecl, pass *analysis.Pass) namedOccurrenceMap { + nom := namedOccurrenceMap(map[string]scopeMarkeredOccurences{}) + + if fdecl == nil || fdecl.Body == nil { + return nom + } + + for _, stmt := range fdecl.Body.List { + switch v := stmt.(type) { + case *ast.AssignStmt: + nom.addFromAssignment(pass, v) + case *ast.IfStmt: + nom.addFromCondition(v) + nom.addFromIfClause(v) + nom.addFromElseClause(v) + } + } + + candidates := namedOccurrenceMap(map[string]scopeMarkeredOccurences{}) + + for varName, markeredOccs := range nom { + for marker, occ := range markeredOccs { + if !occ.isComplete() && !nom.isFoundByScopeMarker(marker) { + continue + } + if _, ok := candidates[varName]; !ok { + candidates[varName] = scopeMarkeredOccurences{ + marker: occ, + } + } else { + candidates[varName][marker] = occ + } + } + } + return candidates +} + +func (nom namedOccurrenceMap) isFoundByScopeMarker(scopeMarker int64) bool { + var i int + + for _, markeredOccs := range nom { + for marker := range markeredOccs { + if marker == scopeMarker { + i++ + } + } + } + return i >= 2 +} + +func (nom namedOccurrenceMap) addFromAssignment(pass *analysis.Pass, assignment *ast.AssignStmt) { + if assignment.Tok != token.DEFINE { + return + } + + scopeMarker := time.Now().UnixNano() + + for i, el := range assignment.Lhs { + ident, ok := el.(*ast.Ident) + if !ok { + continue + } + + if ident.Name == "_" || ident.Obj == nil { + continue + } + + if markeredOccs, ok := nom[ident.Name]; ok { + markeredOccs[scopeMarker] = occurrence{ + declarationPos: ident.Pos(), + } + nom[ident.Name] = markeredOccs + } else { + newOcc := occurrence{} + if areFlagSettingsSatisfied(pass, assignment, i) { + newOcc.declarationPos = ident.Pos() + } + nom[ident.Name] = scopeMarkeredOccurences{scopeMarker: newOcc} + } + } +} + +func areFlagSettingsSatisfied(pass *analysis.Pass, assignment *ast.AssignStmt, i int) bool { + lh := assignment.Lhs[i] + rh := assignment.Rhs[len(assignment.Rhs)-1] + + if len(assignment.Rhs) == len(assignment.Lhs) { + rh = assignment.Rhs[i] + } + + if pass.Fset.Position(rh.End()).Line-pass.Fset.Position(rh.Pos()).Line > maxDeclLines { + return false + } + if int(rh.End()-lh.Pos()) > maxDeclChars { + return false + } + return true +} + +func (nom namedOccurrenceMap) addFromCondition(stmt *ast.IfStmt) { + switch v := stmt.Cond.(type) { + case *ast.BinaryExpr: + for _, v := range [2]ast.Expr{v.X, v.Y} { + switch e := v.(type) { + case *ast.Ident: + nom.addFromIdent(stmt.If, e) + case *ast.SelectorExpr: + nom.addFromIdent(stmt.If, e.X) + } + } + case *ast.CallExpr: + for _, a := range v.Args { + switch e := a.(type) { + case *ast.Ident: + nom.addFromIdent(stmt.If, e) + case *ast.CallExpr: + nom.addFromCallExpr(stmt.If, e) + } + } + } +} + +func (nom namedOccurrenceMap) addFromIfClause(stmt *ast.IfStmt) { + nom.addFromBlockStmt(stmt.Body, stmt.If) +} + +func (nom namedOccurrenceMap) addFromElseClause(stmt *ast.IfStmt) { + nom.addFromBlockStmt(stmt.Else, stmt.If) +} + +func (nom namedOccurrenceMap) addFromBlockStmt(stmt ast.Stmt, ifPos token.Pos) { + blockStmt, ok := stmt.(*ast.BlockStmt) + if !ok { + return + } + + for _, el := range blockStmt.List { + exptStmt, ok := el.(*ast.ExprStmt) + if !ok { + continue + } + + if callExpr, ok := exptStmt.X.(*ast.CallExpr); ok { + nom.addFromCallExpr(ifPos, callExpr) + } + } +} + +func (nom namedOccurrenceMap) addFromCallExpr(ifPos token.Pos, callExpr *ast.CallExpr) { + for _, arg := range 
callExpr.Args { + nom.addFromIdent(ifPos, arg) + } +} + +func (nom namedOccurrenceMap) addFromIdent(ifPos token.Pos, v ast.Expr) { + ident, ok := v.(*ast.Ident) + if !ok { + return + } + + if markeredOccs, ok := nom[ident.Name]; ok { + marker := nom[ident.Name].getGreatestMarker() + + occ := markeredOccs[marker] + if occ.isComplete() { + return + } + + occ.ifStmtPos = ifPos + nom[ident.Name][marker] = occ + } +} diff --git a/vendor/github.com/fatih/color/.travis.yml b/vendor/github.com/fatih/color/.travis.yml deleted file mode 100644 index 95f8a1f..0000000 --- a/vendor/github.com/fatih/color/.travis.yml +++ /dev/null @@ -1,5 +0,0 @@ -language: go -go: - - 1.8.x - - tip - diff --git a/vendor/github.com/fatih/color/Gopkg.lock b/vendor/github.com/fatih/color/Gopkg.lock deleted file mode 100644 index 7d879e9..0000000 --- a/vendor/github.com/fatih/color/Gopkg.lock +++ /dev/null @@ -1,27 +0,0 @@ -# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. - - -[[projects]] - name = "github.com/mattn/go-colorable" - packages = ["."] - revision = "167de6bfdfba052fa6b2d3664c8f5272e23c9072" - version = "v0.0.9" - -[[projects]] - name = "github.com/mattn/go-isatty" - packages = ["."] - revision = "0360b2af4f38e8d38c7fce2a9f4e702702d73a39" - version = "v0.0.3" - -[[projects]] - branch = "master" - name = "golang.org/x/sys" - packages = ["unix"] - revision = "37707fdb30a5b38865cfb95e5aab41707daec7fd" - -[solve-meta] - analyzer-name = "dep" - analyzer-version = 1 - inputs-digest = "e8a50671c3cb93ea935bf210b1cd20702876b9d9226129be581ef646d1565cdc" - solver-name = "gps-cdcl" - solver-version = 1 diff --git a/vendor/github.com/fatih/color/Gopkg.toml b/vendor/github.com/fatih/color/Gopkg.toml deleted file mode 100644 index ff1617f..0000000 --- a/vendor/github.com/fatih/color/Gopkg.toml +++ /dev/null @@ -1,30 +0,0 @@ - -# Gopkg.toml example -# -# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md -# for detailed Gopkg.toml documentation. -# -# required = ["github.com/user/thing/cmd/thing"] -# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"] -# -# [[constraint]] -# name = "github.com/user/project" -# version = "1.0.0" -# -# [[constraint]] -# name = "github.com/user/project2" -# branch = "dev" -# source = "github.com/myfork/project2" -# -# [[override]] -# name = "github.com/x/y" -# version = "2.4.0" - - -[[constraint]] - name = "github.com/mattn/go-colorable" - version = "0.0.9" - -[[constraint]] - name = "github.com/mattn/go-isatty" - version = "0.0.3" diff --git a/vendor/github.com/fatih/color/README.md b/vendor/github.com/fatih/color/README.md index 3fc9544..d62e402 100644 --- a/vendor/github.com/fatih/color/README.md +++ b/vendor/github.com/fatih/color/README.md @@ -1,14 +1,11 @@ -# Color [![GoDoc](https://godoc.org/github.com/fatih/color?status.svg)](https://godoc.org/github.com/fatih/color) [![Build Status](https://img.shields.io/travis/fatih/color.svg?style=flat-square)](https://travis-ci.org/fatih/color) - - +# color [![](https://github.com/fatih/color/workflows/build/badge.svg)](https://github.com/fatih/color/actions) [![PkgGoDev](https://pkg.go.dev/badge/github.com/fatih/color)](https://pkg.go.dev/github.com/fatih/color) Color lets you use colorized outputs in terms of [ANSI Escape Codes](http://en.wikipedia.org/wiki/ANSI_escape_code#Colors) in Go (Golang). It has support for Windows too! The API can be used in several ways, pick one that suits you. 
- -![Color](https://i.imgur.com/c1JI0lA.png) +![Color](https://user-images.githubusercontent.com/438920/96832689-03b3e000-13f4-11eb-9803-46f4c4de3406.jpg) ## Install @@ -17,9 +14,6 @@ suits you. go get github.com/fatih/color ``` -Note that the `vendor` folder is here for stability. Remove the folder if you -already have the dependencies in your GOPATH. - ## Examples ### Standard colors diff --git a/vendor/github.com/fatih/color/go.mod b/vendor/github.com/fatih/color/go.mod new file mode 100644 index 0000000..7887281 --- /dev/null +++ b/vendor/github.com/fatih/color/go.mod @@ -0,0 +1,8 @@ +module github.com/fatih/color + +go 1.13 + +require ( + github.com/mattn/go-colorable v0.1.8 + github.com/mattn/go-isatty v0.0.12 +) diff --git a/vendor/github.com/fatih/color/go.sum b/vendor/github.com/fatih/color/go.sum new file mode 100644 index 0000000..54f7c46 --- /dev/null +++ b/vendor/github.com/fatih/color/go.sum @@ -0,0 +1,7 @@ +github.com/mattn/go-colorable v0.1.8 h1:c1ghPdyEDarC70ftn0y+A/Ee++9zz8ljHG1b13eJ0s8= +github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae h1:/WDfKMnPU+m5M4xB+6x4kaepxRw6jWvR5iDRdvjHgy8= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= diff --git a/vendor/github.com/go-critic/go-critic/checkers/appendAssign_checker.go b/vendor/github.com/go-critic/go-critic/checkers/appendAssign_checker.go index 47d12f0..272c218 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/appendAssign_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/appendAssign_checker.go @@ -5,15 +5,15 @@ import ( "go/token" "go/types" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astequal" "github.com/go-toolsmith/astp" "golang.org/x/tools/go/ast/astutil" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "appendAssign" info.Tags = []string{"diagnostic"} info.Summary = "Detects suspicious append result assignments" @@ -24,14 +24,14 @@ p.negatives = append(p.negatives, y)` p.positives = append(p.positives, x) p.negatives = append(p.negatives, y)` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForStmt(&appendAssignChecker{ctx: ctx}) }) } type appendAssignChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *appendAssignChecker) VisitStmt(stmt ast.Stmt) { @@ -81,7 +81,7 @@ func (c *appendAssignChecker) checkAppend(x ast.Expr, call *ast.CallExpr) { switch y := call.Args[0].(type) { case *ast.SliceExpr: - if _, ok := c.ctx.TypesInfo.TypeOf(y.X).(*types.Array); ok { + if _, ok := c.ctx.TypeOf(y.X).(*types.Array); ok { // Arrays are frequently used as scratch storages. 
return } diff --git a/vendor/github.com/go-critic/go-critic/checkers/appendCombine_checker.go b/vendor/github.com/go-critic/go-critic/checkers/appendCombine_checker.go index 63f5d9f..a761f2a 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/appendCombine_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/appendCombine_checker.go @@ -4,14 +4,14 @@ import ( "go/ast" "go/token" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astequal" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "appendCombine" info.Tags = []string{"performance"} info.Summary = "Detects `append` chains to the same slice that can be done in a single `append` call" @@ -20,14 +20,14 @@ xs = append(xs, 1) xs = append(xs, 2)` info.After = `xs = append(xs, 1, 2)` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForStmtList(&appendCombineChecker{ctx: ctx}) }) } type appendCombineChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *appendCombineChecker) VisitStmtList(list []ast.Stmt) { diff --git a/vendor/github.com/go-critic/go-critic/checkers/argOrder_checker.go b/vendor/github.com/go-critic/go-critic/checkers/argOrder_checker.go index 85a6f7c..5fff6a7 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/argOrder_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/argOrder_checker.go @@ -4,8 +4,8 @@ import ( "go/ast" "go/types" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astcopy" "github.com/go-toolsmith/astp" @@ -13,28 +13,28 @@ import ( ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "argOrder" - info.Tags = []string{"diagnostic", "experimental"} + info.Tags = []string{"diagnostic"} info.Summary = "Detects suspicious arguments order" info.Before = `strings.HasPrefix("#", userpass)` info.After = `strings.HasPrefix(userpass, "#")` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForExpr(&argOrderChecker{ctx: ctx}) }) } type argOrderChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *argOrderChecker) VisitExpr(expr ast.Expr) { call := astcast.ToCallExpr(expr) // For now only handle functions of 2 args. - // TODO(Quasilyte): generalize the algorithm and add more patterns. + // TODO(quasilyte): generalize the algorithm and add more patterns. if len(call.Args) != 2 { return } @@ -59,23 +59,22 @@ func (c *argOrderChecker) VisitExpr(expr ast.Expr) { } func (c *argOrderChecker) isConstLiteral(x ast.Expr) bool { - if c.ctx.TypesInfo.Types[x].Value != nil { - return true - } - // Also permit byte slices. switch x := x.(type) { + case *ast.BasicLit: + return true + case *ast.CallExpr: // Handle `[]byte("abc")` as well. 
if len(x.Args) != 1 || !astp.IsBasicLit(x.Args[0]) { return false } - typ, ok := c.ctx.TypesInfo.TypeOf(x.Fun).(*types.Slice) + typ, ok := c.ctx.TypeOf(x.Fun).(*types.Slice) return ok && typep.HasUint8Kind(typ.Elem()) case *ast.CompositeLit: // Check if it's a const byte slice. - typ, ok := c.ctx.TypesInfo.TypeOf(x).(*types.Slice) + typ, ok := c.ctx.TypeOf(x).(*types.Slice) if !ok || !typep.HasUint8Kind(typ.Elem()) { return false } diff --git a/vendor/github.com/go-critic/go-critic/checkers/assignOp_checker.go b/vendor/github.com/go-critic/go-critic/checkers/assignOp_checker.go index eb34286..e3acd09 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/assignOp_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/assignOp_checker.go @@ -4,29 +4,29 @@ import ( "go/ast" "go/token" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcopy" "github.com/go-toolsmith/astequal" "github.com/go-toolsmith/typep" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "assignOp" info.Tags = []string{"style"} info.Summary = "Detects assignments that can be simplified by using assignment operators" info.Before = `x = x * 2` info.After = `x *= 2` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForStmt(&assignOpChecker{ctx: ctx}) }) } type assignOpChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *assignOpChecker) VisitStmt(stmt ast.Stmt) { diff --git a/vendor/github.com/go-critic/go-critic/checkers/badCall_checker.go b/vendor/github.com/go-critic/go-critic/checkers/badCall_checker.go index 150cc69..3e96a39 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/badCall_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/badCall_checker.go @@ -3,28 +3,28 @@ package checkers import ( "go/ast" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astcopy" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "badCall" - info.Tags = []string{"diagnostic", "experimental"} + info.Tags = []string{"diagnostic"} info.Summary = "Detects suspicious function calls" info.Before = `strings.Replace(s, from, to, 0)` info.After = `strings.Replace(s, from, to, -1)` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForExpr(&badCallChecker{ctx: ctx}) }) } type badCallChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *badCallChecker) VisitExpr(expr ast.Expr) { diff --git a/vendor/github.com/go-critic/go-critic/checkers/badCond_checker.go b/vendor/github.com/go-critic/go-critic/checkers/badCond_checker.go index 466a89c..47f994f 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/badCond_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/badCond_checker.go @@ -5,9 +5,9 @@ import ( "go/constant" "go/token" + 
"github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astcopy" "github.com/go-toolsmith/astequal" @@ -16,9 +16,9 @@ import ( ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "badCond" - info.Tags = []string{"diagnostic", "experimental"} + info.Tags = []string{"diagnostic"} info.Summary = "Detects suspicious condition expressions" info.Before = ` for i := 0; i > n; i++ { @@ -29,14 +29,14 @@ for i := 0; i < n; i++ { xs[i] = 0 }` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForFuncDecl(&badCondChecker{ctx: ctx}) }) } type badCondChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *badCondChecker) VisitFuncDecl(decl *ast.FuncDecl) { @@ -52,7 +52,7 @@ func (c *badCondChecker) VisitFuncDecl(decl *ast.FuncDecl) { } func (c *badCondChecker) checkExpr(expr ast.Expr) { - // TODO(Quasilyte): recognize more patterns. + // TODO(quasilyte): recognize more patterns. cond := astcast.ToBinaryExpr(expr) lhs := astcast.ToBinaryExpr(astutil.Unparen(cond.X)) @@ -102,7 +102,7 @@ func (c *badCondChecker) lessAndGreater(lhs, rhs *ast.BinaryExpr) bool { } func (c *badCondChecker) checkForStmt(stmt *ast.ForStmt) { - // TODO(Quasilyte): handle other kinds of bad conditionals. + // TODO(quasilyte): handle other kinds of bad conditionals. init := astcast.ToAssignStmt(stmt.Init) if init.Tok != token.DEFINE || len(init.Lhs) != 1 || len(init.Rhs) != 1 { diff --git a/vendor/github.com/go-critic/go-critic/checkers/badLock_checker.go b/vendor/github.com/go-critic/go-critic/checkers/badLock_checker.go new file mode 100644 index 0000000..5177161 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/badLock_checker.go @@ -0,0 +1,116 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astequal" +) + +func init() { + var info linter.CheckerInfo + info.Name = "badLock" + info.Tags = []string{"diagnostic", "experimental"} + info.Summary = "Detects suspicious mutex lock/unlock operations" + info.Before = ` +mu.Lock() +mu.Unlock()` + info.After = ` +mu.Lock() +defer mu.Unlock()` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { + return astwalk.WalkerForStmtList(&badLockChecker{ctx: ctx}) + }) +} + +type badLockChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *badLockChecker) VisitStmtList(list []ast.Stmt) { + if len(list) < 2 { + return + } + + for i := 0; i < len(list)-1; i++ { + current, ok := list[i].(*ast.ExprStmt) + if !ok { + continue + } + deferred := false + var next ast.Expr + switch x := list[i+1].(type) { + case *ast.ExprStmt: + next = x.X + case *ast.DeferStmt: + next = x.Call + deferred = true + default: + continue + } + + mutex1, lockFunc, ok := c.asLockedMutex(current.X) + if !ok { + continue + } + mutex2, unlockFunc, ok := c.asUnlockedMutex(next) + if !ok { + continue + } + if !astequal.Expr(mutex1, mutex2) { + continue + } + + switch { + case !deferred: + c.warnImmediateUnlock(mutex2) + case lockFunc == "Lock" && 
unlockFunc == "RUnlock": + c.warnMismatchingUnlock(mutex2, "Unlock") + case lockFunc == "RLock" && unlockFunc == "Unlock": + c.warnMismatchingUnlock(mutex2, "RUnlock") + } + } +} + +func (c *badLockChecker) asLockedMutex(e ast.Expr) (ast.Expr, string, bool) { + call, ok := e.(*ast.CallExpr) + if !ok || len(call.Args) != 0 { + return nil, "", false + } + switch fn := call.Fun.(type) { + case *ast.SelectorExpr: + if fn.Sel.Name == "Lock" || fn.Sel.Name == "RLock" { + return fn.X, fn.Sel.Name, true + } + return nil, "", false + default: + return nil, "", false + } +} + +func (c *badLockChecker) asUnlockedMutex(e ast.Expr) (ast.Expr, string, bool) { + call, ok := e.(*ast.CallExpr) + if !ok || len(call.Args) != 0 { + return nil, "", false + } + switch fn := call.Fun.(type) { + case *ast.SelectorExpr: + if fn.Sel.Name == "Unlock" || fn.Sel.Name == "RUnlock" { + return fn.X, fn.Sel.Name, true + } + return nil, "", false + default: + return nil, "", false + } +} + +func (c *badLockChecker) warnImmediateUnlock(cause ast.Node) { + c.ctx.Warn(cause, "defer is missing, mutex is unlocked immediately") +} + +func (c *badLockChecker) warnMismatchingUnlock(cause ast.Node, suggestion string) { + c.ctx.Warn(cause, "suspicious unlock, maybe %s was intended?", suggestion) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/badRegexp_checker.go b/vendor/github.com/go-critic/go-critic/checkers/badRegexp_checker.go new file mode 100644 index 0000000..1025454 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/badRegexp_checker.go @@ -0,0 +1,445 @@ +package checkers + +import ( + "go/ast" + "go/constant" + "sort" + "strconv" + "unicode" + "unicode/utf8" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/quasilyte/regex/syntax" +) + +func init() { + var info linter.CheckerInfo + info.Name = "badRegexp" + info.Tags = []string{"diagnostic", "experimental"} + info.Summary = "Detects suspicious regexp patterns" + info.Before = "regexp.MustCompile(`(?:^aa|bb|cc)foo[aba]`)" + info.After = "regexp.MustCompile(`^(?:aa|bb|cc)foo[ab]`)" + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { + opts := &syntax.ParserOptions{} + c := &badRegexpChecker{ + ctx: ctx, + parser: syntax.NewParser(opts), + } + return astwalk.WalkerForExpr(c) + }) +} + +type badRegexpChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + parser *syntax.Parser + cause ast.Expr + + flagStates []regexpFlagState + goodAnchors []syntax.Position +} + +type regexpFlagState [utf8.RuneSelf]bool + +func (c *badRegexpChecker) VisitExpr(x ast.Expr) { + call, ok := x.(*ast.CallExpr) + if !ok { + return + } + + switch qualifiedName(call.Fun) { + case "regexp.Compile", "regexp.MustCompile": + cv := c.ctx.TypesInfo.Types[call.Args[0]].Value + if cv == nil || cv.Kind() != constant.String { + return + } + pat := constant.StringVal(cv) + c.cause = call.Args[0] + c.checkPattern(pat) + } +} + +func (c *badRegexpChecker) checkPattern(pat string) { + re, err := c.parser.Parse(pat) + if err != nil { + return + } + + c.flagStates = c.flagStates[:0] + c.goodAnchors = c.goodAnchors[:0] + + // In Go all flags (modifiers) are set to false by default, + // so we start from the empty flag set. 
+ c.flagStates = append(c.flagStates, regexpFlagState{}) + + c.markGoodCarets(re.Expr) + c.walk(re.Expr) +} + +func (c *badRegexpChecker) markGoodCarets(e syntax.Expr) { + canSkip := func(e syntax.Expr) bool { + switch e.Op { + case syntax.OpFlagOnlyGroup: + return true + case syntax.OpGroup: + x := e.Args[0] + return x.Op == syntax.OpConcat && len(x.Args) == 0 + } + return false + } + + if e.Op == syntax.OpConcat && len(e.Args) > 1 { + i := 0 + for i < len(e.Args) && canSkip(e.Args[i]) { + i++ + } + if i < len(e.Args) { + c.markGoodCarets(e.Args[i]) + } + return + } + if e.Op == syntax.OpCaret { + c.addGoodAnchor(e.Pos) + } + for _, a := range e.Args { + c.markGoodCarets(a) + } +} + +func (c *badRegexpChecker) walk(e syntax.Expr) { + switch e.Op { + case syntax.OpAlt: + c.checkAltAnchor(e) + c.checkAltDups(e) + for _, a := range e.Args { + c.walk(a) + } + + case syntax.OpCharClass, syntax.OpNegCharClass: + if c.checkCharClassRanges(e) { + c.checkCharClassDups(e) + } + + case syntax.OpStar, syntax.OpPlus: + c.checkNestedQuantifier(e) + c.walk(e.Args[0]) + + case syntax.OpFlagOnlyGroup: + c.updateFlagState(c.currentFlagState(), e, e.Args[0].Value) + case syntax.OpGroupWithFlags: + // Creates a new context using the current context copy. + // New flags are evaluated inside a new context. + // After nested expressions are processed, previous context is restored. + nflags := len(c.flagStates) + c.flagStates = append(c.flagStates, *c.currentFlagState()) + c.updateFlagState(c.currentFlagState(), e, e.Args[1].Value) + c.walk(e.Args[0]) + c.flagStates = c.flagStates[:nflags] + case syntax.OpGroup, syntax.OpCapture, syntax.OpNamedCapture: + // Like with OpGroupWithFlags, but doesn't evaluate any new flags. + nflags := len(c.flagStates) + c.flagStates = append(c.flagStates, *c.currentFlagState()) + c.walk(e.Args[0]) + c.flagStates = c.flagStates[:nflags] + + case syntax.OpCaret: + if !c.isGoodAnchor(e) { + c.warn("dangling or redundant ^, maybe \\^ is intended?") + } + + default: + for _, a := range e.Args { + c.walk(a) + } + } +} + +func (c *badRegexpChecker) currentFlagState() *regexpFlagState { + return &c.flagStates[len(c.flagStates)-1] +} + +func (c *badRegexpChecker) updateFlagState(state *regexpFlagState, e syntax.Expr, flagString string) { + clearing := false + for i := 0; i < len(flagString); i++ { + ch := flagString[i] + if ch == '-' { + clearing = true + continue + } + if int(ch) >= len(state) { + continue // Should never happen in practice, but we don't want a panic + } + + if clearing { + if !state[ch] { + c.warn("clearing unset flag %c in %s", ch, e.Value) + } + } else { + if state[ch] { + c.warn("redundant flag %c in %s", ch, e.Value) + } + } + state[ch] = !clearing + } +} + +func (c *badRegexpChecker) checkNestedQuantifier(e syntax.Expr) { + x := e.Args[0] + switch x.Op { + case syntax.OpGroup, syntax.OpCapture, syntax.OpGroupWithFlags: + if len(e.Args) == 1 { + x = x.Args[0] + } + } + + switch x.Op { + case syntax.OpPlus, syntax.OpStar: + c.warn("repeated greedy quantifier in %s", e.Value) + } +} + +func (c *badRegexpChecker) checkAltDups(alt syntax.Expr) { + // Seek duplicated alternation expressions. 
+ + set := make(map[string]struct{}, len(alt.Args)) + for _, a := range alt.Args { + if _, ok := set[a.Value]; ok { + c.warn("`%s` is duplicated in %s", a.Value, alt.Value) + } + set[a.Value] = struct{}{} + } +} + +func (c *badRegexpChecker) isCharOrLit(e syntax.Expr) bool { + return e.Op == syntax.OpChar || e.Op == syntax.OpLiteral +} + +func (c *badRegexpChecker) checkAltAnchor(alt syntax.Expr) { + // Seek suspicious anchors. + + // Case 1: an alternation of literals where 1st expr begins with ^ anchor. + first := alt.Args[0] + if first.Op == syntax.OpConcat && len(first.Args) == 2 && first.Args[0].Op == syntax.OpCaret && c.isCharOrLit(first.Args[1]) { + matched := true + for _, a := range alt.Args[1:] { + if !c.isCharOrLit(a) { + matched = false + break + } + } + if matched { + c.warn("^ applied only to `%s` in %s", first.Value[len(`^`):], alt.Value) + } + } + + // Case 2: an alternation of literals where last expr ends with $ anchor. + last := alt.Args[len(alt.Args)-1] + if last.Op == syntax.OpConcat && len(last.Args) == 2 && last.Args[1].Op == syntax.OpDollar && c.isCharOrLit(last.Args[0]) { + matched := true + for _, a := range alt.Args[:len(alt.Args)-1] { + if !c.isCharOrLit(a) { + matched = false + break + } + } + if matched { + c.warn("$ applied only to `%s` in %s", last.Value[:len(last.Value)-len(`$`)], alt.Value) + } + } +} + +func (c *badRegexpChecker) checkCharClassRanges(cc syntax.Expr) bool { + // Seek for suspicious ranges like `!-_`. + // + // We permit numerical ranges (0-9, hex and octal literals) + // and simple ascii letter ranges. + + for _, e := range cc.Args { + if e.Op != syntax.OpCharRange { + continue + } + switch e.Args[0].Op { + case syntax.OpEscapeOctal, syntax.OpEscapeHex: + continue + } + ch := c.charClassBoundRune(e.Args[0]) + if ch == 0 { + return false + } + good := unicode.IsLetter(ch) || (ch >= '0' && ch <= '9') + if !good { + c.warnSloppyCharRange(e.Value, cc.Value) + } + } + + return true +} + +func (c *badRegexpChecker) checkCharClassDups(cc syntax.Expr) { + // Seek for excessive elements inside a character class. + // Report them as intersections. + + if len(cc.Args) == 1 { + return // Can't had duplicates. + } + + type charRange struct { + low rune + high rune + source string + } + ranges := make([]charRange, 0, 8) + addRange := func(source string, low, high rune) { + ranges = append(ranges, charRange{source: source, low: low, high: high}) + } + addRange1 := func(source string, ch rune) { + addRange(source, ch, ch) + } + + // 1. Collect ranges, O(n). + for _, e := range cc.Args { + switch e.Op { + case syntax.OpEscapeOctal: + addRange1(e.Value, c.octalToRune(e)) + case syntax.OpEscapeHex: + addRange1(e.Value, c.hexToRune(e)) + case syntax.OpChar: + addRange1(e.Value, c.stringToRune(e.Value)) + case syntax.OpCharRange: + addRange(e.Value, c.charClassBoundRune(e.Args[0]), c.charClassBoundRune(e.Args[1])) + case syntax.OpEscapeMeta: + addRange1(e.Value, rune(e.Value[1])) + case syntax.OpEscapeChar: + ch := c.stringToRune(e.Value[len(`\`):]) + if unicode.IsPunct(ch) { + addRange1(e.Value, ch) + break + } + switch e.Value { + case `\|`, `\<`, `\>`, `\+`, `\=`: // How to cover all symbols? 
+ addRange1(e.Value, c.stringToRune(e.Value[len(`\`):])) + case `\t`: + addRange1(e.Value, '\t') + case `\n`: + addRange1(e.Value, '\n') + case `\r`: + addRange1(e.Value, '\r') + case `\v`: + addRange1(e.Value, '\v') + case `\d`: + addRange(e.Value, '0', '9') + case `\D`: + addRange(e.Value, 0, '0'-1) + addRange(e.Value, '9'+1, utf8.MaxRune) + case `\s`: + addRange(e.Value, '\t', '\n') // 9-10 + addRange(e.Value, '\f', '\r') // 12-13 + addRange1(e.Value, ' ') // 32 + case `\S`: + addRange(e.Value, 0, '\t'-1) + addRange(e.Value, '\n'+1, '\f'-1) + addRange(e.Value, '\r'+1, ' '-1) + addRange(e.Value, ' '+1, utf8.MaxRune) + case `\w`: + addRange(e.Value, '0', '9') // 48-57 + addRange(e.Value, 'A', 'Z') // 65-90 + addRange1(e.Value, '_') // 95 + addRange(e.Value, 'a', 'z') // 97-122 + case `\W`: + addRange(e.Value, 0, '0'-1) + addRange(e.Value, '9'+1, 'A'-1) + addRange(e.Value, 'Z'+1, '_'-1) + addRange(e.Value, '_'+1, 'a'-1) + addRange(e.Value, 'z'+1, utf8.MaxRune) + default: + // Give up: unknown escape sequence. + return + } + default: + // Give up: unexpected operation inside char class. + return + } + } + + // 2. Sort ranges, O(nlogn). + sort.Slice(ranges, func(i, j int) bool { + return ranges[i].low < ranges[j].low + }) + + // 3. Search for duplicates, O(n). + for i := 0; i < len(ranges)-1; i++ { + x := ranges[i+0] + y := ranges[i+1] + if x.high >= y.low { + c.warnCharClassDup(x.source, y.source, cc.Value) + break + } + } +} + +func (c *badRegexpChecker) charClassBoundRune(e syntax.Expr) rune { + switch e.Op { + case syntax.OpChar: + return c.stringToRune(e.Value) + case syntax.OpEscapeHex: + return c.hexToRune(e) + case syntax.OpEscapeOctal: + return c.octalToRune(e) + default: + return 0 + } +} + +func (c *badRegexpChecker) octalToRune(e syntax.Expr) rune { + v, _ := strconv.ParseInt(e.Value[len(`\`):], 8, 32) + return rune(v) +} + +func (c *badRegexpChecker) hexToRune(e syntax.Expr) rune { + var s string + switch e.Form { + case syntax.FormEscapeHexFull: + s = e.Value[len(`\x{`) : len(e.Value)-len(`}`)] + default: + s = e.Value[len(`\x`):] + } + v, _ := strconv.ParseInt(s, 16, 32) + return rune(v) +} + +func (c *badRegexpChecker) stringToRune(s string) rune { + ch, _ := utf8.DecodeRuneInString(s) + return ch +} + +func (c *badRegexpChecker) addGoodAnchor(pos syntax.Position) { + c.goodAnchors = append(c.goodAnchors, pos) +} + +func (c *badRegexpChecker) isGoodAnchor(e syntax.Expr) bool { + for _, pos := range c.goodAnchors { + if e.Pos == pos { + return true + } + } + return false +} + +func (c *badRegexpChecker) warn(format string, args ...interface{}) { + c.ctx.Warn(c.cause, format, args...) 
+} + +func (c *badRegexpChecker) warnSloppyCharRange(rng, charClass string) { + c.ctx.Warn(c.cause, "suspicious char range `%s` in %s", rng, charClass) +} + +func (c *badRegexpChecker) warnCharClassDup(x, y, charClass string) { + if x == y { + c.ctx.Warn(c.cause, "`%s` is duplicated in %s", x, charClass) + } else { + c.ctx.Warn(c.cause, "`%s` intersects with `%s` in %s", x, y, charClass) + } +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/boolExprSimplify_checker.go b/vendor/github.com/go-critic/go-critic/checkers/boolExprSimplify_checker.go index f4eb9ed..657d8a8 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/boolExprSimplify_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/boolExprSimplify_checker.go @@ -6,9 +6,9 @@ import ( "go/token" "strconv" + "github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astcopy" "github.com/go-toolsmith/astequal" @@ -18,7 +18,7 @@ import ( ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "boolExprSimplify" info.Tags = []string{"style", "experimental"} info.Summary = "Detects bool expressions that can be simplified" @@ -29,14 +29,14 @@ b := !(x) == !(y)` a := elapsed < expectElapsedMin b := (x) == (y)` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForExpr(&boolExprSimplifyChecker{ctx: ctx}) }) } type boolExprSimplifyChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext hasFloats bool } @@ -47,7 +47,7 @@ func (c *boolExprSimplifyChecker) VisitExpr(x ast.Expr) { // Throw away non-bool expressions and avoid redundant // AST copying below. - if typ := c.ctx.TypesInfo.TypeOf(x); typ == nil || !typep.HasBoolKind(typ.Underlying()) { + if typ := c.ctx.TypeOf(x); typ == nil || !typep.HasBoolKind(typ.Underlying()) { return } @@ -55,8 +55,8 @@ func (c *boolExprSimplifyChecker) VisitExpr(x ast.Expr) { // this is why we record valuable info before doing it. 
c.hasFloats = lintutil.ContainsNode(x, func(n ast.Node) bool { if x, ok := n.(*ast.BinaryExpr); ok { - return typep.HasFloatProp(c.ctx.TypesInfo.TypeOf(x.X).Underlying()) || - typep.HasFloatProp(c.ctx.TypesInfo.TypeOf(x.Y).Underlying()) + return typep.HasFloatProp(c.ctx.TypeOf(x.X).Underlying()) || + typep.HasFloatProp(c.ctx.TypeOf(x.Y).Underlying()) } return false }) @@ -289,7 +289,8 @@ func (c *boolExprSimplifyChecker) foldRanges(cur *astutil.Cursor) bool { // `x >= c && x <= c` => `x == c` {token.GEQ, token.LEQ, 0, 0}, } - for _, comb := range combTable { + for i := range combTable { + comb := combTable[i] if match(&comb) { lhs.Op = token.EQL v := c1 + comb.resDelta @@ -310,7 +311,8 @@ func (c *boolExprSimplifyChecker) foldRanges(cur *astutil.Cursor) bool { // `x <= c || x >= c+2` => `x != c+1` {token.LEQ, token.GEQ, 2, 1}, } - for _, comb := range combTable { + for i := range combTable { + comb := combTable[i] if match(&comb) { lhs.Op = token.NEQ v := c1 + comb.resDelta @@ -325,7 +327,7 @@ func (c *boolExprSimplifyChecker) foldRanges(cur *astutil.Cursor) bool { } func (c *boolExprSimplifyChecker) int64val(x ast.Expr) (int64, bool) { - // TODO(Quasilyte): if we had types info, we could use TypesInfo.Types[x].Value, + // TODO(quasilyte): if we had types info, we could use TypesInfo.Types[x].Value, // but since copying erases leaves us without it, only basic literals are handled. lit, ok := x.(*ast.BasicLit) if !ok { diff --git a/vendor/github.com/go-critic/go-critic/checkers/builtinShadowDecl_checker.go b/vendor/github.com/go-critic/go-critic/checkers/builtinShadowDecl_checker.go new file mode 100644 index 0000000..616c0a2 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/builtinShadowDecl_checker.go @@ -0,0 +1,63 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "builtinShadowDecl" + info.Tags = []string{"diagnostic", "experimental"} + info.Summary = "Detects top-level declarations that shadow the predeclared identifiers" + info.Before = `type int struct {}` + info.After = `type myInt struct {}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { + return &builtinShadowDeclChecker{ctx: ctx} + }) +} + +type builtinShadowDeclChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *builtinShadowDeclChecker) WalkFile(f *ast.File) { + for _, decl := range f.Decls { + switch decl := decl.(type) { + case *ast.FuncDecl: + // Don't check methods. They can shadow anything safely. 
+ if decl.Recv == nil { + c.checkName(decl.Name) + } + case *ast.GenDecl: + c.visitGenDecl(decl) + } + } +} + +func (c *builtinShadowDeclChecker) visitGenDecl(decl *ast.GenDecl) { + for _, spec := range decl.Specs { + switch spec := spec.(type) { + case *ast.ValueSpec: + for _, name := range spec.Names { + c.checkName(name) + } + case *ast.TypeSpec: + c.checkName(spec.Name) + } + } +} + +func (c *builtinShadowDeclChecker) checkName(name *ast.Ident) { + if isBuiltin(name.Name) { + c.warn(name) + } +} + +func (c *builtinShadowDeclChecker) warn(ident *ast.Ident) { + c.ctx.Warn(ident, "shadowing of predeclared identifier: %s", ident) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/builtinShadow_checker.go b/vendor/github.com/go-critic/go-critic/checkers/builtinShadow_checker.go index 24d8b7f..745494e 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/builtinShadow_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/builtinShadow_checker.go @@ -3,26 +3,26 @@ package checkers import ( "go/ast" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "builtinShadow" info.Tags = []string{"style", "opinionated"} - info.Summary = "Detects when predeclared identifiers shadowed in assignments" + info.Summary = "Detects when predeclared identifiers are shadowed in assignments" info.Before = `len := 10` info.After = `length := 10` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForLocalDef(&builtinShadowChecker{ctx: ctx}, ctx.TypesInfo) }) } type builtinShadowChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *builtinShadowChecker) VisitLocalDef(name astwalk.Name, _ ast.Expr) { diff --git a/vendor/github.com/go-critic/go-critic/checkers/captLocal_checker.go b/vendor/github.com/go-critic/go-critic/checkers/captLocal_checker.go index bc9a211..76b6fb4 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/captLocal_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/captLocal_checker.go @@ -3,15 +3,15 @@ package checkers import ( "go/ast" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "captLocal" info.Tags = []string{"style"} - info.Params = lintpack.CheckerParams{ + info.Params = linter.CheckerParams{ "paramsOnly": { Value: true, Usage: "whether to restrict checker to params only", @@ -21,7 +21,7 @@ func init() { info.Before = `func f(IN int, OUT *int) (ERR error) {}` info.After = `func f(in int, out *int) (err error) {}` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { c := &captLocalChecker{ctx: ctx} c.paramsOnly = info.Params.Bool("paramsOnly") return astwalk.WalkerForLocalDef(c, ctx.TypesInfo) @@ -30,7 +30,7 @@ func init() { type captLocalChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext paramsOnly bool } diff --git 
a/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go b/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go index 1ef4b53..2d49273 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go @@ -4,12 +4,12 @@ import ( "go/ast" "go/types" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "caseOrder" info.Tags = []string{"diagnostic"} info.Summary = "Detects erroneous case order inside switch statements" @@ -28,14 +28,14 @@ case ast.Expr: fmt.Println("expr") }` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForStmt(&caseOrderChecker{ctx: ctx}) }) } type caseOrderChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *caseOrderChecker) VisitStmt(stmt ast.Stmt) { @@ -56,9 +56,9 @@ func (c *caseOrderChecker) checkTypeSwitch(s *ast.TypeSwitchStmt) { for _, cc := range s.Body.List { cc := cc.(*ast.CaseClause) for _, x := range cc.List { - typ := c.ctx.TypesInfo.TypeOf(x) - if typ == nil { - c.warnTypeImpl(cc, x) + typ := c.ctx.TypeOf(x) + if typ == linter.UnknownType { + c.warnUnknownType(cc, x) return } for _, iface := range ifaces { @@ -78,11 +78,11 @@ func (c *caseOrderChecker) warnTypeSwitch(cause, concrete, iface ast.Node) { c.ctx.Warn(cause, "case %s must go before the %s case", concrete, iface) } -func (c *caseOrderChecker) warnTypeImpl(cause, concrete ast.Node) { +func (c *caseOrderChecker) warnUnknownType(cause, concrete ast.Node) { c.ctx.Warn(cause, "type is not defined %s", concrete) } func (c *caseOrderChecker) checkSwitch(s *ast.SwitchStmt) { - // TODO(Quasilyte): can handle expression cases that overlap. + // TODO(quasilyte): can handle expression cases that overlap. // Cases that have narrower value range should go before wider ones. 
} diff --git a/vendor/github.com/go-critic/go-critic/checkers/checkers.go b/vendor/github.com/go-critic/go-critic/checkers/checkers.go index 9620222..0c2ebc0 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/checkers.go +++ b/vendor/github.com/go-critic/go-critic/checkers/checkers.go @@ -2,9 +2,18 @@ package checkers import ( - "github.com/go-lintpack/lintpack" + "os" + + "github.com/go-critic/go-critic/framework/linter" ) -var collection = &lintpack.CheckerCollection{ +var collection = &linter.CheckerCollection{ URL: "https://github.com/go-critic/go-critic/checkers", } + +var debug = func() func() bool { + v := os.Getenv("DEBUG") != "" + return func() bool { + return v + } +}() diff --git a/vendor/github.com/go-critic/go-critic/checkers/codegenComment_checker.go b/vendor/github.com/go-critic/go-critic/checkers/codegenComment_checker.go index 14d89da..fa1bc12 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/codegenComment_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/codegenComment_checker.go @@ -5,19 +5,19 @@ import ( "regexp" "strings" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "codegenComment" - info.Tags = []string{"diagnostic", "experimental"} + info.Tags = []string{"diagnostic"} info.Summary = "Detects malformed 'code generated' file comments" info.Before = `// This file was automatically generated by foogen` info.After = `// Code generated by foogen. DO NOT EDIT.` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { patterns := []string{ "this (?:file|code) (?:was|is) auto(?:matically)? generated", "this (?:file|code) (?:was|is) generated automatically", @@ -26,7 +26,7 @@ func init() { "this (?:file|code) (?:was|is) generated", "code in this file (?:was|is) auto(?:matically)? generated", "generated (?:file|code) - do not edit", - // TODO(Quasilyte): more of these. + // TODO(quasilyte): more of these. 
} re := regexp.MustCompile("(?i)" + strings.Join(patterns, "|")) return &codegenCommentChecker{ @@ -38,7 +38,7 @@ func init() { type codegenCommentChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext badCommentRE *regexp.Regexp } diff --git a/vendor/github.com/go-critic/go-critic/checkers/commentFormatting_checker.go b/vendor/github.com/go-critic/go-critic/checkers/commentFormatting_checker.go index ed75015..467809e 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/commentFormatting_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/commentFormatting_checker.go @@ -7,23 +7,25 @@ import ( "unicode" "unicode/utf8" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "commentFormatting" - info.Tags = []string{"style", "experimental"} + info.Tags = []string{"style"} info.Summary = "Detects comments with non-idiomatic formatting" info.Before = `//This is a comment` info.After = `// This is a comment` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { parts := []string{ - `^//\w+:.*$`, //key: value - `^//nolint$`, //nolint - `^//line /.*:\d+`, //line /path/to/file:123 + `^//go:generate .*$`, // e.g.: go:generate value + `^//\w+:.*$`, // e.g.: key: value + `^//nolint\b`, // e.g.: nolint + `^//line /.*:\d+`, // e.g.: line /path/to/file:123 + `^//export \w+$`, // e.g.: export Foo } pat := "(?m)" + strings.Join(parts, "|") pragmaRE := regexp.MustCompile(pat) @@ -36,7 +38,7 @@ func init() { type commentFormattingChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext pragmaRE *regexp.Regexp } diff --git a/vendor/github.com/go-critic/go-critic/checkers/commentedOutCode_checker.go b/vendor/github.com/go-critic/go-critic/checkers/commentedOutCode_checker.go index 0554e36..8d93870 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/commentedOutCode_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/commentedOutCode_checker.go @@ -7,13 +7,13 @@ import ( "regexp" "strings" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/strparse" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "commentedOutCode" info.Tags = []string{"diagnostic", "experimental"} info.Summary = "Detects commented-out code inside function bodies" @@ -22,7 +22,7 @@ func init() { foo(1, 2)` info.After = `foo(1, 2)` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForLocalComment(&commentedOutCodeChecker{ ctx: ctx, notQuiteFuncCall: regexp.MustCompile(`\w+\s+\([^)]*\)\s*$`), @@ -32,7 +32,7 @@ foo(1, 2)` type commentedOutCodeChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext fn *ast.FuncDecl notQuiteFuncCall *regexp.Regexp diff --git a/vendor/github.com/go-critic/go-critic/checkers/commentedOutImport_checker.go 
b/vendor/github.com/go-critic/go-critic/checkers/commentedOutImport_checker.go index 5aeb86c..a532ad1 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/commentedOutImport_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/commentedOutImport_checker.go @@ -5,12 +5,12 @@ import ( "go/token" "regexp" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "commentedOutImport" info.Tags = []string{"style", "experimental"} info.Summary = "Detects commented-out imports" @@ -24,7 +24,7 @@ import ( "fmt" )` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { const pattern = `(?m)^(?://|/\*)?\s*"([a-zA-Z0-9_/]+)"\s*(?:\*/)?$` return &commentedOutImportChecker{ ctx: ctx, @@ -35,13 +35,13 @@ import ( type commentedOutImportChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext importStringRE *regexp.Regexp } func (c *commentedOutImportChecker) WalkFile(f *ast.File) { - // TODO(Quasilyte): handle commented-out import spec, + // TODO(quasilyte): handle commented-out import spec, // for example: // import "errors". for _, decl := range f.Decls { diff --git a/vendor/github.com/go-critic/go-critic/checkers/defaultCaseOrder_checker.go b/vendor/github.com/go-critic/go-critic/checkers/defaultCaseOrder_checker.go index caa0de6..755449e 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/defaultCaseOrder_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/defaultCaseOrder_checker.go @@ -3,12 +3,12 @@ package checkers import ( "go/ast" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "defaultCaseOrder" info.Tags = []string{"style"} info.Summary = "Detects when default case in switch isn't on 1st or last position" @@ -31,14 +31,14 @@ default: // <- last case (could also be the first one) // ... 
}` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForStmt(&defaultCaseOrderChecker{ctx: ctx}) }) } type defaultCaseOrderChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *defaultCaseOrderChecker) VisitStmt(stmt ast.Stmt) { diff --git a/vendor/github.com/go-critic/go-critic/checkers/deferUnlambda_checker.go b/vendor/github.com/go-critic/go-critic/checkers/deferUnlambda_checker.go new file mode 100644 index 0000000..3cab782 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/deferUnlambda_checker.go @@ -0,0 +1,94 @@ +package checkers + +import ( + "go/ast" + "go/types" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" +) + +func init() { + var info linter.CheckerInfo + info.Name = "deferUnlambda" + info.Tags = []string{"style", "experimental"} + info.Summary = "Detects deferred function literals that can be simplified" + info.Before = `defer func() { f() }()` + info.After = `f()` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { + return astwalk.WalkerForStmt(&deferUnlambdaChecker{ctx: ctx}) + }) +} + +type deferUnlambdaChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *deferUnlambdaChecker) VisitStmt(x ast.Stmt) { + def, ok := x.(*ast.DeferStmt) + if !ok { + return + } + + // We don't analyze deferred function args. + // Most deferred calls don't have them, so it's not a big deal to skip them. + if len(def.Call.Args) != 0 { + return + } + + fn, ok := def.Call.Fun.(*ast.FuncLit) + if !ok { + return + } + + if len(fn.Body.List) != 1 { + return + } + + call, ok := astcast.ToExprStmt(fn.Body.List[0]).X.(*ast.CallExpr) + if !ok || !c.isFunctionCall(call) { + return + } + + // Skip recover() as it can't be moved outside of the lambda. + // Skip panic() to avoid affecting the stack trace. 
+ switch qualifiedName(call.Fun) { + case "recover", "panic": + return + } + + for _, arg := range call.Args { + if !c.isConstExpr(arg) { + return + } + } + + c.warn(def, call) +} + +func (c *deferUnlambdaChecker) isFunctionCall(e *ast.CallExpr) bool { + switch fnExpr := e.Fun.(type) { + case *ast.Ident: + return true + case *ast.SelectorExpr: + x, ok := fnExpr.X.(*ast.Ident) + if !ok { + return false + } + _, ok = c.ctx.TypesInfo.ObjectOf(x).(*types.PkgName) + return ok + default: + return false + } +} + +func (c *deferUnlambdaChecker) isConstExpr(e ast.Expr) bool { + return c.ctx.TypesInfo.Types[e].Value != nil +} + +func (c *deferUnlambdaChecker) warn(cause, suggestion ast.Node) { + c.ctx.Warn(cause, "can rewrite as `defer %s`", suggestion) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/deprecatedComment_checker.go b/vendor/github.com/go-critic/go-critic/checkers/deprecatedComment_checker.go index 3767573..82f300b 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/deprecatedComment_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/deprecatedComment_checker.go @@ -5,14 +5,14 @@ import ( "regexp" "strings" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "deprecatedComment" - info.Tags = []string{"diagnostic", "experimental"} + info.Tags = []string{"diagnostic"} info.Summary = "Detects malformed 'deprecated' doc-comments" info.Before = ` // deprecated, use FuncNew instead @@ -21,7 +21,7 @@ func FuncOld() int` // Deprecated: use FuncNew instead func FuncOld() int` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { c := &deprecatedCommentChecker{ctx: ctx} c.commonPatterns = []*regexp.Regexp{ @@ -61,7 +61,7 @@ func FuncOld() int` type deprecatedCommentChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext commonPatterns []*regexp.Regexp commonTypos []string diff --git a/vendor/github.com/go-critic/go-critic/checkers/docStub_checker.go b/vendor/github.com/go-critic/go-critic/checkers/docStub_checker.go index 5c771b3..2a3b393 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/docStub_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/docStub_checker.go @@ -6,12 +6,12 @@ import ( "regexp" "strings" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "docStub" info.Tags = []string{"style", "experimental"} info.Summary = "Detects comments that silence go lint complaints about doc-comment" @@ -26,7 +26,7 @@ func Foo() {} // Foo is a demonstration-only function. 
func Foo() {}` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { re := `(?i)^\.\.\.$|^\.$|^xxx\.?$|^whatever\.?$` c := &docStubChecker{ ctx: ctx, @@ -38,7 +38,7 @@ func Foo() {}` type docStubChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext stubCommentRE *regexp.Regexp } diff --git a/vendor/github.com/go-critic/go-critic/checkers/dupArg_checker.go b/vendor/github.com/go-critic/go-critic/checkers/dupArg_checker.go index 8197594..24e921b 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/dupArg_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/dupArg_checker.go @@ -4,21 +4,21 @@ import ( "go/ast" "go/types" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astequal" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "dupArg" info.Tags = []string{"diagnostic"} info.Summary = "Detects suspicious duplicated arguments" info.Before = `copy(dst, dst)` info.After = `copy(dst, src)` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { c := &dupArgChecker{ctx: ctx} // newMatcherFunc returns a function that matches a call if // args[xIndex] and args[yIndex] are equal. @@ -101,7 +101,7 @@ func init() { type dupArgChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext matchers map[string]func(*ast.CallExpr) bool } diff --git a/vendor/github.com/go-critic/go-critic/checkers/dupBranchBody_checker.go b/vendor/github.com/go-critic/go-critic/checkers/dupBranchBody_checker.go index a138848..3399f05 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/dupBranchBody_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/dupBranchBody_checker.go @@ -3,13 +3,13 @@ package checkers import ( "go/ast" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astequal" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "dupBranchBody" info.Tags = []string{"diagnostic"} info.Summary = "Detects duplicated branch bodies inside conditional statements" @@ -26,14 +26,14 @@ if cond { println("cond=false") }` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForStmt(&dupBranchBodyChecker{ctx: ctx}) }) } type dupBranchBodyChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *dupBranchBodyChecker) VisitStmt(stmt ast.Stmt) { diff --git a/vendor/github.com/go-critic/go-critic/checkers/dupCase_checker.go b/vendor/github.com/go-critic/go-critic/checkers/dupCase_checker.go index 26ef173..89ec66b 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/dupCase_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/dupCase_checker.go @@ -3,13 +3,13 @@ package checkers import ( "go/ast" + 
"github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "dupCase" info.Tags = []string{"diagnostic"} info.Summary = "Detects duplicated case clauses inside switch statements" @@ -22,14 +22,14 @@ switch x { case ys[0], ys[1], ys[2], ys[3], ys[4]: }` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForStmt(&dupCaseChecker{ctx: ctx}) }) } type dupCaseChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext astSet lintutil.AstSet } diff --git a/vendor/github.com/go-critic/go-critic/checkers/dupImports_checker.go b/vendor/github.com/go-critic/go-critic/checkers/dupImports_checker.go index d531413..27b796c 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/dupImports_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/dupImports_checker.go @@ -4,11 +4,11 @@ import ( "fmt" "go/ast" - "github.com/go-lintpack/lintpack" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "dupImport" info.Tags = []string{"style", "experimental"} info.Summary = "Detects multiple imports of the same package under different aliases" @@ -22,13 +22,13 @@ import( "fmt" )` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return &dupImportChecker{ctx: ctx} }) } type dupImportChecker struct { - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *dupImportChecker) WalkFile(f *ast.File) { diff --git a/vendor/github.com/go-critic/go-critic/checkers/dupSubExpr_checker.go b/vendor/github.com/go-critic/go-critic/checkers/dupSubExpr_checker.go index 24bb524..dd5cdbf 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/dupSubExpr_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/dupSubExpr_checker.go @@ -5,14 +5,14 @@ import ( "go/token" "go/types" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astequal" "github.com/go-toolsmith/typep" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "dupSubExpr" info.Tags = []string{"diagnostic"} info.Summary = "Detects suspicious duplicated sub-expressions" @@ -25,7 +25,7 @@ sort.Slice(xs, func(i, j int) bool { return xs[i].v < xs[j].v })` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { c := &dupSubExprChecker{ctx: ctx} ops := []struct { @@ -65,7 +65,7 @@ sort.Slice(xs, func(i, j int) bool { type dupSubExprChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext // opSet is a set of binary operations that do not make // sense with duplicated (same) RHS and LHS. 
@@ -93,7 +93,7 @@ func (c *dupSubExprChecker) checkBinaryExpr(expr *ast.BinaryExpr) { } func (c *dupSubExprChecker) resultIsFloat(expr ast.Expr) bool { - typ, ok := c.ctx.TypesInfo.TypeOf(expr).(*types.Basic) + typ, ok := c.ctx.TypeOf(expr).(*types.Basic) return ok && typ.Info()&types.IsFloat != 0 } diff --git a/vendor/github.com/go-critic/go-critic/checkers/elseif_checker.go b/vendor/github.com/go-critic/go-critic/checkers/elseif_checker.go index c3a9546..9e56d1c 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/elseif_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/elseif_checker.go @@ -3,16 +3,16 @@ package checkers import ( "go/ast" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astp" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "elseif" info.Tags = []string{"style"} - info.Params = lintpack.CheckerParams{ + info.Params = linter.CheckerParams{ "skipBalanced": { Value: true, Usage: "whether to skip balanced if-else pairs", @@ -30,7 +30,7 @@ if cond1 { } else if x := cond2; x { }` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { c := &elseifChecker{ctx: ctx} c.skipBalanced = info.Params.Bool("skipBalanced") return astwalk.WalkerForStmt(c) @@ -39,7 +39,7 @@ if cond1 { type elseifChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext skipBalanced bool } diff --git a/vendor/github.com/go-critic/go-critic/checkers/emptyFallthrough_checker.go b/vendor/github.com/go-critic/go-critic/checkers/emptyFallthrough_checker.go index 5908dfa..16bfa7e 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/emptyFallthrough_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/emptyFallthrough_checker.go @@ -4,12 +4,12 @@ import ( "go/ast" "go/token" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "emptyFallthrough" info.Tags = []string{"style", "experimental"} info.Summary = "Detects fallthrough that can be avoided by using multi case values" @@ -24,14 +24,14 @@ case reflect.Int, reflect.Int32: return Int }` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForStmt(&emptyFallthroughChecker{ctx: ctx}) }) } type emptyFallthroughChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *emptyFallthroughChecker) VisitStmt(stmt ast.Stmt) { diff --git a/vendor/github.com/go-critic/go-critic/checkers/emptyStringTest_checker.go b/vendor/github.com/go-critic/go-critic/checkers/emptyStringTest_checker.go index a7be906..c5e18ca 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/emptyStringTest_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/emptyStringTest_checker.go @@ -4,15 +4,15 @@ import ( "go/ast" "go/token" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + 
"github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astcopy" "github.com/go-toolsmith/typep" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "emptyStringTest" info.Tags = []string{"style", "experimental"} info.Summary = "Detects empty string checks that can be written more idiomatically" @@ -20,14 +20,14 @@ func init() { info.After = `s == ""` info.Note = "See https://dmitri.shuralyov.com/idiomatic-go#empty-string-check." - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForExpr(&emptyStringTestChecker{ctx: ctx}) }) } type emptyStringTestChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *emptyStringTestChecker) VisitExpr(e ast.Expr) { @@ -40,7 +40,7 @@ func (c *emptyStringTestChecker) VisitExpr(e ast.Expr) { return } s := lenCall.Args[0] - if !typep.HasStringProp(c.ctx.TypesInfo.TypeOf(s)) { + if !typep.HasStringProp(c.ctx.TypeOf(s)) { return } zero := astcast.ToBasicLit(cmp.Y) diff --git a/vendor/github.com/go-critic/go-critic/checkers/equalFold_checker.go b/vendor/github.com/go-critic/go-critic/checkers/equalFold_checker.go index 265b2f7..b8dfdc0 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/equalFold_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/equalFold_checker.go @@ -4,28 +4,28 @@ import ( "go/ast" "go/token" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astequal" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "equalFold" info.Tags = []string{"performance", "experimental"} info.Summary = "Detects unoptimal strings/bytes case-insensitive comparison" info.Before = `strings.ToLower(x) == strings.ToLower(y)` info.After = `strings.EqualFold(x, y)` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForExpr(&equalFoldChecker{ctx: ctx}) }) } type equalFoldChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *equalFoldChecker) VisitExpr(e ast.Expr) { diff --git a/vendor/github.com/go-critic/go-critic/checkers/evalOrder_checker.go b/vendor/github.com/go-critic/go-critic/checkers/evalOrder_checker.go index f76519c..93adf26 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/evalOrder_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/evalOrder_checker.go @@ -5,16 +5,16 @@ import ( "go/token" "go/types" + "github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astequal" "github.com/go-toolsmith/typep" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "evalOrder" info.Tags = []string{"diagnostic", "experimental"} info.Summary = "Detects unwanted dependencies on the evaluation order" @@ -24,14 +24,14 @@ err := f(&x) return x, 
err ` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForStmt(&evalOrderChecker{ctx: ctx}) }) } type evalOrderChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *evalOrderChecker) VisitStmt(stmt ast.Stmt) { @@ -75,7 +75,7 @@ func (c *evalOrderChecker) VisitStmt(stmt ast.Stmt) { } func (c *evalOrderChecker) hasPtrRecv(fn *ast.Ident) bool { - sig, ok := c.ctx.TypesInfo.TypeOf(fn).(*types.Signature) + sig, ok := c.ctx.TypeOf(fn).(*types.Signature) if !ok { return false } diff --git a/vendor/github.com/go-critic/go-critic/checkers/exitAfterDefer_checker.go b/vendor/github.com/go-critic/go-critic/checkers/exitAfterDefer_checker.go index 05ed6ae..bc3024b 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/exitAfterDefer_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/exitAfterDefer_checker.go @@ -3,17 +3,17 @@ package checkers import ( "go/ast" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astfmt" "github.com/go-toolsmith/astp" "golang.org/x/tools/go/ast/astutil" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "exitAfterDefer" - info.Tags = []string{"diagnostic", "experimental"} + info.Tags = []string{"diagnostic"} info.Summary = "Detects calls to exit/fatal inside functions that use defer" info.Before = ` defer os.Remove(filename) @@ -27,18 +27,18 @@ if bad { return }` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForFuncDecl(&exitAfterDeferChecker{ctx: ctx}) }) } type exitAfterDeferChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *exitAfterDeferChecker) VisitFuncDecl(fn *ast.FuncDecl) { - // TODO(Quasilyte): handle goto and other kinds of flow that break + // TODO(quasilyte): handle goto and other kinds of flow that break // the algorithm below that expects the latter statement to be // executed after the ones that come before it. @@ -52,6 +52,14 @@ func (c *exitAfterDeferChecker) VisitFuncDecl(fn *ast.FuncDecl) { case *ast.DeferStmt: deferStmt = n case *ast.CallExpr: + // See #995. We allow `defer os.Exit()` calls + // as it's harder to determine whether they're going + // to clutter anything without actually trying to + // simulate the defer stack + understanding the control flow. + // TODO: can we use CFG here? + if _, ok := cur.Parent().(*ast.DeferStmt); ok { + return true + } if deferStmt != nil { switch qualifiedName(n.Fun) { case "log.Fatal", "log.Fatalf", "log.Fatalln", "os.Exit": @@ -66,13 +74,11 @@ func (c *exitAfterDeferChecker) VisitFuncDecl(fn *ast.FuncDecl) { } func (c *exitAfterDeferChecker) warn(cause *ast.CallExpr, deferStmt *ast.DeferStmt) { - var s string + s := astfmt.Sprint(deferStmt) if fnlit, ok := deferStmt.Call.Fun.(*ast.FuncLit); ok { // To avoid long and multi-line warning messages, // collapse the function literals. 
s = "defer " + astfmt.Sprint(fnlit.Type) + "{...}(...)" - } else { - s = astfmt.Sprint(deferStmt) } - c.ctx.Warn(cause, "%s clutters `%s`", cause.Fun, s) + c.ctx.Warn(cause, "%s will exit, and `%s` will not run", cause.Fun, s) } diff --git a/vendor/github.com/go-critic/go-critic/checkers/filepathJoin_checker.go b/vendor/github.com/go-critic/go-critic/checkers/filepathJoin_checker.go new file mode 100644 index 0000000..b11dc24 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/filepathJoin_checker.go @@ -0,0 +1,50 @@ +package checkers + +import ( + "go/ast" + "strings" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" +) + +func init() { + var info linter.CheckerInfo + info.Name = "filepathJoin" + info.Tags = []string{"diagnostic", "experimental"} + info.Summary = "Detects problems in filepath.Join() function calls" + info.Before = `filepath.Join("dir/", filename)` + info.After = `filepath.Join("dir", filename)` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { + return astwalk.WalkerForExpr(&filepathJoinChecker{ctx: ctx}) + }) +} + +type filepathJoinChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *filepathJoinChecker) VisitExpr(expr ast.Expr) { + call := astcast.ToCallExpr(expr) + if qualifiedName(call.Fun) != "filepath.Join" { + return + } + + for _, arg := range call.Args { + arg, ok := arg.(*ast.BasicLit) + if ok && c.hasSeparator(arg) { + c.warnSeparator(arg) + } + } +} + +func (c *filepathJoinChecker) hasSeparator(v *ast.BasicLit) bool { + return strings.ContainsAny(v.Value, `/\`) +} + +func (c *filepathJoinChecker) warnSeparator(sep ast.Expr) { + c.ctx.Warn(sep, "%s contains a path separator", sep) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/flagDeref_checker.go b/vendor/github.com/go-critic/go-critic/checkers/flagDeref_checker.go index cb9faee..393274c 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/flagDeref_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/flagDeref_checker.go @@ -3,12 +3,12 @@ package checkers import ( "go/ast" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "flagDeref" info.Tags = []string{"diagnostic"} info.Summary = "Detects immediate dereferencing of `flag` package pointers" @@ -21,7 +21,7 @@ flag.BoolVar(&b, "b", false, "b docs")` Dereferencing returned pointers will lead to hard to find errors where flag values are not updated after flag.Parse().` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { c := &flagDerefChecker{ ctx: ctx, flagPtrFuncs: map[string]bool{ @@ -41,7 +41,7 @@ where flag values are not updated after flag.Parse().` type flagDerefChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext flagPtrFuncs map[string]bool } diff --git a/vendor/github.com/go-critic/go-critic/checkers/flagName_checker.go b/vendor/github.com/go-critic/go-critic/checkers/flagName_checker.go index 1d43ba5..02f1968 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/flagName_checker.go +++ 
b/vendor/github.com/go-critic/go-critic/checkers/flagName_checker.go @@ -6,27 +6,28 @@ import ( "go/types" "strings" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "flagName" - info.Tags = []string{"diagnostic", "experimental"} - info.Summary = "Detects flag names with whitespace" + info.Tags = []string{"diagnostic"} + info.Summary = "Detects suspicious flag names" info.Before = `b := flag.Bool(" foo ", false, "description")` info.After = `b := flag.Bool("foo", false, "description")` + info.Note = "https://github.com/golang/go/issues/41792" - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForExpr(&flagNameChecker{ctx: ctx}) }) } type flagNameChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *flagNameChecker) VisitExpr(expr ast.Expr) { @@ -58,11 +59,30 @@ func (c *flagNameChecker) checkFlagName(call *ast.CallExpr, arg ast.Expr) { return // Non-constant name } name := constant.StringVal(cv) - if strings.Contains(name, " ") { + switch { + case name == "": + c.warnEmpty(call) + case strings.HasPrefix(name, "-"): + c.warnHypenPrefix(call, name) + case strings.Contains(name, "="): + c.warnEq(call, name) + case strings.Contains(name, " "): c.warnWhitespace(call, name) } } +func (c *flagNameChecker) warnEmpty(cause ast.Node) { + c.ctx.Warn(cause, "empty flag name") +} + +func (c *flagNameChecker) warnHypenPrefix(cause ast.Node, name string) { + c.ctx.Warn(cause, "flag name %q should not start with a hypen", name) +} + +func (c *flagNameChecker) warnEq(cause ast.Node, name string) { + c.ctx.Warn(cause, "flag name %q should not contain '='", name) +} + func (c *flagNameChecker) warnWhitespace(cause ast.Node, name string) { c.ctx.Warn(cause, "flag name %q contains whitespace", name) } diff --git a/vendor/github.com/go-critic/go-critic/checkers/hexLiteral_checker.go b/vendor/github.com/go-critic/go-critic/checkers/hexLiteral_checker.go index a700314..f3f9c53 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/hexLiteral_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/hexLiteral_checker.go @@ -5,13 +5,13 @@ import ( "go/token" "strings" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "hexLiteral" info.Tags = []string{"style", "experimental"} info.Summary = "Detects hex literals that have mixed case letter digits" @@ -25,14 +25,14 @@ y := 0xff // (B) y := 0xFF` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForExpr(&hexLiteralChecker{ctx: ctx}) }) } type hexLiteralChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *hexLiteralChecker) warn0X(lit *ast.BasicLit) { diff --git a/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go 
b/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go index 656b4cc..53c8c36 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go @@ -3,15 +3,15 @@ package checkers import ( "go/ast" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "hugeParam" info.Tags = []string{"performance"} - info.Params = lintpack.CheckerParams{ + info.Params = linter.CheckerParams{ "sizeThreshold": { Value: 80, Usage: "size in bytes that makes the warning trigger", @@ -21,7 +21,7 @@ func init() { info.Before = `func f(x [1024]int) {}` info.After = `func f(x *[1024]int) {}` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForFuncDecl(&hugeParamChecker{ ctx: ctx, sizeThreshold: int64(info.Params.Int("sizeThreshold")), @@ -31,7 +31,7 @@ func init() { type hugeParamChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext sizeThreshold int64 } @@ -48,7 +48,7 @@ func (c *hugeParamChecker) VisitFuncDecl(decl *ast.FuncDecl) { func (c *hugeParamChecker) checkParams(params []*ast.Field) { for _, p := range params { for _, id := range p.Names { - typ := c.ctx.TypesInfo.TypeOf(id) + typ := c.ctx.TypeOf(id) size := c.ctx.SizesInfo.Sizeof(typ) if size >= c.sizeThreshold { c.warn(id, size) diff --git a/vendor/github.com/go-critic/go-critic/checkers/ifElseChain_checker.go b/vendor/github.com/go-critic/go-critic/checkers/ifElseChain_checker.go index c0a456a..91e1cfb 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/ifElseChain_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/ifElseChain_checker.go @@ -3,12 +3,12 @@ package checkers import ( "go/ast" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "ifElseChain" info.Tags = []string{"style"} info.Summary = "Detects repeated if-else statements and suggests to replace them with switch statement" @@ -34,14 +34,14 @@ Permits single else or else-if; repeated else-if or else + else-if will trigger suggestion to use switch statement. 
See [EffectiveGo#switch](https://golang.org/doc/effective_go.html#switch).` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForStmt(&ifElseChainChecker{ctx: ctx}) }) } type ifElseChainChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext cause *ast.IfStmt visited map[*ast.IfStmt]bool diff --git a/vendor/github.com/go-critic/go-critic/checkers/importShadow_checker.go b/vendor/github.com/go-critic/go-critic/checkers/importShadow_checker.go index 9a2ccc5..60c0ab2 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/importShadow_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/importShadow_checker.go @@ -4,12 +4,12 @@ import ( "go/ast" "go/types" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "importShadow" info.Tags = []string{"style", "opinionated"} info.Summary = "Detects when imported package names shadowed in the assignments" @@ -19,7 +19,7 @@ filepath := "foo.txt"` info.After = ` filename := "foo.txt"` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { ctx.Require.PkgObjects = true return astwalk.WalkerForLocalDef(&importShadowChecker{ctx: ctx}, ctx.TypesInfo) }) @@ -27,7 +27,7 @@ filename := "foo.txt"` type importShadowChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *importShadowChecker) VisitLocalDef(def astwalk.Name, _ ast.Expr) { diff --git a/vendor/github.com/go-critic/go-critic/checkers/indexAlloc_checker.go b/vendor/github.com/go-critic/go-critic/checkers/indexAlloc_checker.go index 8fbe98c..7339988 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/indexAlloc_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/indexAlloc_checker.go @@ -3,14 +3,14 @@ package checkers import ( "go/ast" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/typep" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "indexAlloc" info.Tags = []string{"performance"} info.Summary = "Detects strings.Index calls that may cause unwanted allocs" @@ -18,14 +18,14 @@ func init() { info.After = `bytes.Index(x, []byte(y))` info.Note = `See Go issue for details: https://github.com/golang/go/issues/25864` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForExpr(&indexAllocChecker{ctx: ctx}) }) } type indexAllocChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *indexAllocChecker) VisitExpr(e ast.Expr) { diff --git a/vendor/github.com/go-critic/go-critic/checkers/initClause_checker.go b/vendor/github.com/go-critic/go-critic/checkers/initClause_checker.go index bfbd661..91e8816 100644 --- 
a/vendor/github.com/go-critic/go-critic/checkers/initClause_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/initClause_checker.go @@ -3,13 +3,13 @@ package checkers import ( "go/ast" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astp" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "initClause" info.Tags = []string{"style", "opinionated", "experimental"} info.Summary = "Detects non-assignment statements inside if/switch init clause" @@ -19,14 +19,14 @@ func init() { if cond { }` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForStmt(&initClauseChecker{ctx: ctx}) }) } type initClauseChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *initClauseChecker) VisitStmt(stmt ast.Stmt) { diff --git a/vendor/github.com/go-lintpack/lintpack/astwalk/comment_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/comment_walker.go similarity index 100% rename from vendor/github.com/go-lintpack/lintpack/astwalk/comment_walker.go rename to vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/comment_walker.go diff --git a/vendor/github.com/go-lintpack/lintpack/astwalk/doc_comment_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/doc_comment_walker.go similarity index 100% rename from vendor/github.com/go-lintpack/lintpack/astwalk/doc_comment_walker.go rename to vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/doc_comment_walker.go diff --git a/vendor/github.com/go-lintpack/lintpack/astwalk/expr_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/expr_walker.go similarity index 95% rename from vendor/github.com/go-lintpack/lintpack/astwalk/expr_walker.go rename to vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/expr_walker.go index 64098b2..de66c10 100644 --- a/vendor/github.com/go-lintpack/lintpack/astwalk/expr_walker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/expr_walker.go @@ -1,6 +1,8 @@ package astwalk -import "go/ast" +import ( + "go/ast" +) type exprWalker struct { visitor ExprVisitor diff --git a/vendor/github.com/go-lintpack/lintpack/astwalk/func_decl_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/func_decl_walker.go similarity index 93% rename from vendor/github.com/go-lintpack/lintpack/astwalk/func_decl_walker.go rename to vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/func_decl_walker.go index 90d921f..c7e3a43 100644 --- a/vendor/github.com/go-lintpack/lintpack/astwalk/func_decl_walker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/func_decl_walker.go @@ -1,6 +1,8 @@ package astwalk -import "go/ast" +import ( + "go/ast" +) type funcDeclWalker struct { visitor FuncDeclVisitor diff --git a/vendor/github.com/go-lintpack/lintpack/astwalk/local_comment_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_comment_walker.go similarity index 100% rename from vendor/github.com/go-lintpack/lintpack/astwalk/local_comment_walker.go rename to vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_comment_walker.go diff --git 
a/vendor/github.com/go-lintpack/lintpack/astwalk/local_def_visitor.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_visitor.go similarity index 100% rename from vendor/github.com/go-lintpack/lintpack/astwalk/local_def_visitor.go rename to vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_visitor.go diff --git a/vendor/github.com/go-lintpack/lintpack/astwalk/local_def_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_walker.go similarity index 100% rename from vendor/github.com/go-lintpack/lintpack/astwalk/local_def_walker.go rename to vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_walker.go diff --git a/vendor/github.com/go-lintpack/lintpack/astwalk/local_expr_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_expr_walker.go similarity index 95% rename from vendor/github.com/go-lintpack/lintpack/astwalk/local_expr_walker.go rename to vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_expr_walker.go index 951fd97..e455b3f 100644 --- a/vendor/github.com/go-lintpack/lintpack/astwalk/local_expr_walker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_expr_walker.go @@ -1,6 +1,8 @@ package astwalk -import "go/ast" +import ( + "go/ast" +) type localExprWalker struct { visitor LocalExprVisitor diff --git a/vendor/github.com/go-lintpack/lintpack/astwalk/stmt_list_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/stmt_list_walker.go similarity index 96% rename from vendor/github.com/go-lintpack/lintpack/astwalk/stmt_list_walker.go rename to vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/stmt_list_walker.go index 1cc0493..45c406e 100644 --- a/vendor/github.com/go-lintpack/lintpack/astwalk/stmt_list_walker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/stmt_list_walker.go @@ -1,6 +1,8 @@ package astwalk -import "go/ast" +import ( + "go/ast" +) type stmtListWalker struct { visitor StmtListVisitor diff --git a/vendor/github.com/go-lintpack/lintpack/astwalk/stmt_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/stmt_walker.go similarity index 95% rename from vendor/github.com/go-lintpack/lintpack/astwalk/stmt_walker.go rename to vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/stmt_walker.go index 722eeb1..912de86 100644 --- a/vendor/github.com/go-lintpack/lintpack/astwalk/stmt_walker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/stmt_walker.go @@ -1,6 +1,8 @@ package astwalk -import "go/ast" +import ( + "go/ast" +) type stmtWalker struct { visitor StmtVisitor diff --git a/vendor/github.com/go-lintpack/lintpack/astwalk/type_expr_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/type_expr_walker.go similarity index 100% rename from vendor/github.com/go-lintpack/lintpack/astwalk/type_expr_walker.go rename to vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/type_expr_walker.go diff --git a/vendor/github.com/go-lintpack/lintpack/astwalk/visitor.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/visitor.go similarity index 100% rename from vendor/github.com/go-lintpack/lintpack/astwalk/visitor.go rename to vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/visitor.go diff --git a/vendor/github.com/go-lintpack/lintpack/astwalk/walk_handler.go 
b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walk_handler.go similarity index 100% rename from vendor/github.com/go-lintpack/lintpack/astwalk/walk_handler.go rename to vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walk_handler.go diff --git a/vendor/github.com/go-lintpack/lintpack/astwalk/walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walker.go similarity index 70% rename from vendor/github.com/go-lintpack/lintpack/astwalk/walker.go rename to vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walker.go index fddae71..cd5e1c9 100644 --- a/vendor/github.com/go-lintpack/lintpack/astwalk/walker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walker.go @@ -3,55 +3,55 @@ package astwalk import ( "go/types" - "github.com/go-lintpack/lintpack" + "github.com/go-critic/go-critic/framework/linter" ) // WalkerForFuncDecl returns file walker implementation for FuncDeclVisitor. -func WalkerForFuncDecl(v FuncDeclVisitor) lintpack.FileWalker { +func WalkerForFuncDecl(v FuncDeclVisitor) linter.FileWalker { return &funcDeclWalker{visitor: v} } // WalkerForExpr returns file walker implementation for ExprVisitor. -func WalkerForExpr(v ExprVisitor) lintpack.FileWalker { +func WalkerForExpr(v ExprVisitor) linter.FileWalker { return &exprWalker{visitor: v} } // WalkerForLocalExpr returns file walker implementation for LocalExprVisitor. -func WalkerForLocalExpr(v LocalExprVisitor) lintpack.FileWalker { +func WalkerForLocalExpr(v LocalExprVisitor) linter.FileWalker { return &localExprWalker{visitor: v} } // WalkerForStmtList returns file walker implementation for StmtListVisitor. -func WalkerForStmtList(v StmtListVisitor) lintpack.FileWalker { +func WalkerForStmtList(v StmtListVisitor) linter.FileWalker { return &stmtListWalker{visitor: v} } // WalkerForStmt returns file walker implementation for StmtVisitor. -func WalkerForStmt(v StmtVisitor) lintpack.FileWalker { +func WalkerForStmt(v StmtVisitor) linter.FileWalker { return &stmtWalker{visitor: v} } // WalkerForTypeExpr returns file walker implementation for TypeExprVisitor. -func WalkerForTypeExpr(v TypeExprVisitor, info *types.Info) lintpack.FileWalker { +func WalkerForTypeExpr(v TypeExprVisitor, info *types.Info) linter.FileWalker { return &typeExprWalker{visitor: v, info: info} } // WalkerForLocalComment returns file walker implementation for LocalCommentVisitor. -func WalkerForLocalComment(v LocalCommentVisitor) lintpack.FileWalker { +func WalkerForLocalComment(v LocalCommentVisitor) linter.FileWalker { return &localCommentWalker{visitor: v} } // WalkerForComment returns file walker implementation for CommentVisitor. -func WalkerForComment(v CommentVisitor) lintpack.FileWalker { +func WalkerForComment(v CommentVisitor) linter.FileWalker { return &commentWalker{visitor: v} } // WalkerForDocComment returns file walker implementation for DocCommentVisitor. -func WalkerForDocComment(v DocCommentVisitor) lintpack.FileWalker { +func WalkerForDocComment(v DocCommentVisitor) linter.FileWalker { return &docCommentWalker{visitor: v} } // WalkerForLocalDef returns file walker implementation for LocalDefVisitor. 
-func WalkerForLocalDef(v LocalDefVisitor, info *types.Info) lintpack.FileWalker { +func WalkerForLocalDef(v LocalDefVisitor, info *types.Info) linter.FileWalker { return &localDefWalker{visitor: v, info: info} } diff --git a/vendor/github.com/go-critic/go-critic/checkers/mapKey_checker.go b/vendor/github.com/go-critic/go-critic/checkers/mapKey_checker.go index de3e781..5059091 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/mapKey_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/mapKey_checker.go @@ -5,18 +5,18 @@ import ( "go/types" "strings" + "github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astp" "github.com/go-toolsmith/typep" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "mapKey" - info.Tags = []string{"diagnostic", "experimental"} + info.Tags = []string{"diagnostic"} info.Summary = "Detects suspicious map literal keys" info.Before = ` _ = map[string]int{ @@ -29,14 +29,14 @@ _ = map[string]int{ "bar": 2, }` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForExpr(&mapKeyChecker{ctx: ctx}) }) } type mapKeyChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext astSet lintutil.AstSet } @@ -47,7 +47,7 @@ func (c *mapKeyChecker) VisitExpr(expr ast.Expr) { return } - typ, ok := c.ctx.TypesInfo.TypeOf(lit).Underlying().(*types.Map) + typ, ok := c.ctx.TypeOf(lit).Underlying().(*types.Map) if !ok { return } diff --git a/vendor/github.com/go-critic/go-critic/checkers/methodExprCall_checker.go b/vendor/github.com/go-critic/go-critic/checkers/methodExprCall_checker.go index 60da116..efd6311 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/methodExprCall_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/methodExprCall_checker.go @@ -4,15 +4,15 @@ import ( "go/ast" "go/token" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astcopy" "github.com/go-toolsmith/typep" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "methodExprCall" info.Tags = []string{"style", "experimental"} info.Summary = "Detects method expression call that can be replaced with a method call" @@ -21,14 +21,14 @@ foo.bar(f)` info.After = `f := foo{} f.bar()` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForExpr(&methodExprCallChecker{ctx: ctx}) }) } type methodExprCallChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *methodExprCallChecker) VisitExpr(x ast.Expr) { diff --git a/vendor/github.com/go-critic/go-critic/checkers/nestingReduce_checker.go b/vendor/github.com/go-critic/go-critic/checkers/nestingReduce_checker.go index 4a0331d..de02c7e 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/nestingReduce_checker.go +++ 
b/vendor/github.com/go-critic/go-critic/checkers/nestingReduce_checker.go @@ -3,15 +3,15 @@ package checkers import ( "go/ast" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "nestingReduce" info.Tags = []string{"style", "opinionated", "experimental"} - info.Params = lintpack.CheckerParams{ + info.Params = linter.CheckerParams{ "bodyWidth": { Value: 5, Usage: "min number of statements inside a branch to trigger a warning", @@ -32,7 +32,7 @@ for _, v := range a { body() }` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { c := &nestingReduceChecker{ctx: ctx} c.bodyWidth = info.Params.Int("bodyWidth") return astwalk.WalkerForStmt(c) @@ -41,7 +41,7 @@ for _, v := range a { type nestingReduceChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext bodyWidth int } diff --git a/vendor/github.com/go-critic/go-critic/checkers/newDeref_checker.go b/vendor/github.com/go-critic/go-critic/checkers/newDeref_checker.go index 75e7f64..86757a7 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/newDeref_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/newDeref_checker.go @@ -3,36 +3,36 @@ package checkers import ( "go/ast" + "github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" "golang.org/x/tools/go/ast/astutil" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "newDeref" - info.Tags = []string{"style", "experimental"} + info.Tags = []string{"style"} info.Summary = "Detects immediate dereferencing of `new` expressions" info.Before = `x := *new(bool)` info.After = `x := false` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForExpr(&newDerefChecker{ctx: ctx}) }) } type newDerefChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *newDerefChecker) VisitExpr(expr ast.Expr) { deref := astcast.ToStarExpr(expr) call := astcast.ToCallExpr(deref.X) if astcast.ToIdent(call.Fun).Name == "new" { - typ := c.ctx.TypesInfo.TypeOf(call.Args[0]) + typ := c.ctx.TypeOf(call.Args[0]) zv := lintutil.ZeroValueOf(astutil.Unparen(call.Args[0]), typ) if zv != nil { c.warn(expr, zv) diff --git a/vendor/github.com/go-critic/go-critic/checkers/nilValReturn_checker.go b/vendor/github.com/go-critic/go-critic/checkers/nilValReturn_checker.go index 231e258..37f964f 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/nilValReturn_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/nilValReturn_checker.go @@ -4,14 +4,14 @@ import ( "go/ast" "go/token" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astequal" "github.com/go-toolsmith/typep" ) func init() { - var info 
lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "nilValReturn" info.Tags = []string{"diagnostic", "experimental"} info.Summary = "Detects return statements those results evaluate to nil" @@ -29,14 +29,14 @@ if err != nil { return err }` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForStmt(&nilValReturnChecker{ctx: ctx}) }) } type nilValReturnChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *nilValReturnChecker) VisitStmt(stmt ast.Stmt) { diff --git a/vendor/github.com/go-critic/go-critic/checkers/octalLiteral_checker.go b/vendor/github.com/go-critic/go-critic/checkers/octalLiteral_checker.go index e40ec6d..a1d9680 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/octalLiteral_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/octalLiteral_checker.go @@ -5,20 +5,20 @@ import ( "go/token" "go/types" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "octalLiteral" info.Tags = []string{"diagnostic", "experimental"} info.Summary = "Detects octal literals passed to functions" info.Before = `foo(02)` info.After = `foo(2)` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { c := &octalLiteralChecker{ ctx: ctx, octFriendlyPkg: map[string]bool{ @@ -32,7 +32,7 @@ func init() { type octalLiteralChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext octFriendlyPkg map[string]bool } diff --git a/vendor/github.com/go-critic/go-critic/checkers/offBy1_checker.go b/vendor/github.com/go-critic/go-critic/checkers/offBy1_checker.go index d5c8de0..485c4b2 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/offBy1_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/offBy1_checker.go @@ -4,8 +4,8 @@ import ( "go/ast" "go/token" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astcopy" "github.com/go-toolsmith/astequal" @@ -13,33 +13,33 @@ import ( ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "offBy1" - info.Tags = []string{"diagnostic", "experimental"} + info.Tags = []string{"diagnostic"} info.Summary = "Detects various off-by-one kind of errors" info.Before = `xs[len(xs)]` info.After = `xs[len(xs)-1]` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForExpr(&offBy1Checker{ctx: ctx}) }) } type offBy1Checker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *offBy1Checker) VisitExpr(e ast.Expr) { - // TODO(Quasilyte): handle more off-by-1 patterns. - // TODO(Quasilyte): check whether go/analysis can help here. + // TODO(quasilyte): handle more off-by-1 patterns. 
+ // TODO(quasilyte): check whether go/analysis can help here. // Detect s[len(s)] expressions that always panic. // The correct form is s[len(s)-1]. indexExpr := astcast.ToIndexExpr(e) indexed := indexExpr.X - if !typep.IsSlice(c.ctx.TypesInfo.TypeOf(indexed)) { + if !typep.IsSlice(c.ctx.TypeOf(indexed)) { return } if !typep.SideEffectFree(c.ctx.TypesInfo, indexed) { diff --git a/vendor/github.com/go-critic/go-critic/checkers/paramTypeCombine_checker.go b/vendor/github.com/go-critic/go-critic/checkers/paramTypeCombine_checker.go index ffa7406..f9f9d6c 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/paramTypeCombine_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/paramTypeCombine_checker.go @@ -3,27 +3,27 @@ package checkers import ( "go/ast" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astequal" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "paramTypeCombine" info.Tags = []string{"style", "opinionated"} info.Summary = "Detects if function parameters could be combined by type and suggest the way to do it" info.Before = `func foo(a, b int, c, d int, e, f int, g int) {}` info.After = `func foo(a, b, c, d, e, f, g int) {}` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForFuncDecl(¶mTypeCombineChecker{ctx: ctx}) }) } type paramTypeCombineChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *paramTypeCombineChecker) EnterFunc(*ast.FuncDecl) bool { @@ -51,7 +51,8 @@ func (c *paramTypeCombineChecker) optimizeParams(params *ast.FieldList) *ast.Fie // ast.Field have empty name list. 
skip := params == nil || len(params.List) < 2 || - len(params.List[0].Names) == 0 + len(params.List[0].Names) == 0 || + c.paramsAreMultiLine(params) if skip { return params } @@ -84,3 +85,9 @@ func (c *paramTypeCombineChecker) optimizeParams(params *ast.FieldList) *ast.Fie func (c *paramTypeCombineChecker) warn(f1, f2 *ast.FuncType) { c.ctx.Warn(f1, "%s could be replaced with %s", f1, f2) } + +func (c *paramTypeCombineChecker) paramsAreMultiLine(params *ast.FieldList) bool { + startPos := c.ctx.FileSet.Position(params.Opening) + endPos := c.ctx.FileSet.Position(params.Closing) + return startPos.Line != endPos.Line +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/ptrToRefParam_checker.go b/vendor/github.com/go-critic/go-critic/checkers/ptrToRefParam_checker.go index dacffc8..9694fc5 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/ptrToRefParam_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/ptrToRefParam_checker.go @@ -4,26 +4,26 @@ import ( "go/ast" "go/types" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "ptrToRefParam" info.Tags = []string{"style", "opinionated", "experimental"} info.Summary = "Detects input and output parameters that have a type of pointer to referential type" info.Before = `func f(m *map[string]int) (*chan *int)` info.After = `func f(m map[string]int) (chan *int)` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForFuncDecl(&ptrToRefParamChecker{ctx: ctx}) }) } type ptrToRefParamChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *ptrToRefParamChecker) VisitFuncDecl(fn *ast.FuncDecl) { @@ -35,7 +35,7 @@ func (c *ptrToRefParamChecker) VisitFuncDecl(fn *ast.FuncDecl) { func (c *ptrToRefParamChecker) checkParams(params []*ast.Field) { for _, param := range params { - ptr, ok := c.ctx.TypesInfo.TypeOf(param.Type).(*types.Pointer) + ptr, ok := c.ctx.TypeOf(param.Type).(*types.Pointer) if !ok { continue } diff --git a/vendor/github.com/go-critic/go-critic/checkers/rangeExprCopy_checker.go b/vendor/github.com/go-critic/go-critic/checkers/rangeExprCopy_checker.go index 387d1bb..90b5987 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/rangeExprCopy_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/rangeExprCopy_checker.go @@ -4,15 +4,15 @@ import ( "go/ast" "go/types" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "rangeExprCopy" info.Tags = []string{"performance"} - info.Params = lintpack.CheckerParams{ + info.Params = linter.CheckerParams{ "sizeThreshold": { Value: 512, Usage: "size in bytes that makes the warning trigger", @@ -36,7 +36,7 @@ for _, x := range &xs { // No copy }` info.Note = "See Go issue for details: https://github.com/golang/go/issues/15812." 
- collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { c := &rangeExprCopyChecker{ctx: ctx} c.sizeThreshold = int64(info.Params.Int("sizeThreshold")) c.skipTestFuncs = info.Params.Bool("skipTestFuncs") @@ -46,7 +46,7 @@ for _, x := range &xs { // No copy type rangeExprCopyChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext sizeThreshold int64 skipTestFuncs bool diff --git a/vendor/github.com/go-critic/go-critic/checkers/rangeValCopy_checker.go b/vendor/github.com/go-critic/go-critic/checkers/rangeValCopy_checker.go index 182538a..9eafaec 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/rangeValCopy_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/rangeValCopy_checker.go @@ -3,15 +3,15 @@ package checkers import ( "go/ast" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "rangeValCopy" info.Tags = []string{"performance"} - info.Params = lintpack.CheckerParams{ + info.Params = linter.CheckerParams{ "sizeThreshold": { Value: 128, Usage: "size in bytes that makes the warning trigger", @@ -35,7 +35,7 @@ for i := range xs { // Loop body. }` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { c := &rangeValCopyChecker{ctx: ctx} c.sizeThreshold = int64(info.Params.Int("sizeThreshold")) c.skipTestFuncs = info.Params.Bool("skipTestFuncs") @@ -45,7 +45,7 @@ for i := range xs { type rangeValCopyChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext sizeThreshold int64 skipTestFuncs bool @@ -61,7 +61,7 @@ func (c *rangeValCopyChecker) VisitStmt(stmt ast.Stmt) { if !ok || rng.Value == nil { return } - typ := c.ctx.TypesInfo.TypeOf(rng.Value) + typ := c.ctx.TypeOf(rng.Value) if typ == nil { return } diff --git a/vendor/github.com/go-critic/go-critic/checkers/regexpMust_checker.go b/vendor/github.com/go-critic/go-critic/checkers/regexpMust_checker.go index ef7a397..411932e 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/regexpMust_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/regexpMust_checker.go @@ -4,28 +4,28 @@ import ( "go/ast" "strings" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astp" "golang.org/x/tools/go/ast/astutil" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "regexpMust" info.Tags = []string{"style"} info.Summary = "Detects `regexp.Compile*` that can be replaced with `regexp.MustCompile*`" info.Before = `re, _ := regexp.Compile("const pattern")` info.After = `re := regexp.MustCompile("const pattern")` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForExpr(®expMustChecker{ctx: ctx}) }) } type regexpMustChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *regexpMustChecker) 
VisitExpr(x ast.Expr) { diff --git a/vendor/github.com/go-critic/go-critic/checkers/regexpPattern_checker.go b/vendor/github.com/go-critic/go-critic/checkers/regexpPattern_checker.go index 383deb5..018ab42 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/regexpPattern_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/regexpPattern_checker.go @@ -6,19 +6,19 @@ import ( "regexp" "strings" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "regexpPattern" info.Tags = []string{"diagnostic", "experimental"} info.Summary = "Detects suspicious regexp patterns" info.Before = "regexp.MustCompile(`google.com|yandex.ru`)" info.After = "regexp.MustCompile(`google\\.com|yandex\\.ru`)" - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { domains := []string{ "com", "org", @@ -39,7 +39,7 @@ func init() { type regexpPatternChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext domainRE *regexp.Regexp } diff --git a/vendor/github.com/go-critic/go-critic/checkers/regexpSimplify_checker.go b/vendor/github.com/go-critic/go-critic/checkers/regexpSimplify_checker.go new file mode 100644 index 0000000..10dcb32 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/regexpSimplify_checker.go @@ -0,0 +1,511 @@ +package checkers + +import ( + "fmt" + "go/ast" + "go/constant" + "log" + "strings" + "unicode/utf8" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/quasilyte/regex/syntax" +) + +func init() { + var info linter.CheckerInfo + info.Name = "regexpSimplify" + info.Tags = []string{"style", "experimental", "opinionated"} + info.Summary = "Detects regexp patterns that can be simplified" + info.Before = "regexp.MustCompile(`(?:a|b|c) [a-z][a-z]*`)" + info.After = "regexp.MustCompile(`[abc] {3}[a-z]+`)" + + // TODO(quasilyte): add params to control most opinionated replacements + // like `[0-9] -> \d` + // `[[:digit:]] -> \d` + // `[A-Za-z0-9_]` -> `\w` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { + opts := &syntax.ParserOptions{ + NoLiterals: true, + } + c := ®expSimplifyChecker{ + ctx: ctx, + parser: syntax.NewParser(opts), + out: &strings.Builder{}, + } + return astwalk.WalkerForExpr(c) + }) +} + +type regexpSimplifyChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + parser *syntax.Parser + + // out is a tmp buffer where we build a simplified regexp pattern. + out *strings.Builder + // score is a number of applied simplifications + score int +} + +func (c *regexpSimplifyChecker) VisitExpr(x ast.Expr) { + call, ok := x.(*ast.CallExpr) + if !ok { + return + } + + switch qualifiedName(call.Fun) { + case "regexp.Compile", "regexp.MustCompile": + cv := c.ctx.TypesInfo.Types[call.Args[0]].Value + if cv == nil || cv.Kind() != constant.String { + return + } + pat := constant.StringVal(cv) + if len(pat) > 60 { + // Skip scary regexp patterns for now. + break + } + + // Only do 2 passes. 
+ simplified := pat + for pass := 0; pass < 2; pass++ { + candidate := c.simplify(pass, simplified) + if candidate == "" { + break + } + simplified = candidate + } + if simplified != "" && simplified != pat { + c.warn(call.Args[0], pat, simplified) + } + } +} + +func (c *regexpSimplifyChecker) simplify(pass int, pat string) string { + re, err := c.parser.Parse(pat) + if err != nil { + return "" + } + + c.score = 0 + c.out.Reset() + + // TODO(quasilyte): suggest char ranges for things like [012345689]? + // TODO(quasilyte): evaluate char range to suggest better replacements. + // TODO(quasilyte): (?:ab|ac) -> a[bc] + // TODO(quasilyte): suggest "s" and "." flag if things like [\w\W] are used. + // TODO(quasilyte): x{n}x? -> x{n,n+1} + + c.walk(re.Expr) + + if debug() { + // This happens only in one of two cases: + // 1. Parser has a bug and we got invalid AST for the given pattern. + // 2. Simplifier incorrectly built a replacement string from the AST. + if c.score == 0 && c.out.String() != pat { + log.Printf("pass %d: unexpected pattern diff:\n\thave: %q\n\twant: %q", + pass, c.out.String(), pat) + } + } + + if c.score > 0 { + return c.out.String() + } + return "" +} + +func (c *regexpSimplifyChecker) walk(e syntax.Expr) { + out := c.out + + switch e.Op { + case syntax.OpConcat: + c.walkConcat(e) + + case syntax.OpAlt: + c.walkAlt(e) + + case syntax.OpCharRange: + s := c.simplifyCharRange(e) + if s != "" { + out.WriteString(s) + c.score++ + } else { + out.WriteString(e.Value) + } + + case syntax.OpGroupWithFlags: + out.WriteString("(") + out.WriteString(e.Args[1].Value) + out.WriteString(":") + c.walk(e.Args[0]) + out.WriteString(")") + case syntax.OpGroup: + c.walkGroup(e) + case syntax.OpCapture: + out.WriteString("(") + c.walk(e.Args[0]) + out.WriteString(")") + case syntax.OpNamedCapture: + out.WriteString("(?P<") + out.WriteString(e.Args[1].Value) + out.WriteString(">") + c.walk(e.Args[0]) + out.WriteString(")") + + case syntax.OpRepeat: + // TODO(quasilyte): is it worth it to analyze repeat argument + // more closely and handle `{n,n} -> {n}` cases? + rep := e.Args[1].Value + switch rep { + case "{0,1}": + c.walk(e.Args[0]) + out.WriteString("?") + c.score++ + case "{1,}": + c.walk(e.Args[0]) + out.WriteString("+") + c.score++ + case "{0,}": + c.walk(e.Args[0]) + out.WriteString("*") + c.score++ + case "{0}": + // Maybe {0} should be reported by another check, regexpLint? 
+ c.score++ + case "{1}": + c.walk(e.Args[0]) + c.score++ + default: + c.walk(e.Args[0]) + out.WriteString(rep) + } + + case syntax.OpPosixClass: + out.WriteString(e.Value) + + case syntax.OpNegCharClass: + s := c.simplifyNegCharClass(e) + if s != "" { + c.out.WriteString(s) + c.score++ + } else { + out.WriteString("[^") + for _, e := range e.Args { + c.walk(e) + } + out.WriteString("]") + } + + case syntax.OpCharClass: + s := c.simplifyCharClass(e) + if s != "" { + c.out.WriteString(s) + c.score++ + } else { + out.WriteString("[") + for _, e := range e.Args { + c.walk(e) + } + out.WriteString("]") + } + + case syntax.OpEscapeChar: + switch e.Value { + case `\&`, `\#`, `\!`, `\@`, `\%`, `\<`, `\>`, `\:`, `\;`, `\/`, `\,`, `\=`, `\.`: + c.score++ + out.WriteString(e.Value[len(`\`):]) + default: + out.WriteString(e.Value) + } + + case syntax.OpQuestion, syntax.OpNonGreedy: + c.walk(e.Args[0]) + out.WriteString("?") + case syntax.OpStar: + c.walk(e.Args[0]) + out.WriteString("*") + case syntax.OpPlus: + c.walk(e.Args[0]) + out.WriteString("+") + + default: + out.WriteString(e.Value) + } +} + +func (c *regexpSimplifyChecker) walkGroup(g syntax.Expr) { + switch g.Args[0].Op { + case syntax.OpChar, syntax.OpEscapeChar, syntax.OpEscapeMeta, syntax.OpCharClass: + c.walk(g.Args[0]) + c.score++ + return + } + + c.out.WriteString("(?:") + c.walk(g.Args[0]) + c.out.WriteString(")") +} + +func (c *regexpSimplifyChecker) simplifyNegCharClass(e syntax.Expr) string { + switch e.Value { + case `[^0-9]`: + return `\D` + case `[^\s]`: + return `\S` + case `[^\S]`: + return `\s` + case `[^\w]`: + return `\W` + case `[^\W]`: + return `\w` + case `[^\d]`: + return `\D` + case `[^\D]`: + return `\d` + case `[^[:^space:]]`: + return `\s` + case `[^[:space:]]`: + return `\S` + case `[^[:^word:]]`: + return `\w` + case `[^[:word:]]`: + return `\W` + case `[^[:^digit:]]`: + return `\d` + case `[^[:digit:]]`: + return `\D` + } + + return "" +} + +func (c *regexpSimplifyChecker) simplifyCharClass(e syntax.Expr) string { + switch e.Value { + case `[0-9]`: + return `\d` + case `[[:word:]]`: + return `\w` + case `[[:^word:]]`: + return `\W` + case `[[:digit:]]`: + return `\d` + case `[[:^digit:]]`: + return `\D` + case `[[:space:]]`: + return `\s` + case `[[:^space:]]`: + return `\S` + case `[][]`: + return `\]\[` + case `[]]`: + return `\]` + } + + if len(e.Args) == 1 { + switch e.Args[0].Op { + case syntax.OpChar: + switch v := e.Args[0].Value; v { + case "|", "*", "+", "?", ".", "[", "^", "$", "(", ")": + // Can't take outside of the char group without escaping. 
+ default: + return v + } + case syntax.OpEscapeChar: + return e.Args[0].Value + } + } + + return "" +} + +func (c *regexpSimplifyChecker) canMerge(x, y syntax.Expr) bool { + if x.Op != y.Op { + return false + } + switch x.Op { + case syntax.OpChar, syntax.OpCharClass, syntax.OpEscapeMeta, syntax.OpEscapeChar, syntax.OpNegCharClass, syntax.OpGroup: + return x.Value == y.Value + default: + return false + } +} + +func (c *regexpSimplifyChecker) canCombine(x, y syntax.Expr) (threshold int, ok bool) { + if x.Op != y.Op { + return 0, false + } + + switch x.Op { + case syntax.OpDot: + return 3, true + + case syntax.OpChar: + if x.Value != y.Value { + return 0, false + } + if x.Value == " " { + return 1, true + } + return 4, true + + case syntax.OpEscapeMeta, syntax.OpEscapeChar: + if x.Value == y.Value { + return 2, true + } + + case syntax.OpCharClass, syntax.OpNegCharClass, syntax.OpGroup: + if x.Value == y.Value { + return 1, true + } + } + + return 0, false +} + +func (c *regexpSimplifyChecker) concatLiteral(e syntax.Expr) string { + if e.Op == syntax.OpConcat && c.allChars(e) { + return e.Value + } + return "" +} + +func (c *regexpSimplifyChecker) allChars(e syntax.Expr) bool { + for _, a := range e.Args { + if a.Op != syntax.OpChar { + return false + } + } + return true +} + +func (c *regexpSimplifyChecker) factorPrefixSuffix(alt syntax.Expr) bool { + // TODO: more forms of prefixes/suffixes? + // + // A more generalized algorithm could handle `fo|fo1|fo2` -> `fo[12]?`. + // but it's an open question whether the latter form universally better. + // + // Right now it handles only the simplest cases: + // `http|https` -> `https?` + // `xfoo|foo` -> `x?foo` + if len(alt.Args) != 2 { + return false + } + x := c.concatLiteral(alt.Args[0]) + y := c.concatLiteral(alt.Args[1]) + if x == y { + return false // Reject non-literals and identical strings early + } + + // Let x be a shorter string. + if len(x) > len(y) { + x, y = y, x + } + // Do we have a common prefix? + tail := strings.TrimPrefix(y, x) + if len(tail) <= utf8.UTFMax && utf8.RuneCountInString(tail) == 1 { + c.out.WriteString(x + tail + "?") + c.score++ + return true + } + // Do we have a common suffix? + head := strings.TrimSuffix(y, x) + if len(head) <= utf8.UTFMax && utf8.RuneCountInString(head) == 1 { + c.out.WriteString(head + "?" + x) + c.score++ + return true + } + return false +} + +func (c *regexpSimplifyChecker) walkAlt(alt syntax.Expr) { + // `x|y|z` -> `[xyz]`. + if c.allChars(alt) { + c.score++ + c.out.WriteString("[") + for _, e := range alt.Args { + c.out.WriteString(e.Value) + } + c.out.WriteString("]") + return + } + + if c.factorPrefixSuffix(alt) { + return + } + + for i, e := range alt.Args { + c.walk(e) + if i != len(alt.Args)-1 { + c.out.WriteString("|") + } + } +} + +func (c *regexpSimplifyChecker) walkConcat(concat syntax.Expr) { + i := 0 + for i < len(concat.Args) { + x := concat.Args[i] + c.walk(x) + i++ + + if i >= len(concat.Args) { + break + } + + // Try merging `xy*` into `x+` where x=y. + if concat.Args[i].Op == syntax.OpStar { + if c.canMerge(x, concat.Args[i].Args[0]) { + c.out.WriteString("+") + c.score++ + i++ + continue + } + } + + // Try combining `xy` into `x{2}` where x=y. + threshold, ok := c.canCombine(x, concat.Args[i]) + if !ok { + continue + } + n := 1 // Can combine at least 1 pair. 
+ for j := i + 1; j < len(concat.Args); j++ { + _, ok := c.canCombine(x, concat.Args[j]) + if !ok { + break + } + n++ + } + if n >= threshold { + fmt.Fprintf(c.out, "{%d}", n+1) + c.score++ + i += n + } + } +} + +func (c *regexpSimplifyChecker) simplifyCharRange(rng syntax.Expr) string { + if rng.Args[0].Op != syntax.OpChar || rng.Args[1].Op != syntax.OpChar { + return "" + } + + lo := rng.Args[0].Value + hi := rng.Args[1].Value + if len(lo) == 1 && len(hi) == 1 { + switch hi[0] - lo[0] { + case 0: + return lo + case 1: + return fmt.Sprintf("%s%s", lo, hi) + case 2: + return fmt.Sprintf("%s%s%s", lo, string(lo[0]+1), hi) + } + } + + return "" +} + +func (c *regexpSimplifyChecker) warn(cause ast.Expr, orig, suggest string) { + c.ctx.Warn(cause, "can re-write `%s` as `%s`", orig, suggest) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go b/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go new file mode 100644 index 0000000..9be1f3c --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go @@ -0,0 +1,115 @@ +package checkers + +import ( + "bytes" + "fmt" + "go/ast" + "go/token" + "io/ioutil" + "log" + "os" + "strings" + + "github.com/go-critic/go-critic/framework/linter" + "github.com/quasilyte/go-ruleguard/ruleguard" +) + +func init() { + var info linter.CheckerInfo + info.Name = "ruleguard" + info.Tags = []string{"style", "experimental"} + info.Params = linter.CheckerParams{ + "rules": { + Value: "", + Usage: "comma-separated list of gorule file paths", + }, + "debug": { + Value: "", + Usage: "enable debug for the specified named rules group", + }, + } + info.Summary = "Runs user-defined rules using ruleguard linter" + info.Details = "Reads a rules file and turns them into go-critic checkers." + info.Before = `N/A` + info.After = `N/A` + info.Note = "See https://github.com/quasilyte/go-ruleguard." + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { + return newRuleguardChecker(&info, ctx) + }) +} + +func newRuleguardChecker(info *linter.CheckerInfo, ctx *linter.CheckerContext) *ruleguardChecker { + c := &ruleguardChecker{ + ctx: ctx, + debugGroup: info.Params.String("debug"), + } + rulesFlag := info.Params.String("rules") + if rulesFlag == "" { + return c + } + + // TODO(quasilyte): handle initialization errors better when we make + // a transition to the go/analysis framework. + // + // For now, we log error messages and return a ruleguard checker + // with an empty rules set. 
+ + fset := token.NewFileSet() + filenames := strings.Split(rulesFlag, ",") + var ruleSets []*ruleguard.GoRuleSet + for _, filename := range filenames { + filename = strings.TrimSpace(filename) + data, err := ioutil.ReadFile(filename) + if err != nil { + log.Printf("ruleguard init error: %+v", err) + return c + } + rset, err := ruleguard.ParseRules(filename, fset, bytes.NewReader(data)) + if err != nil { + log.Printf("ruleguard init error: %+v", err) + return c + } + ruleSets = append(ruleSets, rset) + } + + c.rset = ruleguard.MergeRuleSets(ruleSets) + return c +} + +type ruleguardChecker struct { + ctx *linter.CheckerContext + + debugGroup string + rset *ruleguard.GoRuleSet +} + +func (c *ruleguardChecker) WalkFile(f *ast.File) { + if c.rset == nil { + return + } + + ctx := &ruleguard.Context{ + Debug: c.debugGroup, + DebugPrint: func(s string) { + fmt.Fprintln(os.Stderr, s) + }, + Pkg: c.ctx.Pkg, + Types: c.ctx.TypesInfo, + Sizes: c.ctx.SizesInfo, + Fset: c.ctx.FileSet, + Report: func(_ ruleguard.GoRuleInfo, n ast.Node, msg string, _ *ruleguard.Suggestion) { + // TODO(quasilyte): investigate whether we should add a rule name as + // a message prefix here. + c.ctx.Warn(n, msg) + }, + } + + err := ruleguard.RunRules(ctx, f, c.rset) + if err != nil { + // Normally this should never happen, but since + // we don't have a better mechanism to report errors, + // emit a warning. + c.ctx.Warn(f, "execution error: %v", err) + } +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/singleCaseSwitch_checker.go b/vendor/github.com/go-critic/go-critic/checkers/singleCaseSwitch_checker.go index 6cdb06a..abead3f 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/singleCaseSwitch_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/singleCaseSwitch_checker.go @@ -2,13 +2,15 @@ package checkers import ( "go/ast" + "go/token" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "golang.org/x/tools/go/ast/astutil" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "singleCaseSwitch" info.Tags = []string{"style"} info.Summary = "Detects switch statements that could be better written as if statement" @@ -22,14 +24,14 @@ if x, ok := x.(int); ok { body() }` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForStmt(&singleCaseSwitchChecker{ctx: ctx}) }) } type singleCaseSwitchChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *singleCaseSwitchChecker) VisitStmt(stmt ast.Stmt) { @@ -42,16 +44,37 @@ func (c *singleCaseSwitchChecker) VisitStmt(stmt ast.Stmt) { } func (c *singleCaseSwitchChecker) checkSwitchStmt(stmt ast.Stmt, body *ast.BlockStmt) { - if len(body.List) == 1 { - if body.List[0].(*ast.CaseClause).List == nil { - // default case. 
- c.warnDefault(stmt) - } else if len(body.List[0].(*ast.CaseClause).List) == 1 { - c.warn(stmt) - } + if len(body.List) != 1 { + return + } + cc := body.List[0].(*ast.CaseClause) + if c.hasBreak(cc) { + return + } + switch { + case cc.List == nil: + c.warnDefault(stmt) + case len(cc.List) == 1: + c.warn(stmt) } } +func (c *singleCaseSwitchChecker) hasBreak(stmt ast.Stmt) bool { + found := false + astutil.Apply(stmt, func(cur *astutil.Cursor) bool { + switch n := cur.Node().(type) { + case *ast.BranchStmt: + if n.Tok == token.BREAK { + found = true + } + case *ast.ForStmt, *ast.RangeStmt, *ast.SelectStmt, *ast.SwitchStmt: + return false + } + return true + }, nil) + return found +} + func (c *singleCaseSwitchChecker) warn(stmt ast.Stmt) { c.ctx.Warn(stmt, "should rewrite switch statement to if statement") } diff --git a/vendor/github.com/go-critic/go-critic/checkers/sloppyLen_checker.go b/vendor/github.com/go-critic/go-critic/checkers/sloppyLen_checker.go index 45123ec..e12545f 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/sloppyLen_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/sloppyLen_checker.go @@ -4,14 +4,14 @@ import ( "go/ast" "go/token" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcopy" "github.com/go-toolsmith/astfmt" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "sloppyLen" info.Tags = []string{"style"} info.Summary = "Detects usage of `len` when result is obvious or doesn't make sense" @@ -23,14 +23,14 @@ len(arr) < 0 // Doesn't make sense at all` len(arr) > 0 len(arr) == 0` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForExpr(&sloppyLenChecker{ctx: ctx}) }) } type sloppyLenChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *sloppyLenChecker) VisitExpr(x ast.Expr) { diff --git a/vendor/github.com/go-critic/go-critic/checkers/sloppyReassign_checker.go b/vendor/github.com/go-critic/go-critic/checkers/sloppyReassign_checker.go index 1a7c198..8663703 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/sloppyReassign_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/sloppyReassign_checker.go @@ -4,29 +4,29 @@ import ( "go/ast" "go/token" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astcopy" "github.com/go-toolsmith/astequal" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "sloppyReassign" info.Tags = []string{"diagnostic", "experimental"} info.Summary = "Detects suspicious/confusing re-assignments" info.Before = `if err = f(); err != nil { return err }` info.After = `if err := f(); err != nil { return err }` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForStmt(&sloppyReassignChecker{ctx: ctx}) }) } type sloppyReassignChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c 
*sloppyReassignChecker) VisitStmt(stmt ast.Stmt) { @@ -37,12 +37,12 @@ func (c *sloppyReassignChecker) VisitStmt(stmt ast.Stmt) { return } - // TODO(Quasilyte): is handling of multi-value assignments worthwhile? + // TODO(quasilyte): is handling of multi-value assignments worthwhile? if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 { return } - // TODO(Quasilyte): handle not only the simplest, return-only case. + // TODO(quasilyte): handle not only the simplest, return-only case. body := ifStmt.Body.List if len(body) != 1 { return @@ -54,7 +54,7 @@ func (c *sloppyReassignChecker) VisitStmt(stmt ast.Stmt) { return } - // TODO(Quasilyte): handle not only nil comparisons. + // TODO(quasilyte): handle not only nil comparisons. eqToNil := &ast.BinaryExpr{ Op: token.NEQ, X: reAssigned, diff --git a/vendor/github.com/go-critic/go-critic/checkers/sloppyTypeAssert_checker.go b/vendor/github.com/go-critic/go-critic/checkers/sloppyTypeAssert_checker.go new file mode 100644 index 0000000..d89daac --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/sloppyTypeAssert_checker.go @@ -0,0 +1,75 @@ +package checkers + +import ( + "go/ast" + "go/types" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" +) + +func init() { + var info linter.CheckerInfo + info.Name = "sloppyTypeAssert" + info.Tags = []string{"diagnostic", "experimental"} + info.Summary = "Detects redundant type assertions" + info.Before = ` +function f(r io.Reader) interface{} { + return r.(interface{}) +} +` + info.After = ` +function f(r io.Reader) interface{} { + return r +} +` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { + return astwalk.WalkerForExpr(&sloppyTypeAssertChecker{ctx: ctx}) + }) +} + +type sloppyTypeAssertChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *sloppyTypeAssertChecker) VisitExpr(expr ast.Expr) { + assert := astcast.ToTypeAssertExpr(expr) + if assert.Type == nil { + return + } + + toType := c.ctx.TypeOf(expr) + fromType := c.ctx.TypeOf(assert.X) + + if types.Identical(toType, fromType) { + c.warnIdentical(expr) + return + } + + toIface, ok := toType.Underlying().(*types.Interface) + if !ok { + return + } + + switch { + case toIface.Empty(): + c.warnEmpty(expr) + case types.Implements(fromType, toIface): + c.warnImplements(expr, assert.X) + } +} + +func (c *sloppyTypeAssertChecker) warnIdentical(cause ast.Expr) { + c.ctx.Warn(cause, "type assertion from/to types are identical") +} + +func (c *sloppyTypeAssertChecker) warnEmpty(cause ast.Expr) { + c.ctx.Warn(cause, "type assertion to interface{} may be redundant") +} + +func (c *sloppyTypeAssertChecker) warnImplements(cause, val ast.Expr) { + c.ctx.Warn(cause, "type assertion may be redundant as %s always implements selected interface", val) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/sortSlice_checker.go b/vendor/github.com/go-critic/go-critic/checkers/sortSlice_checker.go new file mode 100644 index 0000000..829feea --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/sortSlice_checker.go @@ -0,0 +1,135 @@ +package checkers + +import ( + "go/ast" + "go/token" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/checkers/internal/lintutil" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" + "github.com/go-toolsmith/astequal" + "github.com/go-toolsmith/typep" + 
"golang.org/x/tools/go/ast/astutil" +) + +func init() { + var info linter.CheckerInfo + info.Name = "sortSlice" + info.Tags = []string{"diagnostic", "experimental"} + info.Summary = "Detects suspicious sort.Slice calls" + info.Before = `sort.Slice(xs, func(i, j) bool { return keys[i] < keys[j] })` + info.After = `sort.Slice(kv, func(i, j) bool { return kv[i].key < kv[j].key })` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { + return astwalk.WalkerForExpr(&sortSliceChecker{ctx: ctx}) + }) +} + +type sortSliceChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *sortSliceChecker) VisitExpr(expr ast.Expr) { + call := astcast.ToCallExpr(expr) + if len(call.Args) != 2 { + return + } + switch qualifiedName(call.Fun) { + case "sort.Slice", "sort.SliceStable": + // OK. + default: + return + } + + slice := c.unwrapSlice(call.Args[0]) + lessFunc, ok := call.Args[1].(*ast.FuncLit) + if !ok { + return + } + if !typep.SideEffectFree(c.ctx.TypesInfo, slice) { + return // Don't check unpredictable slice values + } + + ivar, jvar := c.paramIdents(lessFunc.Type) + if ivar == nil || jvar == nil { + return + } + + if len(lessFunc.Body.List) != 1 { + return + } + ret, ok := lessFunc.Body.List[0].(*ast.ReturnStmt) + if !ok { + return + } + cmp := astcast.ToBinaryExpr(astutil.Unparen(ret.Results[0])) + if !typep.SideEffectFree(c.ctx.TypesInfo, cmp) { + return + } + switch cmp.Op { + case token.LSS, token.LEQ, token.GTR, token.GEQ: + // Both cmp.X and cmp.Y are expected to be some expressions + // over the `slice` expression. In the simplest case, + // it's a `slice[i] slice[j]`. + if !c.containsSlice(cmp.X, slice) && !c.containsSlice(cmp.Y, slice) { + c.warnSlice(cmp, slice) + } + + // This one is more about the style, but can reveal potential issue + // or misprint in sorting condition. + // We give a warn if X contains indexing with `i` index and Y + // contains indexing with `j`. + if c.containsIndex(cmp.X, jvar) && c.containsIndex(cmp.Y, ivar) { + c.warnIndex(cmp, ivar, jvar) + } + } +} + +func (c *sortSliceChecker) paramIdents(e *ast.FuncType) (ivar, jvar *ast.Ident) { + // Covers both `i, j int` and `i int, j int`. + idents := make([]*ast.Ident, 0, 2) + for _, field := range e.Params.List { + idents = append(idents, field.Names...) 
+ } + if len(idents) == 2 { + return idents[0], idents[1] + } + return nil, nil +} + +func (c *sortSliceChecker) unwrapSlice(e ast.Expr) ast.Expr { + switch e := e.(type) { + case *ast.ParenExpr: + return c.unwrapSlice(e.X) + case *ast.SliceExpr: + return e.X + default: + return e + } +} + +func (c *sortSliceChecker) containsIndex(e, index ast.Expr) bool { + return lintutil.ContainsNode(e, func(n ast.Node) bool { + indexing, ok := n.(*ast.IndexExpr) + if !ok { + return false + } + return astequal.Expr(indexing.Index, index) + }) +} + +func (c *sortSliceChecker) containsSlice(e, slice ast.Expr) bool { + return lintutil.ContainsNode(e, func(n ast.Node) bool { + return astequal.Node(n, slice) + }) +} + +func (c *sortSliceChecker) warnSlice(cause ast.Node, slice ast.Expr) { + c.ctx.Warn(cause, "cmp func must use %s slice in comparison", slice) +} + +func (c *sortSliceChecker) warnIndex(cause ast.Node, ivar, jvar *ast.Ident) { + c.ctx.Warn(cause, "unusual order of {%s,%s} params in comparison", ivar, jvar) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/sqlQuery_checker.go b/vendor/github.com/go-critic/go-critic/checkers/sqlQuery_checker.go new file mode 100644 index 0000000..fa632cd --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/sqlQuery_checker.go @@ -0,0 +1,167 @@ +package checkers + +import ( + "go/ast" + "go/types" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" +) + +func init() { + var info linter.CheckerInfo + info.Name = "sqlQuery" + info.Tags = []string{"diagnostic", "experimental"} + info.Summary = "Detects issue in Query() and Exec() calls" + info.Before = `_, err := db.Query("UPDATE ...")` + info.After = `_, err := db.Exec("UPDATE ...")` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { + return astwalk.WalkerForStmt(&sqlQueryChecker{ctx: ctx}) + }) +} + +type sqlQueryChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *sqlQueryChecker) VisitStmt(stmt ast.Stmt) { + assign := astcast.ToAssignStmt(stmt) + if len(assign.Lhs) != 2 { // Query() has 2 return values. + return + } + if len(assign.Rhs) != 1 { + return + } + + // If Query() is called, but first return value is ignored, + // there is no way to close/read the returned rows. + // This can cause a connection leak. + if id, ok := assign.Lhs[0].(*ast.Ident); ok && id.Name != "_" { + return + } + + call := astcast.ToCallExpr(assign.Rhs[0]) + funcExpr := astcast.ToSelectorExpr(call.Fun) + if !c.funcIsQuery(funcExpr) { + return + } + + if c.typeHasExecMethod(c.ctx.TypeOf(funcExpr.X)) { + c.warnAndSuggestExec(funcExpr) + } else { + c.warnRowsIgnored(funcExpr) + } +} + +func (c *sqlQueryChecker) funcIsQuery(funcExpr *ast.SelectorExpr) bool { + if funcExpr.Sel == nil { + return false + } + switch funcExpr.Sel.Name { + case "Query", "QueryContext": + // Stdlib and friends. + case "Queryx", "QueryxContext": + // sqlx. + default: + return false + } + + // To avoid false positives (unrelated types can have Query method) + // check that the 1st returned type has Row-like name. 
+ typ, ok := c.ctx.TypeOf(funcExpr).Underlying().(*types.Signature) + if !ok || typ.Results() == nil || typ.Results().Len() != 2 { + return false + } + if !c.typeIsRowsLike(typ.Results().At(0).Type()) { + return false + } + + return true +} + +func (c *sqlQueryChecker) typeIsRowsLike(typ types.Type) bool { + switch typ := typ.(type) { + case *types.Pointer: + return c.typeIsRowsLike(typ.Elem()) + case *types.Named: + return typ.Obj().Name() == "Rows" + default: + return false + } +} + +func (c *sqlQueryChecker) funcIsExec(fn *types.Func) bool { + if fn.Name() != "Exec" { + return false + } + + // Expect exactly 2 results. + sig := fn.Type().(*types.Signature) + if sig.Results() == nil || sig.Results().Len() != 2 { + return false + } + + // Expect at least 1 param and it should be a string (query). + params := sig.Params() + if params == nil || params.Len() == 0 { + return false + } + if typ, ok := params.At(0).Type().(*types.Basic); !ok || typ.Kind() != types.String { + return false + } + + return true +} + +func (c *sqlQueryChecker) typeHasExecMethod(typ types.Type) bool { + switch typ := typ.(type) { + case *types.Struct: + for i := 0; i < typ.NumFields(); i++ { + if c.typeHasExecMethod(typ.Field(i).Type()) { + return true + } + } + case *types.Interface: + for i := 0; i < typ.NumMethods(); i++ { + if c.funcIsExec(typ.Method(i)) { + return true + } + } + case *types.Pointer: + return c.typeHasExecMethod(typ.Elem()) + case *types.Named: + for i := 0; i < typ.NumMethods(); i++ { + if c.funcIsExec(typ.Method(i)) { + return true + } + } + switch ut := typ.Underlying().(type) { + case *types.Interface: + return c.typeHasExecMethod(ut) + case *types.Struct: + // Check embedded types. + for i := 0; i < ut.NumFields(); i++ { + field := ut.Field(i) + if !field.Embedded() { + continue + } + if c.typeHasExecMethod(field.Type()) { + return true + } + } + } + } + + return false +} + +func (c *sqlQueryChecker) warnAndSuggestExec(funcExpr *ast.SelectorExpr) { + c.ctx.Warn(funcExpr, "use %s.Exec() if returned result is not needed", funcExpr.X) +} + +func (c *sqlQueryChecker) warnRowsIgnored(funcExpr *ast.SelectorExpr) { + c.ctx.Warn(funcExpr, "ignoring Query() rows result may lead to a connection leak") +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/stringXbytes_checker.go b/vendor/github.com/go-critic/go-critic/checkers/stringXbytes_checker.go index 7457010..1617831 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/stringXbytes_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/stringXbytes_checker.go @@ -3,32 +3,32 @@ package checkers import ( "go/ast" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/typep" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "stringXbytes" - info.Tags = []string{"style", "experimental"} + info.Tags = []string{"style"} info.Summary = "Detects redundant conversions between string and []byte" info.Before = `copy(b, []byte(s))` info.After = `copy(b, s)` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForExpr(&stringXbytes{ctx: ctx}) }) } type stringXbytes struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *stringXbytes) VisitExpr(expr 
ast.Expr) { x, ok := expr.(*ast.CallExpr) - if !ok || qualifiedName(x.Fun) != "copy" { + if !ok || qualifiedName(x.Fun) != "copy" || len(x.Args) != 2 { return } @@ -36,7 +36,7 @@ func (c *stringXbytes) VisitExpr(expr ast.Expr) { byteCast, ok := src.(*ast.CallExpr) if ok && typep.IsTypeExpr(c.ctx.TypesInfo, byteCast.Fun) && - typep.HasStringProp(c.ctx.TypesInfo.TypeOf(byteCast.Args[0])) { + typep.HasStringProp(c.ctx.TypeOf(byteCast.Args[0])) { c.warn(byteCast, byteCast.Args[0]) } diff --git a/vendor/github.com/go-critic/go-critic/checkers/switchTrue_checker.go b/vendor/github.com/go-critic/go-critic/checkers/switchTrue_checker.go index 3b27662..5390360 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/switchTrue_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/switchTrue_checker.go @@ -3,12 +3,12 @@ package checkers import ( "go/ast" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "switchTrue" info.Tags = []string{"style"} info.Summary = "Detects switch-over-bool statements that use explicit `true` tag value" @@ -21,14 +21,14 @@ switch { case x > y: }` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForStmt(&switchTrueChecker{ctx: ctx}) }) } type switchTrueChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *switchTrueChecker) VisitStmt(stmt ast.Stmt) { diff --git a/vendor/github.com/go-critic/go-critic/checkers/truncateCmp_checker.go b/vendor/github.com/go-critic/go-critic/checkers/truncateCmp_checker.go index f4cb9e8..2f7b431 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/truncateCmp_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/truncateCmp_checker.go @@ -5,17 +5,17 @@ import ( "go/token" "go/types" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astp" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "truncateCmp" info.Tags = []string{"diagnostic", "experimental"} - info.Params = lintpack.CheckerParams{ + info.Params = linter.CheckerParams{ "skipArchDependent": { Value: true, Usage: "whether to skip int/uint/uintptr types", @@ -31,7 +31,7 @@ func f(x int32, int16) bool { return x < int32(y) }` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { c := &truncateCmpChecker{ctx: ctx} c.skipArchDependent = info.Params.Bool("skipArchDependent") return astwalk.WalkerForExpr(c) @@ -40,7 +40,7 @@ func f(x int32, int16) bool { type truncateCmpChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext skipArchDependent bool } @@ -87,11 +87,11 @@ func (c *truncateCmpChecker) checkCmp(cmpX, cmpY ast.Expr) { y := cmpY // Check that both x and y are signed or unsigned int-typed. 
- xtyp, ok := c.ctx.TypesInfo.TypeOf(x).Underlying().(*types.Basic) + xtyp, ok := c.ctx.TypeOf(x).Underlying().(*types.Basic) if !ok || xtyp.Info()&types.IsInteger == 0 { return } - ytyp, ok := c.ctx.TypesInfo.TypeOf(y).Underlying().(*types.Basic) + ytyp, ok := c.ctx.TypeOf(y).Underlying().(*types.Basic) if !ok || xtyp.Info() != ytyp.Info() { return } diff --git a/vendor/github.com/go-critic/go-critic/checkers/typeAssertChain_checker.go b/vendor/github.com/go-critic/go-critic/checkers/typeAssertChain_checker.go index c0c42e3..2940e57 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/typeAssertChain_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/typeAssertChain_checker.go @@ -4,16 +4,16 @@ import ( "go/ast" "go/token" + "github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astequal" "github.com/go-toolsmith/astp" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "typeAssertChain" info.Tags = []string{"style", "experimental"} info.Summary = "Detects repeated type assertions and suggests to replace them with type switch statement" @@ -35,14 +35,14 @@ default: // Code C, uses x. }` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForStmt(&typeAssertChainChecker{ctx: ctx}) }) } type typeAssertChainChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext cause *ast.IfStmt visited map[*ast.IfStmt]bool diff --git a/vendor/github.com/go-critic/go-critic/checkers/typeDefFirst_checker.go b/vendor/github.com/go-critic/go-critic/checkers/typeDefFirst_checker.go new file mode 100644 index 0000000..1079eae --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/typeDefFirst_checker.go @@ -0,0 +1,88 @@ +package checkers + +import ( + "go/ast" + "go/token" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "typeDefFirst" + info.Tags = []string{"style", "experimental"} + info.Summary = "Detects method declarations preceding the type definition itself" + info.Before = ` +func (r rec) Method() {} +type rec struct{} +` + info.After = ` +type rec struct{} +func (r rec) Method() {} +` + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { + return &typeDefFirstChecker{ + ctx: ctx, + } + }) +} + +type typeDefFirstChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + trackedTypes map[string]bool +} + +func (c *typeDefFirstChecker) WalkFile(f *ast.File) { + if len(f.Decls) == 0 { + return + } + + c.trackedTypes = make(map[string]bool) + for _, decl := range f.Decls { + c.walkDecl(decl) + } +} + +func (c *typeDefFirstChecker) walkDecl(decl ast.Decl) { + switch decl := decl.(type) { + case *ast.FuncDecl: + if decl.Recv == nil { + return + } + receiver := decl.Recv.List[0] + typeName := c.receiverType(receiver.Type) + c.trackedTypes[typeName] = true + + case *ast.GenDecl: + if decl.Tok != token.TYPE { + return + } + for _, spec := range decl.Specs { + spec, ok := spec.(*ast.TypeSpec) + if !ok { + return + } + typeName := spec.Name.Name + if 
val, ok := c.trackedTypes[typeName]; ok && val { + c.warn(decl, typeName) + } + } + } +} + +func (c *typeDefFirstChecker) receiverType(e ast.Expr) string { + switch e := e.(type) { + case *ast.StarExpr: + return c.receiverType(e.X) + case *ast.Ident: + return e.Name + default: + panic("unreachable") + } +} + +func (c *typeDefFirstChecker) warn(cause ast.Node, typeName string) { + c.ctx.Warn(cause, "definition of type '%s' should appear before its methods", typeName) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/typeSwitchVar_checker.go b/vendor/github.com/go-critic/go-critic/checkers/typeSwitchVar_checker.go index a113597..2ade4a9 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/typeSwitchVar_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/typeSwitchVar_checker.go @@ -3,15 +3,15 @@ package checkers import ( "go/ast" + "github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astequal" "github.com/go-toolsmith/astp" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "typeSwitchVar" info.Tags = []string{"style"} info.Summary = "Detects type switches that can benefit from type guard clause with variable" @@ -34,18 +34,20 @@ default: return 0 }` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForStmt(&typeSwitchVarChecker{ctx: ctx}) }) } type typeSwitchVarChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext + count int } func (c *typeSwitchVarChecker) VisitStmt(stmt ast.Stmt) { if stmt, ok := stmt.(*ast.TypeSwitchStmt); ok { + c.count = 0 c.checkTypeSwitch(stmt) } } @@ -61,7 +63,7 @@ func (c *typeSwitchVarChecker) checkTypeSwitch(root *ast.TypeSwitchStmt) { return // Give up: can't handle shadowing without object } - for i, clause := range root.Body.List { + for _, clause := range root.Body.List { clause := clause.(*ast.CaseClause) // Multiple types in a list mean that assert.X will have // a type of interface{} inside clause body. 
@@ -76,13 +78,20 @@ func (c *typeSwitchVarChecker) checkTypeSwitch(root *ast.TypeSwitchStmt) { return astequal.Node(&assert1, x) }) if object == c.ctx.TypesInfo.ObjectOf(identOf(assert2)) { - c.warn(root, i) + c.count++ break } } } + if c.count > 0 { + c.warn(root) + } } -func (c *typeSwitchVarChecker) warn(n ast.Node, caseIndex int) { - c.ctx.Warn(n, "case %d can benefit from type switch with assignment", caseIndex) +func (c *typeSwitchVarChecker) warn(n ast.Node) { + msg := "case" + if c.count > 1 { + msg = "cases" + } + c.ctx.Warn(n, "%d "+msg+" can benefit from type switch with assignment", c.count) } diff --git a/vendor/github.com/go-critic/go-critic/checkers/typeUnparen_checker.go b/vendor/github.com/go-critic/go-critic/checkers/typeUnparen_checker.go index a17c77b..620d2d7 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/typeUnparen_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/typeUnparen_checker.go @@ -3,30 +3,30 @@ package checkers import ( "go/ast" + "github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcopy" "github.com/go-toolsmith/astp" "golang.org/x/tools/go/ast/astutil" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "typeUnparen" info.Tags = []string{"style", "opinionated"} info.Summary = "Detects unneded parenthesis inside type expressions and suggests to remove them" info.Before = `type foo [](func([](func())))` info.After = `type foo []func([]func())` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForTypeExpr(&typeUnparenChecker{ctx: ctx}, ctx.TypesInfo) }) } type typeUnparenChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *typeUnparenChecker) VisitTypeExpr(x ast.Expr) { diff --git a/vendor/github.com/go-critic/go-critic/checkers/underef_checker.go b/vendor/github.com/go-critic/go-critic/checkers/underef_checker.go index dfc6077..6f12e2e 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/underef_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/underef_checker.go @@ -4,17 +4,17 @@ import ( "go/ast" "go/types" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astp" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "underef" info.Tags = []string{"style"} - info.Params = lintpack.CheckerParams{ + info.Params = linter.CheckerParams{ "skipRecvDeref": { Value: true, Usage: "whether to skip (*x).method() calls where x is a pointer receiver", @@ -28,7 +28,7 @@ v := (*a)[5] // only if a is array` k.field = 5 v := a[5]` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { c := &underefChecker{ctx: ctx} c.skipRecvDeref = info.Params.Bool("skipRecvDeref") return astwalk.WalkerForExpr(c) @@ -37,7 +37,7 @@ v := a[5]` type underefChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx 
*linter.CheckerContext skipRecvDeref bool } @@ -69,7 +69,7 @@ func (c *underefChecker) VisitExpr(expr ast.Expr) { } func (c *underefChecker) isPtrRecvMethodCall(fn *ast.Ident) bool { - typ, ok := c.ctx.TypesInfo.TypeOf(fn).(*types.Signature) + typ, ok := c.ctx.TypeOf(fn).(*types.Signature) if ok && typ != nil && typ.Recv() != nil { _, ok := typ.Recv().Type().(*types.Pointer) return ok @@ -104,7 +104,7 @@ func (c *underefChecker) warnArray(expr *ast.IndexExpr) { // checkStarExpr checks if ast.StarExpr could be simplified. func (c *underefChecker) checkStarExpr(expr *ast.StarExpr) bool { - typ, ok := c.ctx.TypesInfo.TypeOf(expr.X).Underlying().(*types.Pointer) + typ, ok := c.ctx.TypeOf(expr.X).Underlying().(*types.Pointer) if !ok { return false } @@ -118,7 +118,7 @@ func (c *underefChecker) checkStarExpr(expr *ast.StarExpr) bool { } func (c *underefChecker) checkArray(expr *ast.StarExpr) bool { - typ, ok := c.ctx.TypesInfo.TypeOf(expr.X).(*types.Pointer) + typ, ok := c.ctx.TypeOf(expr.X).(*types.Pointer) if !ok { return false } diff --git a/vendor/github.com/go-critic/go-critic/checkers/unlabelStmt_checker.go b/vendor/github.com/go-critic/go-critic/checkers/unlabelStmt_checker.go index d90c65c..b9b8dff 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/unlabelStmt_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/unlabelStmt_checker.go @@ -4,13 +4,13 @@ import ( "go/ast" "go/token" + "github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "unlabelStmt" info.Tags = []string{"style", "experimental"} info.Summary = "Detects redundant statement labels" @@ -28,21 +28,21 @@ for x := range xs { } }` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForStmt(&unlabelStmtChecker{ctx: ctx}) }) } type unlabelStmtChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *unlabelStmtChecker) EnterFunc(fn *ast.FuncDecl) bool { if fn.Body == nil { return false } - // TODO(Quasilyte): should not do additional traversal here. + // TODO(quasilyte): should not do additional traversal here. // For now, skip all functions that contain goto statement. 
return !lintutil.ContainsNode(fn.Body, func(n ast.Node) bool { br, ok := n.(*ast.BranchStmt) diff --git a/vendor/github.com/go-critic/go-critic/checkers/unlambda_checker.go b/vendor/github.com/go-critic/go-critic/checkers/unlambda_checker.go index 9e01299..33e1be9 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/unlambda_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/unlambda_checker.go @@ -2,30 +2,33 @@ package checkers import ( "go/ast" + "go/token" "go/types" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/checkers/internal/lintutil" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astequal" + "github.com/go-toolsmith/typep" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "unlambda" info.Tags = []string{"style"} info.Summary = "Detects function literals that can be simplified" info.Before = `func(x int) int { return fn(x) }` info.After = `fn` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForExpr(&unlambdaChecker{ctx: ctx}) }) } type unlambdaChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *unlambdaChecker) VisitExpr(x ast.Expr) { @@ -47,20 +50,38 @@ func (c *unlambdaChecker) VisitExpr(x ast.Expr) { if isBuiltin(callable) { return // See #762 } - if id, ok := result.Fun.(*ast.Ident); ok { - obj := c.ctx.TypesInfo.ObjectOf(id) - if _, ok := obj.(*types.Var); ok { - return // See #888 + hasVars := lintutil.ContainsNode(result.Fun, func(n ast.Node) bool { + id, ok := n.(*ast.Ident) + if !ok { + return false } + obj, ok := c.ctx.TypesInfo.ObjectOf(id).(*types.Var) + if !ok { + return false + } + // Permit only non-pointer struct method values. + return !typep.IsStruct(obj.Type().Underlying()) + }) + if hasVars { + return // See #888 #1007 } - fnType := c.ctx.TypesInfo.TypeOf(fn) - resultType := c.ctx.TypesInfo.TypeOf(result.Fun) + + fnType := c.ctx.TypeOf(fn) + resultType := c.ctx.TypeOf(result.Fun) if !types.Identical(fnType, resultType) { return } // Now check that all arguments match the parameters. 
n := 0 for _, params := range fn.Type.Params.List { + if _, ok := params.Type.(*ast.Ellipsis); ok { + if result.Ellipsis == token.NoPos { + return + } + n++ + continue + } + for _, id := range params.Names { if !astequal.Expr(id, result.Args[n]) { return diff --git a/vendor/github.com/go-critic/go-critic/checkers/unnamedResult_checker.go b/vendor/github.com/go-critic/go-critic/checkers/unnamedResult_checker.go index 0942325..e86ccb0 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/unnamedResult_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/unnamedResult_checker.go @@ -4,15 +4,15 @@ import ( "go/ast" "go/types" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "unnamedResult" info.Tags = []string{"style", "opinionated", "experimental"} - info.Params = lintpack.CheckerParams{ + info.Params = linter.CheckerParams{ "checkExported": { Value: false, Usage: "whether to check exported functions", @@ -22,7 +22,7 @@ func init() { info.Before = `func f() (float64, float64)` info.After = `func f() (x, y float64)` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { c := &unnamedResultChecker{ctx: ctx} c.checkExported = info.Params.Bool("checkExported") return astwalk.WalkerForFuncDecl(c) @@ -31,7 +31,7 @@ func init() { type unnamedResultChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext checkExported bool } @@ -48,7 +48,7 @@ func (c *unnamedResultChecker) VisitFuncDecl(decl *ast.FuncDecl) { return // Skip named results } - typeName := func(x ast.Expr) string { return c.typeName(c.ctx.TypesInfo.TypeOf(x)) } + typeName := func(x ast.Expr) string { return c.typeName(c.ctx.TypeOf(x)) } isError := func(x ast.Expr) bool { return qualifiedName(x) == "error" } isBool := func(x ast.Expr) bool { return qualifiedName(x) == "bool" } diff --git a/vendor/github.com/go-critic/go-critic/checkers/unnecessaryBlock_checker.go b/vendor/github.com/go-critic/go-critic/checkers/unnecessaryBlock_checker.go index e5dc45f..acc9fad 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/unnecessaryBlock_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/unnecessaryBlock_checker.go @@ -4,12 +4,12 @@ import ( "go/ast" "go/token" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "unnecessaryBlock" info.Tags = []string{"style", "opinionated", "experimental"} info.Summary = "Detects unnecessary braced statement blocks" @@ -22,14 +22,14 @@ x := 1 x := 1 print(x)` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForStmtList(&unnecessaryBlockChecker{ctx: ctx}) }) } type unnecessaryBlockChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *unnecessaryBlockChecker) VisitStmtList(statements []ast.Stmt) { diff --git 
a/vendor/github.com/go-critic/go-critic/checkers/unnecessaryDefer_checker.go b/vendor/github.com/go-critic/go-critic/checkers/unnecessaryDefer_checker.go new file mode 100644 index 0000000..ee706e0 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/unnecessaryDefer_checker.go @@ -0,0 +1,111 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astfmt" +) + +func init() { + var info linter.CheckerInfo + info.Name = "unnecessaryDefer" + info.Tags = []string{"diagnostic", "experimental"} + info.Summary = "Detects redundantly deferred calls" + info.Before = ` +func() { + defer os.Remove(filename) +}` + info.After = ` +func() { + os.Remove(filename) +}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { + return astwalk.WalkerForFuncDecl(&unnecessaryDeferChecker{ctx: ctx}) + }) +} + +type unnecessaryDeferChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + isFunc bool +} + +// Visit implements the ast.Visitor. This visitor keeps track of the block +// statement belongs to a function or any other block. If the block is not a +// function and ends with a defer statement that should be OK since it's +// defering the outer function. +func (c *unnecessaryDeferChecker) Visit(node ast.Node) ast.Visitor { + switch n := node.(type) { + case *ast.FuncDecl, *ast.FuncLit: + c.isFunc = true + case *ast.BlockStmt: + c.checkDeferBeforeReturn(n) + default: + c.isFunc = false + } + + return c +} + +func (c *unnecessaryDeferChecker) VisitFuncDecl(funcDecl *ast.FuncDecl) { + // We always start as a function (*ast.FuncDecl.Body passed) + c.isFunc = true + + ast.Walk(c, funcDecl.Body) +} + +func (c *unnecessaryDeferChecker) checkDeferBeforeReturn(funcDecl *ast.BlockStmt) { + // Check if we have an explicit return or if it's just the end of the scope. + explicitReturn := false + retIndex := len(funcDecl.List) + for i, stmt := range funcDecl.List { + retStmt, ok := stmt.(*ast.ReturnStmt) + if !ok { + continue + } + explicitReturn = true + if !c.isTrivialReturn(retStmt) { + continue + } + retIndex = i + break + } + if retIndex == 0 { + return + } + + if deferStmt, ok := funcDecl.List[retIndex-1].(*ast.DeferStmt); ok { + // If the block is a function and ending with return or if we have an + // explicit return in any other block we should warn about + // unnecessary defer. + if c.isFunc || explicitReturn { + c.warn(deferStmt) + } + } +} + +func (c *unnecessaryDeferChecker) isTrivialReturn(ret *ast.ReturnStmt) bool { + for _, e := range ret.Results { + if !c.isConstExpr(e) { + return false + } + } + return true +} + +func (c *unnecessaryDeferChecker) isConstExpr(e ast.Expr) bool { + return c.ctx.TypesInfo.Types[e].Value != nil +} + +func (c *unnecessaryDeferChecker) warn(deferStmt *ast.DeferStmt) { + s := astfmt.Sprint(deferStmt) + if fnlit, ok := deferStmt.Call.Fun.(*ast.FuncLit); ok { + // To avoid long and multi-line warning messages, + // collapse the function literals. 
+ s = "defer " + astfmt.Sprint(fnlit.Type) + "{...}(...)" + } + c.ctx.Warn(deferStmt, "%s is placed just before return", s) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/unslice_checker.go b/vendor/github.com/go-critic/go-critic/checkers/unslice_checker.go index 06d9081..e026b05 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/unslice_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/unslice_checker.go @@ -4,13 +4,13 @@ import ( "go/ast" "go/types" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astequal" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "unslice" info.Tags = []string{"style"} info.Summary = "Detects slice expressions that can be simplified to sliced expression itself" @@ -21,14 +21,14 @@ copy(b[:], values...) // b is []byte` f(s) copy(b, values...)` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForExpr(&unsliceChecker{ctx: ctx}) }) } type unsliceChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *unsliceChecker) VisitExpr(expr ast.Expr) { @@ -46,7 +46,7 @@ func (c *unsliceChecker) unslice(expr ast.Expr) ast.Expr { // because it's only permitted if expr.High is not nil. return expr } - switch c.ctx.TypesInfo.TypeOf(slice.X).(type) { + switch c.ctx.TypeOf(slice.X).(type) { case *types.Slice, *types.Basic: // Basic kind catches strings, Slice cathes everything else. return c.unslice(slice.X) diff --git a/vendor/github.com/go-critic/go-critic/checkers/utils.go b/vendor/github.com/go-critic/go-critic/checkers/utils.go index ba4777d..b71f24d 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/utils.go +++ b/vendor/github.com/go-critic/go-critic/checkers/utils.go @@ -5,7 +5,7 @@ import ( "go/types" "strings" - "github.com/go-lintpack/lintpack" + "github.com/go-critic/go-critic/framework/linter" ) // goStdlib contains `go list std` command output list. @@ -247,7 +247,7 @@ func isExampleTestFunc(fn *ast.FuncDecl) bool { } // isUnitTestFunc reports whether FuncDecl declares testing function. 
-func isUnitTestFunc(ctx *lintpack.CheckerContext, fn *ast.FuncDecl) bool { +func isUnitTestFunc(ctx *linter.CheckerContext, fn *ast.FuncDecl) bool { if !strings.HasPrefix(fn.Name.Name, "Test") { return false } diff --git a/vendor/github.com/go-critic/go-critic/checkers/valSwap_checker.go b/vendor/github.com/go-critic/go-critic/checkers/valSwap_checker.go index ab27f92..4dd494e 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/valSwap_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/valSwap_checker.go @@ -4,16 +4,16 @@ import ( "go/ast" "go/token" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astequal" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "valSwap" - info.Tags = []string{"style", "experimental"} + info.Tags = []string{"style"} info.Summary = "Detects value swapping code that are not using parallel assignment" info.Before = ` tmp := *x @@ -21,14 +21,14 @@ tmp := *x *y = tmp` info.After = `*x, *y = *y, *x` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForStmtList(&valSwapChecker{ctx: ctx}) }) } type valSwapChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *valSwapChecker) VisitStmtList(list []ast.Stmt) { diff --git a/vendor/github.com/go-critic/go-critic/checkers/weakCond_checker.go b/vendor/github.com/go-critic/go-critic/checkers/weakCond_checker.go index fcd9aee..eff6c30 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/weakCond_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/weakCond_checker.go @@ -4,9 +4,9 @@ import ( "go/ast" "go/token" + "github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astequal" "github.com/go-toolsmith/typep" @@ -14,21 +14,21 @@ import ( ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "weakCond" info.Tags = []string{"diagnostic", "experimental"} info.Summary = "Detects conditions that are unsafe due to not being exhaustive" info.Before = `xs != nil && xs[0] != nil` info.After = `len(xs) != 0 && xs[0] != nil` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForExpr(&weakCondChecker{ctx: ctx}) }) } type weakCondChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *weakCondChecker) VisitExpr(expr ast.Expr) { @@ -46,7 +46,7 @@ func (c *weakCondChecker) VisitExpr(expr ast.Expr) { // lhs is `x nil` x := lhs.X - if !typep.IsSlice(c.ctx.TypesInfo.TypeOf(x)) { + if !typep.IsSlice(c.ctx.TypeOf(x)) { return } if astcast.ToIdent(lhs.Y).Name != "nil" { diff --git a/vendor/github.com/go-critic/go-critic/checkers/whyNoLint_checker.go b/vendor/github.com/go-critic/go-critic/checkers/whyNoLint_checker.go index 52fefb8..9c84ccd 100644 --- 
a/vendor/github.com/go-critic/go-critic/checkers/whyNoLint_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/whyNoLint_checker.go @@ -5,12 +5,12 @@ import ( "regexp" "strings" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" ) func init() { - info := lintpack.CheckerInfo{ + info := linter.CheckerInfo{ Name: "whyNoLint", Tags: []string{"style", "experimental"}, Summary: "Ensures that `//nolint` comments include an explanation", @@ -19,7 +19,7 @@ func init() { } re := regexp.MustCompile(`^// *nolint(?::[^ ]+)? *(.*)$`) - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForComment(&whyNoLintChecker{ ctx: ctx, re: re, @@ -30,7 +30,7 @@ func init() { type whyNoLintChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext re *regexp.Regexp } @@ -44,7 +44,7 @@ func (c whyNoLintChecker) VisitComment(cg *ast.CommentGroup) { continue } - if s := sl[1]; !strings.HasPrefix(s, "//") || len(strings.TrimPrefix(s, "//")) == 0 { + if s := sl[1]; !strings.HasPrefix(s, "//") || strings.TrimPrefix(s, "//") == "" { c.ctx.Warn(cg, "include an explanation for nolint directive") return } diff --git a/vendor/github.com/go-critic/go-critic/checkers/wrapperFunc_checker.go b/vendor/github.com/go-critic/go-critic/checkers/wrapperFunc_checker.go index bba82e5..8bd2ad4 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/wrapperFunc_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/wrapperFunc_checker.go @@ -6,20 +6,20 @@ import ( "go/types" "strings" - "github.com/go-lintpack/lintpack" - "github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "wrapperFunc" - info.Tags = []string{"style", "experimental"} + info.Tags = []string{"style"} info.Summary = "Detects function calls that can be replaced with convenience wrappers" info.Before = `wg.Add(-1)` info.After = `wg.Done()` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { type arg struct { index int value string @@ -81,6 +81,11 @@ func init() { "bytes.Map => bytes.ToTitle": { {0, "unicode.ToTitle"}, }, + + "draw.DrawMask => draw.Draw": { + {4, "nil"}, + {5, "image.Point{}"}, + }, } matchers := make(map[string]*matcher) @@ -185,7 +190,7 @@ func init() { } } - typ := c.ctx.TypesInfo.TypeOf(x) + typ := c.ctx.TypeOf(x) tn, ok := typ.(*types.Named) if !ok { return "" @@ -203,7 +208,7 @@ func init() { type wrapperFuncChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext findSuggestion func(*ast.CallExpr) string } diff --git a/vendor/github.com/go-critic/go-critic/checkers/yodaStyleExpr_checker.go b/vendor/github.com/go-critic/go-critic/checkers/yodaStyleExpr_checker.go index ddd3099..b4672fc 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/yodaStyleExpr_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/yodaStyleExpr_checker.go @@ -4,28 +4,28 @@ import ( "go/ast" "go/token" - "github.com/go-lintpack/lintpack" - 
"github.com/go-lintpack/lintpack/astwalk" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcopy" "github.com/go-toolsmith/astp" ) func init() { - var info lintpack.CheckerInfo + var info linter.CheckerInfo info.Name = "yodaStyleExpr" info.Tags = []string{"style", "experimental"} info.Summary = "Detects Yoda style expressions and suggests to replace them" info.Before = `return nil != ptr` info.After = `return ptr != nil` - collection.AddChecker(&info, func(ctx *lintpack.CheckerContext) lintpack.FileWalker { + collection.AddChecker(&info, func(ctx *linter.CheckerContext) linter.FileWalker { return astwalk.WalkerForLocalExpr(&yodaStyleExprChecker{ctx: ctx}) }) } type yodaStyleExprChecker struct { astwalk.WalkHandler - ctx *lintpack.CheckerContext + ctx *linter.CheckerContext } func (c *yodaStyleExprChecker) VisitLocalExpr(expr ast.Expr) { diff --git a/vendor/github.com/go-lintpack/lintpack/checkers_db.go b/vendor/github.com/go-critic/go-critic/framework/linter/checkers_db.go similarity index 97% rename from vendor/github.com/go-lintpack/lintpack/checkers_db.go rename to vendor/github.com/go-critic/go-critic/framework/linter/checkers_db.go index 83d41b4..191da41 100644 --- a/vendor/github.com/go-lintpack/lintpack/checkers_db.go +++ b/vendor/github.com/go-critic/go-critic/framework/linter/checkers_db.go @@ -1,4 +1,4 @@ -package lintpack +package linter import ( "fmt" @@ -116,7 +116,7 @@ func validateCheckerName(info *CheckerInfo) error { } func validateCheckerDocumentation(info *CheckerInfo) error { - // TODO(Quasilyte): validate documentation. + // TODO(quasilyte): validate documentation. return nil } diff --git a/vendor/github.com/go-lintpack/lintpack/context.go b/vendor/github.com/go-critic/go-critic/framework/linter/context.go similarity index 97% rename from vendor/github.com/go-lintpack/lintpack/context.go rename to vendor/github.com/go-critic/go-critic/framework/linter/context.go index 8671e17..6e108ab 100644 --- a/vendor/github.com/go-lintpack/lintpack/context.go +++ b/vendor/github.com/go-critic/go-critic/framework/linter/context.go @@ -1,4 +1,4 @@ -package lintpack +package linter import ( "go/ast" diff --git a/vendor/github.com/go-lintpack/lintpack/lintpack.go b/vendor/github.com/go-critic/go-critic/framework/linter/lintpack.go similarity index 89% rename from vendor/github.com/go-lintpack/lintpack/lintpack.go rename to vendor/github.com/go-critic/go-critic/framework/linter/lintpack.go index 28c3a63..bb97df1 100644 --- a/vendor/github.com/go-lintpack/lintpack/lintpack.go +++ b/vendor/github.com/go-critic/go-critic/framework/linter/lintpack.go @@ -1,7 +1,6 @@ -package lintpack +package linter import ( - "fmt" "go/ast" "go/token" "go/types" @@ -25,7 +24,7 @@ type CheckerCollection struct { // constructor will not be called. func (coll *CheckerCollection) AddChecker(info *CheckerInfo, constructor func(*CheckerContext) FileWalker) { if coll == nil { - panic(fmt.Sprintf("adding checker to a nil collection")) + panic("adding checker to a nil collection") } info.Collection = coll addChecker(info, constructor) @@ -239,6 +238,27 @@ func (ctx *CheckerContext) Warn(node ast.Node, format string, args ...interface{ }) } +// UnknownType is a special sentinel value that is returned from the CheckerContext.TypeOf +// method instead of the nil type. +var UnknownType types.Type = types.Typ[types.Invalid] + +// TypeOf returns the type of expression x. +// +// Unlike TypesInfo.TypeOf, it never returns nil. 
+// Instead, it returns the Invalid type as a sentinel UnknownType value. +func (ctx *CheckerContext) TypeOf(x ast.Expr) types.Type { + typ := ctx.TypesInfo.TypeOf(x) + if typ != nil { + return typ + } + // Usually it means that some incorrect type info was loaded + // or the analyzed package was only partially (?) correct. + // To avoid nil pointer panics we can return a sentinel value + // that will fail most type assertions as well as kind checks + // (if the call side expects a *types.Basic). + return UnknownType +} + // FileWalker is an interface every checker should implement. // // The WalkFile method is executed for every Go file inside the diff --git a/vendor/github.com/go-lintpack/lintpack/.travis.yml b/vendor/github.com/go-lintpack/lintpack/.travis.yml deleted file mode 100644 index 41a0cba..0000000 --- a/vendor/github.com/go-lintpack/lintpack/.travis.yml +++ /dev/null @@ -1,7 +0,0 @@ -language: go -go: - - 1.x -install: - - # Prevent default install action "go get -t -v ./...". -script: - - make ci diff --git a/vendor/github.com/go-lintpack/lintpack/Makefile b/vendor/github.com/go-lintpack/lintpack/Makefile deleted file mode 100644 index 63f21d2..0000000 --- a/vendor/github.com/go-lintpack/lintpack/Makefile +++ /dev/null @@ -1,14 +0,0 @@ -.PHONY: test ci - -%: # stubs to get makefile param for `test-checker` command - @: # see: https://stackoverflow.com/a/6273809/433041 - -build: - go build cmd/lintpack/build.go cmd/lintpack/main.go - -test: - go test -v -count=1 ./... - -ci: - go get -t -v ./... - go test -v -count=1 ./... diff --git a/vendor/github.com/go-lintpack/lintpack/README.md b/vendor/github.com/go-lintpack/lintpack/README.md deleted file mode 100644 index 5702228..0000000 --- a/vendor/github.com/go-lintpack/lintpack/README.md +++ /dev/null @@ -1,32 +0,0 @@ -[![Build Status][travis-image]][travis-url] -[![Go Report Card][go-report-image]][go-report-url] - -[travis-image]: https://travis-ci.org/go-critic/go-critic.svg?branch=master -[travis-url]: https://travis-ci.org/go-critic/go-critic -[go-report-image]: https://goreportcard.com/badge/github.com/go-critic/go-critic -[go-report-url]: https://goreportcard.com/report/github.com/go-critic/go-critic - -## Quick start / Installation / Usage - -Install `lintpack`: - -```bash -go get -v -u github.com/go-lintpack/lintpack/... -``` - -Install checkers from [go-critic/checkers](https://github.com/go-critic/checkers): - -```bash -# You'll need to have sources under your Go workspace first: -go get -v -u github.com/go-critic/checkers -# Now build a linter that includes all checks from that package: -lintpack build -o gocritic github.com/go-critic/checkers -# Executable gocritic is created and can be used as a standalone linter. -``` - -Produced binary includes basic help as well as supported checks documentation. - -So, the process is simple: - -* Get the `lintpack` linter builder -* Build linter from checks implemented in different repos, by various vendors diff --git a/vendor/github.com/go-lintpack/lintpack/doc.go b/vendor/github.com/go-lintpack/lintpack/doc.go deleted file mode 100644 index 4aba342..0000000 --- a/vendor/github.com/go-lintpack/lintpack/doc.go +++ /dev/null @@ -1,5 +0,0 @@ -// Package lintpack provides shared API between the linter and its checkers. -// -// Linter is usually implemented by creating instances of registered checkers. -// Checkers are registered by the AddChecker call. 
-package lintpack diff --git a/vendor/github.com/go-lintpack/lintpack/go.mod b/vendor/github.com/go-lintpack/lintpack/go.mod deleted file mode 100644 index b2e4cd9..0000000 --- a/vendor/github.com/go-lintpack/lintpack/go.mod +++ /dev/null @@ -1,11 +0,0 @@ -module github.com/go-lintpack/lintpack - -require ( - github.com/go-toolsmith/astequal v0.0.0-20180903214952-dcb477bfacd6 - github.com/go-toolsmith/astfmt v0.0.0-20180903215011-8f8ee99c3086 - github.com/go-toolsmith/astp v0.0.0-20180903215135-0af7e3c24f30 - github.com/go-toolsmith/pkgload v0.0.0-20181119091011-e9e65178eee8 - github.com/google/go-cmp v0.2.0 - github.com/logrusorgru/aurora v0.0.0-20181002194514-a7b3b318ed4e - golang.org/x/tools v0.0.0-20181117154741-2ddaf7f79a09 -) diff --git a/vendor/github.com/go-lintpack/lintpack/go.sum b/vendor/github.com/go-lintpack/lintpack/go.sum deleted file mode 100644 index bd9f5dc..0000000 --- a/vendor/github.com/go-lintpack/lintpack/go.sum +++ /dev/null @@ -1,14 +0,0 @@ -github.com/go-toolsmith/astequal v0.0.0-20180903214952-dcb477bfacd6 h1:aTBUNRTatDDU24gbOEKEoLiDwxtc98ga6K/iMTm6fvs= -github.com/go-toolsmith/astequal v0.0.0-20180903214952-dcb477bfacd6/go.mod h1:H+xSiq0+LtiDC11+h1G32h7Of5O3CYFJ99GVbS5lDKY= -github.com/go-toolsmith/astfmt v0.0.0-20180903215011-8f8ee99c3086 h1:EIMuvbE9fbtQtimdLe5yeXjuC5CeKbQt8zH6GwtIrhM= -github.com/go-toolsmith/astfmt v0.0.0-20180903215011-8f8ee99c3086/go.mod h1:mP93XdblcopXwlyN4X4uodxXQhldPGZbcEJIimQHrkg= -github.com/go-toolsmith/astp v0.0.0-20180903215135-0af7e3c24f30 h1:zRJPftZJNLPDiOtvYbFRwjSbaJAcVOf80TeEmWGe2kQ= -github.com/go-toolsmith/astp v0.0.0-20180903215135-0af7e3c24f30/go.mod h1:SV2ur98SGypH1UjcPpCatrV5hPazG6+IfNHbkDXBRrk= -github.com/go-toolsmith/pkgload v0.0.0-20181119091011-e9e65178eee8 h1:vVouagbdmqTVlCIAxpyYsNNTbkKZ3V66VpKOLU/s6W4= -github.com/go-toolsmith/pkgload v0.0.0-20181119091011-e9e65178eee8/go.mod h1:WoMrjiy4zvdS+Bg6z9jZH82QXwkcgCBX6nOfnmdaHks= -github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ= -github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= -github.com/logrusorgru/aurora v0.0.0-20181002194514-a7b3b318ed4e h1:9MlwzLdW7QSDrhDjFlsEYmxpFyIoXmYRon3dt0io31k= -github.com/logrusorgru/aurora v0.0.0-20181002194514-a7b3b318ed4e/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= -golang.org/x/tools v0.0.0-20181117154741-2ddaf7f79a09 h1:QJFxMApN9XdBRwtqXfOidB2azUCA4ziuiMTrQ1uBGxw= -golang.org/x/tools v0.0.0-20181117154741-2ddaf7f79a09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= diff --git a/vendor/github.com/go-toolsmith/typep/.travis.yml b/vendor/github.com/go-toolsmith/typep/.travis.yml index c32ac00..d3ff3cc 100644 --- a/vendor/github.com/go-toolsmith/typep/.travis.yml +++ b/vendor/github.com/go-toolsmith/typep/.travis.yml @@ -5,5 +5,5 @@ install: - # Prevent default install action "go get -t -v ./...". script: - go get -t -v ./... - - go tool vet . + - go vet ./... - go test -v -race ./... 
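The most substantive change in the lintpack-to-go-critic rename above is the new `CheckerContext.TypeOf` helper and its `UnknownType` sentinel, which let checkers stop nil-checking `TypesInfo.TypeOf` themselves. A minimal sketch of how a checker might consume it; the `isBasicExpr` helper below is hypothetical and exists only to illustrate the pattern:

```go
package example

import (
	"go/ast"
	"go/types"

	"github.com/go-critic/go-critic/framework/linter"
)

// isBasicExpr is a hypothetical helper showing the intended use of the new
// CheckerContext.TypeOf: it never returns nil, so callers compare against
// linter.UnknownType instead of guarding against a nil types.Type.
func isBasicExpr(ctx *linter.CheckerContext, x ast.Expr) bool {
	typ := ctx.TypeOf(x)
	if typ == linter.UnknownType {
		// Missing or partial type information; treat the expression as
		// "not a basic type" rather than risking a nil-pointer panic.
		return false
	}
	_, ok := typ.Underlying().(*types.Basic)
	return ok
}
```

Comparing against the exported sentinel first matters because `UnknownType` is `types.Typ[types.Invalid]`, so a bare `*types.Basic` assertion on it would still succeed even though the kind is invalid.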
diff --git a/vendor/github.com/go-toolsmith/typep/README.md b/vendor/github.com/go-toolsmith/typep/README.md index 7366580..f797914 100644 --- a/vendor/github.com/go-toolsmith/typep/README.md +++ b/vendor/github.com/go-toolsmith/typep/README.md @@ -1,6 +1,6 @@ [![Go Report Card](https://goreportcard.com/badge/github.com/go-toolsmith/typep)](https://goreportcard.com/report/github.com/go-toolsmith/typep) [![GoDoc](https://godoc.org/github.com/go-toolsmith/typep?status.svg)](https://godoc.org/github.com/go-toolsmith/typep) - +[![Build Status](https://travis-ci.org/go-toolsmith/typep.svg?branch=master)](https://travis-ci.org/go-toolsmith/typep) # typep diff --git a/vendor/github.com/go-toolsmith/typep/predicates.go b/vendor/github.com/go-toolsmith/typep/predicates.go index 70e5b55..b07325a 100644 --- a/vendor/github.com/go-toolsmith/typep/predicates.go +++ b/vendor/github.com/go-toolsmith/typep/predicates.go @@ -27,7 +27,7 @@ func IsTypeExpr(info *types.Info, x ast.Expr) bool { _, ok := info.ObjectOf(x).(*types.TypeName) return ok - case *ast.FuncType, *ast.StructType, *ast.InterfaceType, *ast.ArrayType, *ast.MapType: + case *ast.FuncType, *ast.StructType, *ast.InterfaceType, *ast.ArrayType, *ast.MapType, *ast.ChanType: return true default: diff --git a/vendor/github.com/go-toolsmith/typep/safeExpr.go b/vendor/github.com/go-toolsmith/typep/safeExpr.go index 7236e6e..d5835d9 100644 --- a/vendor/github.com/go-toolsmith/typep/safeExpr.go +++ b/vendor/github.com/go-toolsmith/typep/safeExpr.go @@ -20,6 +20,10 @@ func SideEffectFree(info *types.Info, expr ast.Expr) bool { // whitelist to be on the conservative side. // Can be extended as needed. + if expr == nil { + return true + } + switch expr := expr.(type) { case *ast.StarExpr: return SideEffectFree(info, expr.X) @@ -31,6 +35,11 @@ func SideEffectFree(info *types.Info, expr ast.Expr) bool { SideEffectFree(info, expr.X) case *ast.BasicLit, *ast.Ident: return true + case *ast.SliceExpr: + return SideEffectFree(info, expr.X) && + SideEffectFree(info, expr.Low) && + SideEffectFree(info, expr.High) && + SideEffectFree(info, expr.Max) case *ast.IndexExpr: return SideEffectFree(info, expr.X) && SideEffectFree(info, expr.Index) @@ -38,6 +47,8 @@ func SideEffectFree(info *types.Info, expr ast.Expr) bool { return SideEffectFree(info, expr.X) case *ast.ParenExpr: return SideEffectFree(info, expr.X) + case *ast.TypeAssertExpr: + return SideEffectFree(info, expr.X) case *ast.CompositeLit: return SideEffectFreeList(info, expr.Elts) case *ast.CallExpr: diff --git a/vendor/github.com/go-xmlfmt/xmlfmt/LICENSE b/vendor/github.com/go-xmlfmt/xmlfmt/LICENSE new file mode 100644 index 0000000..890776a --- /dev/null +++ b/vendor/github.com/go-xmlfmt/xmlfmt/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016 go-xmlfmt + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/go-xmlfmt/xmlfmt/README.md b/vendor/github.com/go-xmlfmt/xmlfmt/README.md new file mode 100644 index 0000000..4eb6d69 --- /dev/null +++ b/vendor/github.com/go-xmlfmt/xmlfmt/README.md @@ -0,0 +1,178 @@ +# Go XML Formatter + +[![MIT License](http://img.shields.io/badge/License-MIT-blue.svg)](LICENSE) +[![Go Doc](https://img.shields.io/badge/godoc-reference-4b68a3.svg)](https://godoc.org/github.com/go-xmlfmt/xmlfmt) +[![Go Report Card](https://goreportcard.com/badge/github.com/go-xmlfmt/xmlfmt)](https://goreportcard.com/report/github.com/go-xmlfmt/xmlfmt) +[![Codeship Status](https://codeship.com/projects/c49f02b0-a384-0134-fb20-2e0351080565/status?branch=master)](https://codeship.com/projects/190297) + +## Synopsis + +The Go XML Formatter, xmlfmt, will format the XML string in a readable way. + +```go +package main + +import "github.com/go-xmlfmt/xmlfmt" + +func main() { + xml1 := `aSome org-or-otherWouldnt you like to knowPatCalifia` + x := xmlfmt.FormatXML(xml1, "\t", " ") + print(x) +} + +``` + +Output: + +```xml + + + a + + + + + + Some org-or-other + + Wouldnt you like to know + + + + Pat + + Califia + + + + + +``` + +There is no XML decoding and encoding involved, only pure regular expression matching and replacing. So it is much faster than going through decoding and encoding procedures. Moreover, the exact XML source string is preserved, instead of being changed by the encoder. This is why this package exists in the first place. + +## Command + +To use it on command line, check out [xmlfmt](https://github.com/AntonioSun/xmlfmt): + + +``` +$ xmlfmt +XML Formatter +built on 2019-12-08 + +The xmlfmt will format the XML string without rewriting the document + +Options: + + -h, --help display help information + -f, --file *The xml file to read from (or stdin) + -p, --prefix each element begins on a new line and this prefix + -i, --indent[= ] indent string for nested elements +``` + + +## Justification + +### The format + +The Go XML Formatter is not called XML Beautifier because the result is not *exactly* as what people would expect -- some, but not all, closing tags stays on the same line, just as shown above. Having been looking at the result and thinking over it, I now think it is actually a better way to present it, as those closing tags on the same line are better stay that way in my opinion. I.e., + +When it comes to very big XML strings, which is what I’m dealing every day, saving spaces by not allowing those closing tags taking extra lines is plus instead of negative to me. + +### The alternative + +To format it “properly”, i.e., as what people would normally see, is very hard using pure regular expression. In fact, according to Sam Whited from the go-nuts mlist, + +> Regular expression is, well, regular. This means that they can parse regular grammars, but can't parse context free grammars (like XML). It is actually impossible to use a regex to do this task; it will always be fragile, unfortunately. 
+ +So if the output format is so important to you, then unfortunately you have to go through decoding and encoding procedures. But there are some drawbacks as well, as put by James McGill, in http://stackoverflow.com/questions/21117161, besides such method being slow: + +> I like this solution, but am still in search of a Golang XML formatter/prettyprinter that doesn't rewrite the document (other than formatting whitespace). Marshalling or using the Encoder will change namespace declarations. +> +> For example an element like "< ns1:Element />" will be translated to something like '< Element xmlns="http://bla...bla/ns1" >< /Element >' which seems harmless enough except when the intent is to not alter the xml other than formatting. -- James McGill Nov 12 '15 + +Using Sam's code as an example, + +https://play.golang.org/p/JUqQY3WpW5 + +The above code formats the following XML + +```xml + + + + + + 123 + John Brown + + + + +``` + +into this: + +```xml + +
+ + + + 123 + John Brown + + + +
+``` + +I know they are syntactically the same, however the problem is that they *look* totally different. + +That's why there is this package, an XML Beautifier that doesn't rewrite the document. + +## Credit + +The credit goes to **diotalevi** from his post at http://www.perlmonks.org/?node_id=261292. + +However, it does not work for all cases. For example, + +```sh +$ echo '
123John Brown
' | perl -pe 's/(?<=>)\s+(?=<)//g; s(<(/?)([^/>]+)(/?)>\s*(?=(".($1&&($4 eq"
+123 +John Brown + + + + +``` + +I simplified the algorithm, and now it should work for all cases: + +```sh +echo '
123John Brown
' | perl -pe 's/(?<=>)\s+(?=<)//g; s(<(/?)([^>]+)(/?)>)($indent+=$3?0:$1?-1:1;"<$1$2$3>"."\n".(" "x$indent))ge' +``` +```xml + +
+
+ + + + + 123 + + John Brown + + + +
+``` + +This package is a direct translate from above Perl code into Go, +then further enhanced by @ruandao. diff --git a/vendor/github.com/go-xmlfmt/xmlfmt/xmlfmt.go b/vendor/github.com/go-xmlfmt/xmlfmt/xmlfmt.go new file mode 100644 index 0000000..b744f5b --- /dev/null +++ b/vendor/github.com/go-xmlfmt/xmlfmt/xmlfmt.go @@ -0,0 +1,56 @@ +//////////////////////////////////////////////////////////////////////////// +// Porgram: xmlfmt.go +// Purpose: Go XML Beautify from XML string using pure string manipulation +// Authors: Antonio Sun (c) 2016-2019, All rights reserved +//////////////////////////////////////////////////////////////////////////// + +package xmlfmt + +import ( + "regexp" + "strings" +) + +var ( + reg = regexp.MustCompile(`<([/!]?)([^>]+?)(/?)>`) + // NL is the newline string used in XML output, define for DOS-convenient. + NL = "\r\n" +) + +// FormatXML will (purly) reformat the XML string in a readable way, without any rewriting/altering the structure +func FormatXML(xmls, prefix, indent string) string { + src := regexp.MustCompile(`(?s)>\s+<`).ReplaceAllString(xmls, "><") + + rf := replaceTag(prefix, indent) + return (prefix + reg.ReplaceAllStringFunc(src, rf)) +} + +// replaceTag returns a closure function to do 's/(?<=>)\s+(?=<)//g; s(<(/?)([^>]+?)(/?)>)($indent+=$3?0:$1?-1:1;"<$1$2$3>"."\n".(" "x$indent))ge' as in Perl +// and deal with comments as well +func replaceTag(prefix, indent string) func(string) string { + indentLevel := 0 + return func(m string) string { + // head elem + if strings.HasPrefix(m, "") { + return NL + prefix + strings.Repeat(indent, indentLevel) + m + } + // comment elem + if strings.HasPrefix(m, " 0 { @@ -122,6 +123,7 @@ func (f *Flock) try(locked *bool, flag uint32) (bool, error) { if err := f.setFh(); err != nil { return false, err } + defer f.ensureFhState() } _, errNo := lockFileEx(syscall.Handle(f.fh.Fd()), flag|winLockfileFailImmediately, 0, 1, 0, &syscall.Overlapped{}) diff --git a/vendor/github.com/gogo/protobuf/AUTHORS b/vendor/github.com/gogo/protobuf/AUTHORS deleted file mode 100644 index 3d97fc7..0000000 --- a/vendor/github.com/gogo/protobuf/AUTHORS +++ /dev/null @@ -1,15 +0,0 @@ -# This is the official list of GoGo authors for copyright purposes. -# This file is distinct from the CONTRIBUTORS file, which -# lists people. For example, employees are listed in CONTRIBUTORS, -# but not in AUTHORS, because the employer holds the copyright. - -# Names should be added to this file as one of -# Organization's name -# Individual's name -# Individual's name - -# Please keep the list sorted. 
- -Sendgrid, Inc -Vastech SA (PTY) LTD -Walter Schulze diff --git a/vendor/github.com/gogo/protobuf/CONTRIBUTORS b/vendor/github.com/gogo/protobuf/CONTRIBUTORS deleted file mode 100644 index 1b4f6c2..0000000 --- a/vendor/github.com/gogo/protobuf/CONTRIBUTORS +++ /dev/null @@ -1,23 +0,0 @@ -Anton Povarov -Brian Goff -Clayton Coleman -Denis Smirnov -DongYun Kang -Dwayne Schultz -Georg Apitz -Gustav Paul -Johan Brandhorst -John Shahid -John Tuley -Laurent -Patrick Lee -Peter Edge -Roger Johansson -Sam Nguyen -Sergio Arbeo -Stephen J Day -Tamir Duberstein -Todd Eisenberger -Tormod Erevik Lea -Vyacheslav Kim -Walter Schulze diff --git a/vendor/github.com/gogo/protobuf/proto/Makefile b/vendor/github.com/gogo/protobuf/proto/Makefile deleted file mode 100644 index 00d65f3..0000000 --- a/vendor/github.com/gogo/protobuf/proto/Makefile +++ /dev/null @@ -1,43 +0,0 @@ -# Go support for Protocol Buffers - Google's data interchange format -# -# Copyright 2010 The Go Authors. All rights reserved. -# https://github.com/golang/protobuf -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -install: - go install - -test: install generate-test-pbs - go test - - -generate-test-pbs: - make install - make -C test_proto - make -C proto3_proto - make diff --git a/vendor/github.com/gogo/protobuf/proto/clone.go b/vendor/github.com/gogo/protobuf/proto/clone.go deleted file mode 100644 index a26b046..0000000 --- a/vendor/github.com/gogo/protobuf/proto/clone.go +++ /dev/null @@ -1,258 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2011 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Protocol buffer deep copy and merge. -// TODO: RawMessage. - -package proto - -import ( - "fmt" - "log" - "reflect" - "strings" -) - -// Clone returns a deep copy of a protocol buffer. -func Clone(src Message) Message { - in := reflect.ValueOf(src) - if in.IsNil() { - return src - } - out := reflect.New(in.Type().Elem()) - dst := out.Interface().(Message) - Merge(dst, src) - return dst -} - -// Merger is the interface representing objects that can merge messages of the same type. -type Merger interface { - // Merge merges src into this message. - // Required and optional fields that are set in src will be set to that value in dst. - // Elements of repeated fields will be appended. - // - // Merge may panic if called with a different argument type than the receiver. - Merge(src Message) -} - -// generatedMerger is the custom merge method that generated protos will have. -// We must add this method since a generate Merge method will conflict with -// many existing protos that have a Merge data field already defined. -type generatedMerger interface { - XXX_Merge(src Message) -} - -// Merge merges src into dst. -// Required and optional fields that are set in src will be set to that value in dst. -// Elements of repeated fields will be appended. -// Merge panics if src and dst are not the same type, or if dst is nil. -func Merge(dst, src Message) { - if m, ok := dst.(Merger); ok { - m.Merge(src) - return - } - - in := reflect.ValueOf(src) - out := reflect.ValueOf(dst) - if out.IsNil() { - panic("proto: nil destination") - } - if in.Type() != out.Type() { - panic(fmt.Sprintf("proto.Merge(%T, %T) type mismatch", dst, src)) - } - if in.IsNil() { - return // Merge from nil src is a noop - } - if m, ok := dst.(generatedMerger); ok { - m.XXX_Merge(src) - return - } - mergeStruct(out.Elem(), in.Elem()) -} - -func mergeStruct(out, in reflect.Value) { - sprop := GetProperties(in.Type()) - for i := 0; i < in.NumField(); i++ { - f := in.Type().Field(i) - if strings.HasPrefix(f.Name, "XXX_") { - continue - } - mergeAny(out.Field(i), in.Field(i), false, sprop.Prop[i]) - } - - if emIn, ok := in.Addr().Interface().(extensionsBytes); ok { - emOut := out.Addr().Interface().(extensionsBytes) - bIn := emIn.GetExtensions() - bOut := emOut.GetExtensions() - *bOut = append(*bOut, *bIn...) 
- } else if emIn, err := extendable(in.Addr().Interface()); err == nil { - emOut, _ := extendable(out.Addr().Interface()) - mIn, muIn := emIn.extensionsRead() - if mIn != nil { - mOut := emOut.extensionsWrite() - muIn.Lock() - mergeExtension(mOut, mIn) - muIn.Unlock() - } - } - - uf := in.FieldByName("XXX_unrecognized") - if !uf.IsValid() { - return - } - uin := uf.Bytes() - if len(uin) > 0 { - out.FieldByName("XXX_unrecognized").SetBytes(append([]byte(nil), uin...)) - } -} - -// mergeAny performs a merge between two values of the same type. -// viaPtr indicates whether the values were indirected through a pointer (implying proto2). -// prop is set if this is a struct field (it may be nil). -func mergeAny(out, in reflect.Value, viaPtr bool, prop *Properties) { - if in.Type() == protoMessageType { - if !in.IsNil() { - if out.IsNil() { - out.Set(reflect.ValueOf(Clone(in.Interface().(Message)))) - } else { - Merge(out.Interface().(Message), in.Interface().(Message)) - } - } - return - } - switch in.Kind() { - case reflect.Bool, reflect.Float32, reflect.Float64, reflect.Int32, reflect.Int64, - reflect.String, reflect.Uint32, reflect.Uint64: - if !viaPtr && isProto3Zero(in) { - return - } - out.Set(in) - case reflect.Interface: - // Probably a oneof field; copy non-nil values. - if in.IsNil() { - return - } - // Allocate destination if it is not set, or set to a different type. - // Otherwise we will merge as normal. - if out.IsNil() || out.Elem().Type() != in.Elem().Type() { - out.Set(reflect.New(in.Elem().Elem().Type())) // interface -> *T -> T -> new(T) - } - mergeAny(out.Elem(), in.Elem(), false, nil) - case reflect.Map: - if in.Len() == 0 { - return - } - if out.IsNil() { - out.Set(reflect.MakeMap(in.Type())) - } - // For maps with value types of *T or []byte we need to deep copy each value. - elemKind := in.Type().Elem().Kind() - for _, key := range in.MapKeys() { - var val reflect.Value - switch elemKind { - case reflect.Ptr: - val = reflect.New(in.Type().Elem().Elem()) - mergeAny(val, in.MapIndex(key), false, nil) - case reflect.Slice: - val = in.MapIndex(key) - val = reflect.ValueOf(append([]byte{}, val.Bytes()...)) - default: - val = in.MapIndex(key) - } - out.SetMapIndex(key, val) - } - case reflect.Ptr: - if in.IsNil() { - return - } - if out.IsNil() { - out.Set(reflect.New(in.Elem().Type())) - } - mergeAny(out.Elem(), in.Elem(), true, nil) - case reflect.Slice: - if in.IsNil() { - return - } - if in.Type().Elem().Kind() == reflect.Uint8 { - // []byte is a scalar bytes field, not a repeated field. - - // Edge case: if this is in a proto3 message, a zero length - // bytes field is considered the zero value, and should not - // be merged. - if prop != nil && prop.proto3 && in.Len() == 0 { - return - } - - // Make a deep copy. - // Append to []byte{} instead of []byte(nil) so that we never end up - // with a nil result. 
- out.SetBytes(append([]byte{}, in.Bytes()...)) - return - } - n := in.Len() - if out.IsNil() { - out.Set(reflect.MakeSlice(in.Type(), 0, n)) - } - switch in.Type().Elem().Kind() { - case reflect.Bool, reflect.Float32, reflect.Float64, reflect.Int32, reflect.Int64, - reflect.String, reflect.Uint32, reflect.Uint64: - out.Set(reflect.AppendSlice(out, in)) - default: - for i := 0; i < n; i++ { - x := reflect.Indirect(reflect.New(in.Type().Elem())) - mergeAny(x, in.Index(i), false, nil) - out.Set(reflect.Append(out, x)) - } - } - case reflect.Struct: - mergeStruct(out, in) - default: - // unknown type, so not a protocol buffer - log.Printf("proto: don't know how to copy %v", in) - } -} - -func mergeExtension(out, in map[int32]Extension) { - for extNum, eIn := range in { - eOut := Extension{desc: eIn.desc} - if eIn.value != nil { - v := reflect.New(reflect.TypeOf(eIn.value)).Elem() - mergeAny(v, reflect.ValueOf(eIn.value), false, nil) - eOut.value = v.Interface() - } - if eIn.enc != nil { - eOut.enc = make([]byte, len(eIn.enc)) - copy(eOut.enc, eIn.enc) - } - - out[extNum] = eOut - } -} diff --git a/vendor/github.com/gogo/protobuf/proto/custom_gogo.go b/vendor/github.com/gogo/protobuf/proto/custom_gogo.go deleted file mode 100644 index 2455248..0000000 --- a/vendor/github.com/gogo/protobuf/proto/custom_gogo.go +++ /dev/null @@ -1,39 +0,0 @@ -// Protocol Buffers for Go with Gadgets -// -// Copyright (c) 2018, The GoGo Authors. All rights reserved. -// http://github.com/gogo/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -import "reflect" - -type custom interface { - Marshal() ([]byte, error) - Unmarshal(data []byte) error - Size() int -} - -var customType = reflect.TypeOf((*custom)(nil)).Elem() diff --git a/vendor/github.com/gogo/protobuf/proto/decode.go b/vendor/github.com/gogo/protobuf/proto/decode.go deleted file mode 100644 index 63b0f08..0000000 --- a/vendor/github.com/gogo/protobuf/proto/decode.go +++ /dev/null @@ -1,427 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. 
-// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -/* - * Routines for decoding protocol buffer data to construct in-memory representations. - */ - -import ( - "errors" - "fmt" - "io" -) - -// errOverflow is returned when an integer is too large to be represented. -var errOverflow = errors.New("proto: integer overflow") - -// ErrInternalBadWireType is returned by generated code when an incorrect -// wire type is encountered. It does not get returned to user code. -var ErrInternalBadWireType = errors.New("proto: internal error: bad wiretype for oneof") - -// DecodeVarint reads a varint-encoded integer from the slice. -// It returns the integer and the number of bytes consumed, or -// zero if there is not enough. -// This is the format for the -// int32, int64, uint32, uint64, bool, and enum -// protocol buffer types. -func DecodeVarint(buf []byte) (x uint64, n int) { - for shift := uint(0); shift < 64; shift += 7 { - if n >= len(buf) { - return 0, 0 - } - b := uint64(buf[n]) - n++ - x |= (b & 0x7F) << shift - if (b & 0x80) == 0 { - return x, n - } - } - - // The number is too large to represent in a 64-bit value. - return 0, 0 -} - -func (p *Buffer) decodeVarintSlow() (x uint64, err error) { - i := p.index - l := len(p.buf) - - for shift := uint(0); shift < 64; shift += 7 { - if i >= l { - err = io.ErrUnexpectedEOF - return - } - b := p.buf[i] - i++ - x |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - p.index = i - return - } - } - - // The number is too large to represent in a 64-bit value. - err = errOverflow - return -} - -// DecodeVarint reads a varint-encoded integer from the Buffer. -// This is the format for the -// int32, int64, uint32, uint64, bool, and enum -// protocol buffer types. 
-func (p *Buffer) DecodeVarint() (x uint64, err error) { - i := p.index - buf := p.buf - - if i >= len(buf) { - return 0, io.ErrUnexpectedEOF - } else if buf[i] < 0x80 { - p.index++ - return uint64(buf[i]), nil - } else if len(buf)-i < 10 { - return p.decodeVarintSlow() - } - - var b uint64 - // we already checked the first byte - x = uint64(buf[i]) - 0x80 - i++ - - b = uint64(buf[i]) - i++ - x += b << 7 - if b&0x80 == 0 { - goto done - } - x -= 0x80 << 7 - - b = uint64(buf[i]) - i++ - x += b << 14 - if b&0x80 == 0 { - goto done - } - x -= 0x80 << 14 - - b = uint64(buf[i]) - i++ - x += b << 21 - if b&0x80 == 0 { - goto done - } - x -= 0x80 << 21 - - b = uint64(buf[i]) - i++ - x += b << 28 - if b&0x80 == 0 { - goto done - } - x -= 0x80 << 28 - - b = uint64(buf[i]) - i++ - x += b << 35 - if b&0x80 == 0 { - goto done - } - x -= 0x80 << 35 - - b = uint64(buf[i]) - i++ - x += b << 42 - if b&0x80 == 0 { - goto done - } - x -= 0x80 << 42 - - b = uint64(buf[i]) - i++ - x += b << 49 - if b&0x80 == 0 { - goto done - } - x -= 0x80 << 49 - - b = uint64(buf[i]) - i++ - x += b << 56 - if b&0x80 == 0 { - goto done - } - x -= 0x80 << 56 - - b = uint64(buf[i]) - i++ - x += b << 63 - if b&0x80 == 0 { - goto done - } - - return 0, errOverflow - -done: - p.index = i - return x, nil -} - -// DecodeFixed64 reads a 64-bit integer from the Buffer. -// This is the format for the -// fixed64, sfixed64, and double protocol buffer types. -func (p *Buffer) DecodeFixed64() (x uint64, err error) { - // x, err already 0 - i := p.index + 8 - if i < 0 || i > len(p.buf) { - err = io.ErrUnexpectedEOF - return - } - p.index = i - - x = uint64(p.buf[i-8]) - x |= uint64(p.buf[i-7]) << 8 - x |= uint64(p.buf[i-6]) << 16 - x |= uint64(p.buf[i-5]) << 24 - x |= uint64(p.buf[i-4]) << 32 - x |= uint64(p.buf[i-3]) << 40 - x |= uint64(p.buf[i-2]) << 48 - x |= uint64(p.buf[i-1]) << 56 - return -} - -// DecodeFixed32 reads a 32-bit integer from the Buffer. -// This is the format for the -// fixed32, sfixed32, and float protocol buffer types. -func (p *Buffer) DecodeFixed32() (x uint64, err error) { - // x, err already 0 - i := p.index + 4 - if i < 0 || i > len(p.buf) { - err = io.ErrUnexpectedEOF - return - } - p.index = i - - x = uint64(p.buf[i-4]) - x |= uint64(p.buf[i-3]) << 8 - x |= uint64(p.buf[i-2]) << 16 - x |= uint64(p.buf[i-1]) << 24 - return -} - -// DecodeZigzag64 reads a zigzag-encoded 64-bit integer -// from the Buffer. -// This is the format used for the sint64 protocol buffer type. -func (p *Buffer) DecodeZigzag64() (x uint64, err error) { - x, err = p.DecodeVarint() - if err != nil { - return - } - x = (x >> 1) ^ uint64((int64(x&1)<<63)>>63) - return -} - -// DecodeZigzag32 reads a zigzag-encoded 32-bit integer -// from the Buffer. -// This is the format used for the sint32 protocol buffer type. -func (p *Buffer) DecodeZigzag32() (x uint64, err error) { - x, err = p.DecodeVarint() - if err != nil { - return - } - x = uint64((uint32(x) >> 1) ^ uint32((int32(x&1)<<31)>>31)) - return -} - -// DecodeRawBytes reads a count-delimited byte buffer from the Buffer. -// This is the format used for the bytes protocol buffer -// type and for embedded messages. 
-func (p *Buffer) DecodeRawBytes(alloc bool) (buf []byte, err error) { - n, err := p.DecodeVarint() - if err != nil { - return nil, err - } - - nb := int(n) - if nb < 0 { - return nil, fmt.Errorf("proto: bad byte length %d", nb) - } - end := p.index + nb - if end < p.index || end > len(p.buf) { - return nil, io.ErrUnexpectedEOF - } - - if !alloc { - // todo: check if can get more uses of alloc=false - buf = p.buf[p.index:end] - p.index += nb - return - } - - buf = make([]byte, nb) - copy(buf, p.buf[p.index:]) - p.index += nb - return -} - -// DecodeStringBytes reads an encoded string from the Buffer. -// This is the format used for the proto2 string type. -func (p *Buffer) DecodeStringBytes() (s string, err error) { - buf, err := p.DecodeRawBytes(false) - if err != nil { - return - } - return string(buf), nil -} - -// Unmarshaler is the interface representing objects that can -// unmarshal themselves. The argument points to data that may be -// overwritten, so implementations should not keep references to the -// buffer. -// Unmarshal implementations should not clear the receiver. -// Any unmarshaled data should be merged into the receiver. -// Callers of Unmarshal that do not want to retain existing data -// should Reset the receiver before calling Unmarshal. -type Unmarshaler interface { - Unmarshal([]byte) error -} - -// newUnmarshaler is the interface representing objects that can -// unmarshal themselves. The semantics are identical to Unmarshaler. -// -// This exists to support protoc-gen-go generated messages. -// The proto package will stop type-asserting to this interface in the future. -// -// DO NOT DEPEND ON THIS. -type newUnmarshaler interface { - XXX_Unmarshal([]byte) error -} - -// Unmarshal parses the protocol buffer representation in buf and places the -// decoded result in pb. If the struct underlying pb does not match -// the data in buf, the results can be unpredictable. -// -// Unmarshal resets pb before starting to unmarshal, so any -// existing data in pb is always removed. Use UnmarshalMerge -// to preserve and append to existing data. -func Unmarshal(buf []byte, pb Message) error { - pb.Reset() - if u, ok := pb.(newUnmarshaler); ok { - return u.XXX_Unmarshal(buf) - } - if u, ok := pb.(Unmarshaler); ok { - return u.Unmarshal(buf) - } - return NewBuffer(buf).Unmarshal(pb) -} - -// UnmarshalMerge parses the protocol buffer representation in buf and -// writes the decoded result to pb. If the struct underlying pb does not match -// the data in buf, the results can be unpredictable. -// -// UnmarshalMerge merges into existing data in pb. -// Most code should use Unmarshal instead. -func UnmarshalMerge(buf []byte, pb Message) error { - if u, ok := pb.(newUnmarshaler); ok { - return u.XXX_Unmarshal(buf) - } - if u, ok := pb.(Unmarshaler); ok { - // NOTE: The history of proto have unfortunately been inconsistent - // whether Unmarshaler should or should not implicitly clear itself. - // Some implementations do, most do not. - // Thus, calling this here may or may not do what people want. - // - // See https://github.com/golang/protobuf/issues/424 - return u.Unmarshal(buf) - } - return NewBuffer(buf).Unmarshal(pb) -} - -// DecodeMessage reads a count-delimited message from the Buffer. -func (p *Buffer) DecodeMessage(pb Message) error { - enc, err := p.DecodeRawBytes(false) - if err != nil { - return err - } - return NewBuffer(enc).Unmarshal(pb) -} - -// DecodeGroup reads a tag-delimited group from the Buffer. -// StartGroup tag is already consumed. 
This function consumes -// EndGroup tag. -func (p *Buffer) DecodeGroup(pb Message) error { - b := p.buf[p.index:] - x, y := findEndGroup(b) - if x < 0 { - return io.ErrUnexpectedEOF - } - err := Unmarshal(b[:x], pb) - p.index += y - return err -} - -// Unmarshal parses the protocol buffer representation in the -// Buffer and places the decoded result in pb. If the struct -// underlying pb does not match the data in the buffer, the results can be -// unpredictable. -// -// Unlike proto.Unmarshal, this does not reset pb before starting to unmarshal. -func (p *Buffer) Unmarshal(pb Message) error { - // If the object can unmarshal itself, let it. - if u, ok := pb.(newUnmarshaler); ok { - err := u.XXX_Unmarshal(p.buf[p.index:]) - p.index = len(p.buf) - return err - } - if u, ok := pb.(Unmarshaler); ok { - // NOTE: The history of proto have unfortunately been inconsistent - // whether Unmarshaler should or should not implicitly clear itself. - // Some implementations do, most do not. - // Thus, calling this here may or may not do what people want. - // - // See https://github.com/golang/protobuf/issues/424 - err := u.Unmarshal(p.buf[p.index:]) - p.index = len(p.buf) - return err - } - - // Slow workaround for messages that aren't Unmarshalers. - // This includes some hand-coded .pb.go files and - // bootstrap protos. - // TODO: fix all of those and then add Unmarshal to - // the Message interface. Then: - // The cast above and code below can be deleted. - // The old unmarshaler can be deleted. - // Clients can call Unmarshal directly (can already do that, actually). - var info InternalMessageInfo - err := info.Unmarshal(pb, p.buf[p.index:]) - p.index = len(p.buf) - return err -} diff --git a/vendor/github.com/gogo/protobuf/proto/deprecated.go b/vendor/github.com/gogo/protobuf/proto/deprecated.go deleted file mode 100644 index 35b882c..0000000 --- a/vendor/github.com/gogo/protobuf/proto/deprecated.go +++ /dev/null @@ -1,63 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2018 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -import "errors" - -// Deprecated: do not use. -type Stats struct{ Emalloc, Dmalloc, Encode, Decode, Chit, Cmiss, Size uint64 } - -// Deprecated: do not use. -func GetStats() Stats { return Stats{} } - -// Deprecated: do not use. -func MarshalMessageSet(interface{}) ([]byte, error) { - return nil, errors.New("proto: not implemented") -} - -// Deprecated: do not use. -func UnmarshalMessageSet([]byte, interface{}) error { - return errors.New("proto: not implemented") -} - -// Deprecated: do not use. -func MarshalMessageSetJSON(interface{}) ([]byte, error) { - return nil, errors.New("proto: not implemented") -} - -// Deprecated: do not use. -func UnmarshalMessageSetJSON([]byte, interface{}) error { - return errors.New("proto: not implemented") -} - -// Deprecated: do not use. -func RegisterMessageSetType(Message, int32, string) {} diff --git a/vendor/github.com/gogo/protobuf/proto/discard.go b/vendor/github.com/gogo/protobuf/proto/discard.go deleted file mode 100644 index fe1bd7d..0000000 --- a/vendor/github.com/gogo/protobuf/proto/discard.go +++ /dev/null @@ -1,350 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2017 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -package proto - -import ( - "fmt" - "reflect" - "strings" - "sync" - "sync/atomic" -) - -type generatedDiscarder interface { - XXX_DiscardUnknown() -} - -// DiscardUnknown recursively discards all unknown fields from this message -// and all embedded messages. -// -// When unmarshaling a message with unrecognized fields, the tags and values -// of such fields are preserved in the Message. This allows a later call to -// marshal to be able to produce a message that continues to have those -// unrecognized fields. To avoid this, DiscardUnknown is used to -// explicitly clear the unknown fields after unmarshaling. -// -// For proto2 messages, the unknown fields of message extensions are only -// discarded from messages that have been accessed via GetExtension. -func DiscardUnknown(m Message) { - if m, ok := m.(generatedDiscarder); ok { - m.XXX_DiscardUnknown() - return - } - // TODO: Dynamically populate a InternalMessageInfo for legacy messages, - // but the master branch has no implementation for InternalMessageInfo, - // so it would be more work to replicate that approach. - discardLegacy(m) -} - -// DiscardUnknown recursively discards all unknown fields. -func (a *InternalMessageInfo) DiscardUnknown(m Message) { - di := atomicLoadDiscardInfo(&a.discard) - if di == nil { - di = getDiscardInfo(reflect.TypeOf(m).Elem()) - atomicStoreDiscardInfo(&a.discard, di) - } - di.discard(toPointer(&m)) -} - -type discardInfo struct { - typ reflect.Type - - initialized int32 // 0: only typ is valid, 1: everything is valid - lock sync.Mutex - - fields []discardFieldInfo - unrecognized field -} - -type discardFieldInfo struct { - field field // Offset of field, guaranteed to be valid - discard func(src pointer) -} - -var ( - discardInfoMap = map[reflect.Type]*discardInfo{} - discardInfoLock sync.Mutex -) - -func getDiscardInfo(t reflect.Type) *discardInfo { - discardInfoLock.Lock() - defer discardInfoLock.Unlock() - di := discardInfoMap[t] - if di == nil { - di = &discardInfo{typ: t} - discardInfoMap[t] = di - } - return di -} - -func (di *discardInfo) discard(src pointer) { - if src.isNil() { - return // Nothing to do. - } - - if atomic.LoadInt32(&di.initialized) == 0 { - di.computeDiscardInfo() - } - - for _, fi := range di.fields { - sfp := src.offset(fi.field) - fi.discard(sfp) - } - - // For proto2 messages, only discard unknown fields in message extensions - // that have been accessed via GetExtension. - if em, err := extendable(src.asPointerTo(di.typ).Interface()); err == nil { - // Ignore lock since DiscardUnknown is not concurrency safe. - emm, _ := em.extensionsRead() - for _, mx := range emm { - if m, ok := mx.value.(Message); ok { - DiscardUnknown(m) - } - } - } - - if di.unrecognized.IsValid() { - *src.offset(di.unrecognized).toBytes() = nil - } -} - -func (di *discardInfo) computeDiscardInfo() { - di.lock.Lock() - defer di.lock.Unlock() - if di.initialized != 0 { - return - } - t := di.typ - n := t.NumField() - - for i := 0; i < n; i++ { - f := t.Field(i) - if strings.HasPrefix(f.Name, "XXX_") { - continue - } - - dfi := discardFieldInfo{field: toField(&f)} - tf := f.Type - - // Unwrap tf to get its most basic type. 
- var isPointer, isSlice bool - if tf.Kind() == reflect.Slice && tf.Elem().Kind() != reflect.Uint8 { - isSlice = true - tf = tf.Elem() - } - if tf.Kind() == reflect.Ptr { - isPointer = true - tf = tf.Elem() - } - if isPointer && isSlice && tf.Kind() != reflect.Struct { - panic(fmt.Sprintf("%v.%s cannot be a slice of pointers to primitive types", t, f.Name)) - } - - switch tf.Kind() { - case reflect.Struct: - switch { - case !isPointer: - panic(fmt.Sprintf("%v.%s cannot be a direct struct value", t, f.Name)) - case isSlice: // E.g., []*pb.T - discardInfo := getDiscardInfo(tf) - dfi.discard = func(src pointer) { - sps := src.getPointerSlice() - for _, sp := range sps { - if !sp.isNil() { - discardInfo.discard(sp) - } - } - } - default: // E.g., *pb.T - discardInfo := getDiscardInfo(tf) - dfi.discard = func(src pointer) { - sp := src.getPointer() - if !sp.isNil() { - discardInfo.discard(sp) - } - } - } - case reflect.Map: - switch { - case isPointer || isSlice: - panic(fmt.Sprintf("%v.%s cannot be a pointer to a map or a slice of map values", t, f.Name)) - default: // E.g., map[K]V - if tf.Elem().Kind() == reflect.Ptr { // Proto struct (e.g., *T) - dfi.discard = func(src pointer) { - sm := src.asPointerTo(tf).Elem() - if sm.Len() == 0 { - return - } - for _, key := range sm.MapKeys() { - val := sm.MapIndex(key) - DiscardUnknown(val.Interface().(Message)) - } - } - } else { - dfi.discard = func(pointer) {} // Noop - } - } - case reflect.Interface: - // Must be oneof field. - switch { - case isPointer || isSlice: - panic(fmt.Sprintf("%v.%s cannot be a pointer to a interface or a slice of interface values", t, f.Name)) - default: // E.g., interface{} - // TODO: Make this faster? - dfi.discard = func(src pointer) { - su := src.asPointerTo(tf).Elem() - if !su.IsNil() { - sv := su.Elem().Elem().Field(0) - if sv.Kind() == reflect.Ptr && sv.IsNil() { - return - } - switch sv.Type().Kind() { - case reflect.Ptr: // Proto struct (e.g., *T) - DiscardUnknown(sv.Interface().(Message)) - } - } - } - } - default: - continue - } - di.fields = append(di.fields, dfi) - } - - di.unrecognized = invalidField - if f, ok := t.FieldByName("XXX_unrecognized"); ok { - if f.Type != reflect.TypeOf([]byte{}) { - panic("expected XXX_unrecognized to be of type []byte") - } - di.unrecognized = toField(&f) - } - - atomic.StoreInt32(&di.initialized, 1) -} - -func discardLegacy(m Message) { - v := reflect.ValueOf(m) - if v.Kind() != reflect.Ptr || v.IsNil() { - return - } - v = v.Elem() - if v.Kind() != reflect.Struct { - return - } - t := v.Type() - - for i := 0; i < v.NumField(); i++ { - f := t.Field(i) - if strings.HasPrefix(f.Name, "XXX_") { - continue - } - vf := v.Field(i) - tf := f.Type - - // Unwrap tf to get its most basic type. 
- var isPointer, isSlice bool - if tf.Kind() == reflect.Slice && tf.Elem().Kind() != reflect.Uint8 { - isSlice = true - tf = tf.Elem() - } - if tf.Kind() == reflect.Ptr { - isPointer = true - tf = tf.Elem() - } - if isPointer && isSlice && tf.Kind() != reflect.Struct { - panic(fmt.Sprintf("%T.%s cannot be a slice of pointers to primitive types", m, f.Name)) - } - - switch tf.Kind() { - case reflect.Struct: - switch { - case !isPointer: - panic(fmt.Sprintf("%T.%s cannot be a direct struct value", m, f.Name)) - case isSlice: // E.g., []*pb.T - for j := 0; j < vf.Len(); j++ { - discardLegacy(vf.Index(j).Interface().(Message)) - } - default: // E.g., *pb.T - discardLegacy(vf.Interface().(Message)) - } - case reflect.Map: - switch { - case isPointer || isSlice: - panic(fmt.Sprintf("%T.%s cannot be a pointer to a map or a slice of map values", m, f.Name)) - default: // E.g., map[K]V - tv := vf.Type().Elem() - if tv.Kind() == reflect.Ptr && tv.Implements(protoMessageType) { // Proto struct (e.g., *T) - for _, key := range vf.MapKeys() { - val := vf.MapIndex(key) - discardLegacy(val.Interface().(Message)) - } - } - } - case reflect.Interface: - // Must be oneof field. - switch { - case isPointer || isSlice: - panic(fmt.Sprintf("%T.%s cannot be a pointer to a interface or a slice of interface values", m, f.Name)) - default: // E.g., test_proto.isCommunique_Union interface - if !vf.IsNil() && f.Tag.Get("protobuf_oneof") != "" { - vf = vf.Elem() // E.g., *test_proto.Communique_Msg - if !vf.IsNil() { - vf = vf.Elem() // E.g., test_proto.Communique_Msg - vf = vf.Field(0) // E.g., Proto struct (e.g., *T) or primitive value - if vf.Kind() == reflect.Ptr { - discardLegacy(vf.Interface().(Message)) - } - } - } - } - } - } - - if vf := v.FieldByName("XXX_unrecognized"); vf.IsValid() { - if vf.Type() != reflect.TypeOf([]byte{}) { - panic("expected XXX_unrecognized to be of type []byte") - } - vf.Set(reflect.ValueOf([]byte(nil))) - } - - // For proto2 messages, only discard unknown fields in message extensions - // that have been accessed via GetExtension. - if em, err := extendable(m); err == nil { - // Ignore lock since discardLegacy is not concurrency safe. - emm, _ := em.extensionsRead() - for _, mx := range emm { - if m, ok := mx.value.(Message); ok { - discardLegacy(m) - } - } - } -} diff --git a/vendor/github.com/gogo/protobuf/proto/duration.go b/vendor/github.com/gogo/protobuf/proto/duration.go deleted file mode 100644 index 93464c9..0000000 --- a/vendor/github.com/gogo/protobuf/proto/duration.go +++ /dev/null @@ -1,100 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2016 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -// This file implements conversions between google.protobuf.Duration -// and time.Duration. - -import ( - "errors" - "fmt" - "time" -) - -const ( - // Range of a Duration in seconds, as specified in - // google/protobuf/duration.proto. This is about 10,000 years in seconds. - maxSeconds = int64(10000 * 365.25 * 24 * 60 * 60) - minSeconds = -maxSeconds -) - -// validateDuration determines whether the Duration is valid according to the -// definition in google/protobuf/duration.proto. A valid Duration -// may still be too large to fit into a time.Duration (the range of Duration -// is about 10,000 years, and the range of time.Duration is about 290). -func validateDuration(d *duration) error { - if d == nil { - return errors.New("duration: nil Duration") - } - if d.Seconds < minSeconds || d.Seconds > maxSeconds { - return fmt.Errorf("duration: %#v: seconds out of range", d) - } - if d.Nanos <= -1e9 || d.Nanos >= 1e9 { - return fmt.Errorf("duration: %#v: nanos out of range", d) - } - // Seconds and Nanos must have the same sign, unless d.Nanos is zero. - if (d.Seconds < 0 && d.Nanos > 0) || (d.Seconds > 0 && d.Nanos < 0) { - return fmt.Errorf("duration: %#v: seconds and nanos have different signs", d) - } - return nil -} - -// DurationFromProto converts a Duration to a time.Duration. DurationFromProto -// returns an error if the Duration is invalid or is too large to be -// represented in a time.Duration. -func durationFromProto(p *duration) (time.Duration, error) { - if err := validateDuration(p); err != nil { - return 0, err - } - d := time.Duration(p.Seconds) * time.Second - if int64(d/time.Second) != p.Seconds { - return 0, fmt.Errorf("duration: %#v is out of range for time.Duration", p) - } - if p.Nanos != 0 { - d += time.Duration(p.Nanos) - if (d < 0) != (p.Nanos < 0) { - return 0, fmt.Errorf("duration: %#v is out of range for time.Duration", p) - } - } - return d, nil -} - -// DurationProto converts a time.Duration to a Duration. -func durationProto(d time.Duration) *duration { - nanos := d.Nanoseconds() - secs := nanos / 1e9 - nanos -= secs * 1e9 - return &duration{ - Seconds: secs, - Nanos: int32(nanos), - } -} diff --git a/vendor/github.com/gogo/protobuf/proto/duration_gogo.go b/vendor/github.com/gogo/protobuf/proto/duration_gogo.go deleted file mode 100644 index e748e17..0000000 --- a/vendor/github.com/gogo/protobuf/proto/duration_gogo.go +++ /dev/null @@ -1,49 +0,0 @@ -// Protocol Buffers for Go with Gadgets -// -// Copyright (c) 2016, The GoGo Authors. All rights reserved. 
-// http://github.com/gogo/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -import ( - "reflect" - "time" -) - -var durationType = reflect.TypeOf((*time.Duration)(nil)).Elem() - -type duration struct { - Seconds int64 `protobuf:"varint,1,opt,name=seconds,proto3" json:"seconds,omitempty"` - Nanos int32 `protobuf:"varint,2,opt,name=nanos,proto3" json:"nanos,omitempty"` -} - -func (m *duration) Reset() { *m = duration{} } -func (*duration) ProtoMessage() {} -func (*duration) String() string { return "duration" } - -func init() { - RegisterType((*duration)(nil), "gogo.protobuf.proto.duration") -} diff --git a/vendor/github.com/gogo/protobuf/proto/encode.go b/vendor/github.com/gogo/protobuf/proto/encode.go deleted file mode 100644 index 3abfed2..0000000 --- a/vendor/github.com/gogo/protobuf/proto/encode.go +++ /dev/null @@ -1,203 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -/* - * Routines for encoding data into the wire format for protocol buffers. - */ - -import ( - "errors" - "reflect" -) - -var ( - // errRepeatedHasNil is the error returned if Marshal is called with - // a struct with a repeated field containing a nil element. - errRepeatedHasNil = errors.New("proto: repeated field has nil element") - - // errOneofHasNil is the error returned if Marshal is called with - // a struct with a oneof field containing a nil element. - errOneofHasNil = errors.New("proto: oneof field has nil value") - - // ErrNil is the error returned if Marshal is called with nil. - ErrNil = errors.New("proto: Marshal called with nil") - - // ErrTooLarge is the error returned if Marshal is called with a - // message that encodes to >2GB. - ErrTooLarge = errors.New("proto: message encodes to over 2 GB") -) - -// The fundamental encoders that put bytes on the wire. -// Those that take integer types all accept uint64 and are -// therefore of type valueEncoder. - -const maxVarintBytes = 10 // maximum length of a varint - -// EncodeVarint returns the varint encoding of x. -// This is the format for the -// int32, int64, uint32, uint64, bool, and enum -// protocol buffer types. -// Not used by the package itself, but helpful to clients -// wishing to use the same encoding. -func EncodeVarint(x uint64) []byte { - var buf [maxVarintBytes]byte - var n int - for n = 0; x > 127; n++ { - buf[n] = 0x80 | uint8(x&0x7F) - x >>= 7 - } - buf[n] = uint8(x) - n++ - return buf[0:n] -} - -// EncodeVarint writes a varint-encoded integer to the Buffer. -// This is the format for the -// int32, int64, uint32, uint64, bool, and enum -// protocol buffer types. -func (p *Buffer) EncodeVarint(x uint64) error { - for x >= 1<<7 { - p.buf = append(p.buf, uint8(x&0x7f|0x80)) - x >>= 7 - } - p.buf = append(p.buf, uint8(x)) - return nil -} - -// SizeVarint returns the varint encoding size of an integer. -func SizeVarint(x uint64) int { - switch { - case x < 1<<7: - return 1 - case x < 1<<14: - return 2 - case x < 1<<21: - return 3 - case x < 1<<28: - return 4 - case x < 1<<35: - return 5 - case x < 1<<42: - return 6 - case x < 1<<49: - return 7 - case x < 1<<56: - return 8 - case x < 1<<63: - return 9 - } - return 10 -} - -// EncodeFixed64 writes a 64-bit integer to the Buffer. -// This is the format for the -// fixed64, sfixed64, and double protocol buffer types. -func (p *Buffer) EncodeFixed64(x uint64) error { - p.buf = append(p.buf, - uint8(x), - uint8(x>>8), - uint8(x>>16), - uint8(x>>24), - uint8(x>>32), - uint8(x>>40), - uint8(x>>48), - uint8(x>>56)) - return nil -} - -// EncodeFixed32 writes a 32-bit integer to the Buffer. -// This is the format for the -// fixed32, sfixed32, and float protocol buffer types. -func (p *Buffer) EncodeFixed32(x uint64) error { - p.buf = append(p.buf, - uint8(x), - uint8(x>>8), - uint8(x>>16), - uint8(x>>24)) - return nil -} - -// EncodeZigzag64 writes a zigzag-encoded 64-bit integer -// to the Buffer. 
-// This is the format used for the sint64 protocol buffer type. -func (p *Buffer) EncodeZigzag64(x uint64) error { - // use signed number to get arithmetic right shift. - return p.EncodeVarint(uint64((x << 1) ^ uint64((int64(x) >> 63)))) -} - -// EncodeZigzag32 writes a zigzag-encoded 32-bit integer -// to the Buffer. -// This is the format used for the sint32 protocol buffer type. -func (p *Buffer) EncodeZigzag32(x uint64) error { - // use signed number to get arithmetic right shift. - return p.EncodeVarint(uint64((uint32(x) << 1) ^ uint32((int32(x) >> 31)))) -} - -// EncodeRawBytes writes a count-delimited byte buffer to the Buffer. -// This is the format used for the bytes protocol buffer -// type and for embedded messages. -func (p *Buffer) EncodeRawBytes(b []byte) error { - p.EncodeVarint(uint64(len(b))) - p.buf = append(p.buf, b...) - return nil -} - -// EncodeStringBytes writes an encoded string to the Buffer. -// This is the format used for the proto2 string type. -func (p *Buffer) EncodeStringBytes(s string) error { - p.EncodeVarint(uint64(len(s))) - p.buf = append(p.buf, s...) - return nil -} - -// Marshaler is the interface representing objects that can marshal themselves. -type Marshaler interface { - Marshal() ([]byte, error) -} - -// EncodeMessage writes the protocol buffer to the Buffer, -// prefixed by a varint-encoded length. -func (p *Buffer) EncodeMessage(pb Message) error { - siz := Size(pb) - p.EncodeVarint(uint64(siz)) - return p.Marshal(pb) -} - -// All protocol buffer fields are nillable, but be careful. -func isNil(v reflect.Value) bool { - switch v.Kind() { - case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: - return v.IsNil() - } - return false -} diff --git a/vendor/github.com/gogo/protobuf/proto/encode_gogo.go b/vendor/github.com/gogo/protobuf/proto/encode_gogo.go deleted file mode 100644 index 0f5fb17..0000000 --- a/vendor/github.com/gogo/protobuf/proto/encode_gogo.go +++ /dev/null @@ -1,33 +0,0 @@ -// Protocol Buffers for Go with Gadgets -// -// Copyright (c) 2013, The GoGo Authors. All rights reserved. -// http://github.com/gogo/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
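The writers in the deleted encode.go put every integer on the wire as a base-128 varint, and EncodeZigzag64/EncodeZigzag32 first fold signed values so that small negative numbers stay short. A small self-contained sketch of both transforms (illustrative re-implementations, not the deleted Buffer methods):

    package main

    import "fmt"

    // encodeVarint mirrors EncodeVarint above: 7 payload bits per byte, with the
    // high bit set on every byte except the last.
    func encodeVarint(x uint64) []byte {
        var buf []byte
        for x >= 1<<7 {
            buf = append(buf, byte(x&0x7f|0x80))
            x >>= 7
        }
        return append(buf, byte(x))
    }

    // zigzag64 is the sint64 mapping used by EncodeZigzag64: small magnitudes,
    // positive or negative, become small unsigned values.
    func zigzag64(n int64) uint64 {
        return uint64(n<<1) ^ uint64(n>>63)
    }

    func main() {
        fmt.Printf("% x\n", encodeVarint(300))                            // ac 02
        fmt.Println(zigzag64(0), zigzag64(-1), zigzag64(1), zigzag64(-2)) // 0 1 2 3
    }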
- -package proto - -func NewRequiredNotSetError(field string) *RequiredNotSetError { - return &RequiredNotSetError{field} -} diff --git a/vendor/github.com/gogo/protobuf/proto/equal.go b/vendor/github.com/gogo/protobuf/proto/equal.go deleted file mode 100644 index d4db5a1..0000000 --- a/vendor/github.com/gogo/protobuf/proto/equal.go +++ /dev/null @@ -1,300 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2011 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Protocol buffer comparison. - -package proto - -import ( - "bytes" - "log" - "reflect" - "strings" -) - -/* -Equal returns true iff protocol buffers a and b are equal. -The arguments must both be pointers to protocol buffer structs. - -Equality is defined in this way: - - Two messages are equal iff they are the same type, - corresponding fields are equal, unknown field sets - are equal, and extensions sets are equal. - - Two set scalar fields are equal iff their values are equal. - If the fields are of a floating-point type, remember that - NaN != x for all x, including NaN. If the message is defined - in a proto3 .proto file, fields are not "set"; specifically, - zero length proto3 "bytes" fields are equal (nil == {}). - - Two repeated fields are equal iff their lengths are the same, - and their corresponding elements are equal. Note a "bytes" field, - although represented by []byte, is not a repeated field and the - rule for the scalar fields described above applies. - - Two unset fields are equal. - - Two unknown field sets are equal if their current - encoded state is equal. - - Two extension sets are equal iff they have corresponding - elements that are pairwise equal. - - Two map fields are equal iff their lengths are the same, - and they contain the same set of elements. Zero-length map - fields are equal. - - Every other combination of things are not equal. - -The return value is undefined if a and b are not protocol buffers. 
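One consequence of the Equal semantics documented above is easy to miss: set scalar fields are compared with ==, so a floating-point field holding NaN is never equal, even to itself. The underlying Go rule in isolation:

    package main

    import (
        "fmt"
        "math"
    )

    func main() {
        // equalAny compares set float fields with ==, so a NaN-valued field makes
        // two otherwise identical messages unequal.
        nan := math.NaN()
        fmt.Println(nan == nan) // false
    }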
-*/ -func Equal(a, b Message) bool { - if a == nil || b == nil { - return a == b - } - v1, v2 := reflect.ValueOf(a), reflect.ValueOf(b) - if v1.Type() != v2.Type() { - return false - } - if v1.Kind() == reflect.Ptr { - if v1.IsNil() { - return v2.IsNil() - } - if v2.IsNil() { - return false - } - v1, v2 = v1.Elem(), v2.Elem() - } - if v1.Kind() != reflect.Struct { - return false - } - return equalStruct(v1, v2) -} - -// v1 and v2 are known to have the same type. -func equalStruct(v1, v2 reflect.Value) bool { - sprop := GetProperties(v1.Type()) - for i := 0; i < v1.NumField(); i++ { - f := v1.Type().Field(i) - if strings.HasPrefix(f.Name, "XXX_") { - continue - } - f1, f2 := v1.Field(i), v2.Field(i) - if f.Type.Kind() == reflect.Ptr { - if n1, n2 := f1.IsNil(), f2.IsNil(); n1 && n2 { - // both unset - continue - } else if n1 != n2 { - // set/unset mismatch - return false - } - f1, f2 = f1.Elem(), f2.Elem() - } - if !equalAny(f1, f2, sprop.Prop[i]) { - return false - } - } - - if em1 := v1.FieldByName("XXX_InternalExtensions"); em1.IsValid() { - em2 := v2.FieldByName("XXX_InternalExtensions") - if !equalExtensions(v1.Type(), em1.Interface().(XXX_InternalExtensions), em2.Interface().(XXX_InternalExtensions)) { - return false - } - } - - if em1 := v1.FieldByName("XXX_extensions"); em1.IsValid() { - em2 := v2.FieldByName("XXX_extensions") - if !equalExtMap(v1.Type(), em1.Interface().(map[int32]Extension), em2.Interface().(map[int32]Extension)) { - return false - } - } - - uf := v1.FieldByName("XXX_unrecognized") - if !uf.IsValid() { - return true - } - - u1 := uf.Bytes() - u2 := v2.FieldByName("XXX_unrecognized").Bytes() - return bytes.Equal(u1, u2) -} - -// v1 and v2 are known to have the same type. -// prop may be nil. -func equalAny(v1, v2 reflect.Value, prop *Properties) bool { - if v1.Type() == protoMessageType { - m1, _ := v1.Interface().(Message) - m2, _ := v2.Interface().(Message) - return Equal(m1, m2) - } - switch v1.Kind() { - case reflect.Bool: - return v1.Bool() == v2.Bool() - case reflect.Float32, reflect.Float64: - return v1.Float() == v2.Float() - case reflect.Int32, reflect.Int64: - return v1.Int() == v2.Int() - case reflect.Interface: - // Probably a oneof field; compare the inner values. - n1, n2 := v1.IsNil(), v2.IsNil() - if n1 || n2 { - return n1 == n2 - } - e1, e2 := v1.Elem(), v2.Elem() - if e1.Type() != e2.Type() { - return false - } - return equalAny(e1, e2, nil) - case reflect.Map: - if v1.Len() != v2.Len() { - return false - } - for _, key := range v1.MapKeys() { - val2 := v2.MapIndex(key) - if !val2.IsValid() { - // This key was not found in the second map. - return false - } - if !equalAny(v1.MapIndex(key), val2, nil) { - return false - } - } - return true - case reflect.Ptr: - // Maps may have nil values in them, so check for nil. - if v1.IsNil() && v2.IsNil() { - return true - } - if v1.IsNil() != v2.IsNil() { - return false - } - return equalAny(v1.Elem(), v2.Elem(), prop) - case reflect.Slice: - if v1.Type().Elem().Kind() == reflect.Uint8 { - // short circuit: []byte - - // Edge case: if this is in a proto3 message, a zero length - // bytes field is considered the zero value. 
- if prop != nil && prop.proto3 && v1.Len() == 0 && v2.Len() == 0 { - return true - } - if v1.IsNil() != v2.IsNil() { - return false - } - return bytes.Equal(v1.Interface().([]byte), v2.Interface().([]byte)) - } - - if v1.Len() != v2.Len() { - return false - } - for i := 0; i < v1.Len(); i++ { - if !equalAny(v1.Index(i), v2.Index(i), prop) { - return false - } - } - return true - case reflect.String: - return v1.Interface().(string) == v2.Interface().(string) - case reflect.Struct: - return equalStruct(v1, v2) - case reflect.Uint32, reflect.Uint64: - return v1.Uint() == v2.Uint() - } - - // unknown type, so not a protocol buffer - log.Printf("proto: don't know how to compare %v", v1) - return false -} - -// base is the struct type that the extensions are based on. -// x1 and x2 are InternalExtensions. -func equalExtensions(base reflect.Type, x1, x2 XXX_InternalExtensions) bool { - em1, _ := x1.extensionsRead() - em2, _ := x2.extensionsRead() - return equalExtMap(base, em1, em2) -} - -func equalExtMap(base reflect.Type, em1, em2 map[int32]Extension) bool { - if len(em1) != len(em2) { - return false - } - - for extNum, e1 := range em1 { - e2, ok := em2[extNum] - if !ok { - return false - } - - m1, m2 := e1.value, e2.value - - if m1 == nil && m2 == nil { - // Both have only encoded form. - if bytes.Equal(e1.enc, e2.enc) { - continue - } - // The bytes are different, but the extensions might still be - // equal. We need to decode them to compare. - } - - if m1 != nil && m2 != nil { - // Both are unencoded. - if !equalAny(reflect.ValueOf(m1), reflect.ValueOf(m2), nil) { - return false - } - continue - } - - // At least one is encoded. To do a semantically correct comparison - // we need to unmarshal them first. - var desc *ExtensionDesc - if m := extensionMaps[base]; m != nil { - desc = m[extNum] - } - if desc == nil { - // If both have only encoded form and the bytes are the same, - // it is handled above. We get here when the bytes are different. - // We don't know how to decode it, so just compare them as byte - // slices. - log.Printf("proto: don't know how to compare extension %d of %v", extNum, base) - return false - } - var err error - if m1 == nil { - m1, err = decodeExtension(e1.enc, desc) - } - if m2 == nil && err == nil { - m2, err = decodeExtension(e2.enc, desc) - } - if err != nil { - // The encoded form is invalid. - log.Printf("proto: badly encoded extension %d of %v: %v", extNum, base, err) - return false - } - if !equalAny(reflect.ValueOf(m1), reflect.ValueOf(m2), nil) { - return false - } - } - - return true -} diff --git a/vendor/github.com/gogo/protobuf/proto/extensions.go b/vendor/github.com/gogo/protobuf/proto/extensions.go deleted file mode 100644 index 686bd2a..0000000 --- a/vendor/github.com/gogo/protobuf/proto/extensions.go +++ /dev/null @@ -1,604 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -/* - * Types and routines for supporting protocol buffer extensions. - */ - -import ( - "errors" - "fmt" - "io" - "reflect" - "strconv" - "sync" -) - -// ErrMissingExtension is the error returned by GetExtension if the named extension is not in the message. -var ErrMissingExtension = errors.New("proto: missing extension") - -// ExtensionRange represents a range of message extensions for a protocol buffer. -// Used in code generated by the protocol compiler. -type ExtensionRange struct { - Start, End int32 // both inclusive -} - -// extendableProto is an interface implemented by any protocol buffer generated by the current -// proto compiler that may be extended. -type extendableProto interface { - Message - ExtensionRangeArray() []ExtensionRange - extensionsWrite() map[int32]Extension - extensionsRead() (map[int32]Extension, sync.Locker) -} - -// extendableProtoV1 is an interface implemented by a protocol buffer generated by the previous -// version of the proto compiler that may be extended. -type extendableProtoV1 interface { - Message - ExtensionRangeArray() []ExtensionRange - ExtensionMap() map[int32]Extension -} - -// extensionAdapter is a wrapper around extendableProtoV1 that implements extendableProto. -type extensionAdapter struct { - extendableProtoV1 -} - -func (e extensionAdapter) extensionsWrite() map[int32]Extension { - return e.ExtensionMap() -} - -func (e extensionAdapter) extensionsRead() (map[int32]Extension, sync.Locker) { - return e.ExtensionMap(), notLocker{} -} - -// notLocker is a sync.Locker whose Lock and Unlock methods are nops. -type notLocker struct{} - -func (n notLocker) Lock() {} -func (n notLocker) Unlock() {} - -// extendable returns the extendableProto interface for the given generated proto message. -// If the proto message has the old extension format, it returns a wrapper that implements -// the extendableProto interface. -func extendable(p interface{}) (extendableProto, error) { - switch p := p.(type) { - case extendableProto: - if isNilPtr(p) { - return nil, fmt.Errorf("proto: nil %T is not extendable", p) - } - return p, nil - case extendableProtoV1: - if isNilPtr(p) { - return nil, fmt.Errorf("proto: nil %T is not extendable", p) - } - return extensionAdapter{p}, nil - case extensionsBytes: - return slowExtensionAdapter{p}, nil - } - // Don't allocate a specific error containing %T: - // this is the hot path for Clone and MarshalText. 
- return nil, errNotExtendable -} - -var errNotExtendable = errors.New("proto: not an extendable proto.Message") - -func isNilPtr(x interface{}) bool { - v := reflect.ValueOf(x) - return v.Kind() == reflect.Ptr && v.IsNil() -} - -// XXX_InternalExtensions is an internal representation of proto extensions. -// -// Each generated message struct type embeds an anonymous XXX_InternalExtensions field, -// thus gaining the unexported 'extensions' method, which can be called only from the proto package. -// -// The methods of XXX_InternalExtensions are not concurrency safe in general, -// but calls to logically read-only methods such as has and get may be executed concurrently. -type XXX_InternalExtensions struct { - // The struct must be indirect so that if a user inadvertently copies a - // generated message and its embedded XXX_InternalExtensions, they - // avoid the mayhem of a copied mutex. - // - // The mutex serializes all logically read-only operations to p.extensionMap. - // It is up to the client to ensure that write operations to p.extensionMap are - // mutually exclusive with other accesses. - p *struct { - mu sync.Mutex - extensionMap map[int32]Extension - } -} - -// extensionsWrite returns the extension map, creating it on first use. -func (e *XXX_InternalExtensions) extensionsWrite() map[int32]Extension { - if e.p == nil { - e.p = new(struct { - mu sync.Mutex - extensionMap map[int32]Extension - }) - e.p.extensionMap = make(map[int32]Extension) - } - return e.p.extensionMap -} - -// extensionsRead returns the extensions map for read-only use. It may be nil. -// The caller must hold the returned mutex's lock when accessing Elements within the map. -func (e *XXX_InternalExtensions) extensionsRead() (map[int32]Extension, sync.Locker) { - if e.p == nil { - return nil, nil - } - return e.p.extensionMap, &e.p.mu -} - -// ExtensionDesc represents an extension specification. -// Used in generated code from the protocol compiler. -type ExtensionDesc struct { - ExtendedType Message // nil pointer to the type that is being extended - ExtensionType interface{} // nil pointer to the extension type - Field int32 // field number - Name string // fully-qualified name of extension, for text formatting - Tag string // protobuf tag style - Filename string // name of the file in which the extension is defined -} - -func (ed *ExtensionDesc) repeated() bool { - t := reflect.TypeOf(ed.ExtensionType) - return t.Kind() == reflect.Slice && t.Elem().Kind() != reflect.Uint8 -} - -// Extension represents an extension in a message. -type Extension struct { - // When an extension is stored in a message using SetExtension - // only desc and value are set. When the message is marshaled - // enc will be set to the encoded form of the message. - // - // When a message is unmarshaled and contains extensions, each - // extension will have only enc set. When such an extension is - // accessed using GetExtension (or GetExtensions) desc and value - // will be set. - desc *ExtensionDesc - value interface{} - enc []byte -} - -// SetRawExtension is for testing only. -func SetRawExtension(base Message, id int32, b []byte) { - if ebase, ok := base.(extensionsBytes); ok { - clearExtension(base, id) - ext := ebase.GetExtensions() - *ext = append(*ext, b...) - return - } - epb, err := extendable(base) - if err != nil { - return - } - extmap := epb.extensionsWrite() - extmap[id] = Extension{enc: b} -} - -// isExtensionField returns true iff the given field number is in an extension range. 
-func isExtensionField(pb extendableProto, field int32) bool { - for _, er := range pb.ExtensionRangeArray() { - if er.Start <= field && field <= er.End { - return true - } - } - return false -} - -// checkExtensionTypes checks that the given extension is valid for pb. -func checkExtensionTypes(pb extendableProto, extension *ExtensionDesc) error { - var pbi interface{} = pb - // Check the extended type. - if ea, ok := pbi.(extensionAdapter); ok { - pbi = ea.extendableProtoV1 - } - if ea, ok := pbi.(slowExtensionAdapter); ok { - pbi = ea.extensionsBytes - } - if a, b := reflect.TypeOf(pbi), reflect.TypeOf(extension.ExtendedType); a != b { - return fmt.Errorf("proto: bad extended type; %v does not extend %v", b, a) - } - // Check the range. - if !isExtensionField(pb, extension.Field) { - return errors.New("proto: bad extension number; not in declared ranges") - } - return nil -} - -// extPropKey is sufficient to uniquely identify an extension. -type extPropKey struct { - base reflect.Type - field int32 -} - -var extProp = struct { - sync.RWMutex - m map[extPropKey]*Properties -}{ - m: make(map[extPropKey]*Properties), -} - -func extensionProperties(ed *ExtensionDesc) *Properties { - key := extPropKey{base: reflect.TypeOf(ed.ExtendedType), field: ed.Field} - - extProp.RLock() - if prop, ok := extProp.m[key]; ok { - extProp.RUnlock() - return prop - } - extProp.RUnlock() - - extProp.Lock() - defer extProp.Unlock() - // Check again. - if prop, ok := extProp.m[key]; ok { - return prop - } - - prop := new(Properties) - prop.Init(reflect.TypeOf(ed.ExtensionType), "unknown_name", ed.Tag, nil) - extProp.m[key] = prop - return prop -} - -// HasExtension returns whether the given extension is present in pb. -func HasExtension(pb Message, extension *ExtensionDesc) bool { - if epb, doki := pb.(extensionsBytes); doki { - ext := epb.GetExtensions() - buf := *ext - o := 0 - for o < len(buf) { - tag, n := DecodeVarint(buf[o:]) - fieldNum := int32(tag >> 3) - if int32(fieldNum) == extension.Field { - return true - } - wireType := int(tag & 0x7) - o += n - l, err := size(buf[o:], wireType) - if err != nil { - return false - } - o += l - } - return false - } - // TODO: Check types, field numbers, etc.? - epb, err := extendable(pb) - if err != nil { - return false - } - extmap, mu := epb.extensionsRead() - if extmap == nil { - return false - } - mu.Lock() - _, ok := extmap[extension.Field] - mu.Unlock() - return ok -} - -// ClearExtension removes the given extension from pb. -func ClearExtension(pb Message, extension *ExtensionDesc) { - clearExtension(pb, extension.Field) -} - -func clearExtension(pb Message, fieldNum int32) { - if epb, ok := pb.(extensionsBytes); ok { - offset := 0 - for offset != -1 { - offset = deleteExtension(epb, fieldNum, offset) - } - return - } - epb, err := extendable(pb) - if err != nil { - return - } - // TODO: Check types, field numbers, etc.? - extmap := epb.extensionsWrite() - delete(extmap, fieldNum) -} - -// GetExtension retrieves a proto2 extended field from pb. -// -// If the descriptor is type complete (i.e., ExtensionDesc.ExtensionType is non-nil), -// then GetExtension parses the encoded field and returns a Go value of the specified type. -// If the field is not present, then the default value is returned (if one is specified), -// otherwise ErrMissingExtension is reported. -// -// If the descriptor is not type complete (i.e., ExtensionDesc.ExtensionType is nil), -// then GetExtension returns the raw encoded bytes of the field extension. 
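The accessors above (HasExtension, ClearExtension, GetExtension, SetExtension) are normally driven by generated *ExtensionDesc values. A hedged usage sketch; pb.MyMessage and pb.E_MyExt are hypothetical generated names used only for illustration and are not part of this diff:

    // Hypothetical generated names: pb.MyMessage is an extendable proto2 message
    // and pb.E_MyExt is its *ExtensionDesc for a string-typed extension.
    msg := &pb.MyMessage{}

    // SetExtension type-checks the value against the descriptor's ExtensionType.
    if err := proto.SetExtension(msg, pb.E_MyExt, proto.String("hello")); err != nil {
        log.Fatal(err)
    }
    fmt.Println(proto.HasExtension(msg, pb.E_MyExt)) // true

    // GetExtension returns interface{}; the dynamic type matches ExtensionType (*string here).
    v, err := proto.GetExtension(msg, pb.E_MyExt)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(*v.(*string)) // hello

    proto.ClearExtension(msg, pb.E_MyExt)
    fmt.Println(proto.HasExtension(msg, pb.E_MyExt)) // false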
-func GetExtension(pb Message, extension *ExtensionDesc) (interface{}, error) { - if epb, doki := pb.(extensionsBytes); doki { - ext := epb.GetExtensions() - return decodeExtensionFromBytes(extension, *ext) - } - - epb, err := extendable(pb) - if err != nil { - return nil, err - } - - if extension.ExtendedType != nil { - // can only check type if this is a complete descriptor - if cerr := checkExtensionTypes(epb, extension); cerr != nil { - return nil, cerr - } - } - - emap, mu := epb.extensionsRead() - if emap == nil { - return defaultExtensionValue(extension) - } - mu.Lock() - defer mu.Unlock() - e, ok := emap[extension.Field] - if !ok { - // defaultExtensionValue returns the default value or - // ErrMissingExtension if there is no default. - return defaultExtensionValue(extension) - } - - if e.value != nil { - // Already decoded. Check the descriptor, though. - if e.desc != extension { - // This shouldn't happen. If it does, it means that - // GetExtension was called twice with two different - // descriptors with the same field number. - return nil, errors.New("proto: descriptor conflict") - } - return e.value, nil - } - - if extension.ExtensionType == nil { - // incomplete descriptor - return e.enc, nil - } - - v, err := decodeExtension(e.enc, extension) - if err != nil { - return nil, err - } - - // Remember the decoded version and drop the encoded version. - // That way it is safe to mutate what we return. - e.value = v - e.desc = extension - e.enc = nil - emap[extension.Field] = e - return e.value, nil -} - -// defaultExtensionValue returns the default value for extension. -// If no default for an extension is defined ErrMissingExtension is returned. -func defaultExtensionValue(extension *ExtensionDesc) (interface{}, error) { - if extension.ExtensionType == nil { - // incomplete descriptor, so no default - return nil, ErrMissingExtension - } - - t := reflect.TypeOf(extension.ExtensionType) - props := extensionProperties(extension) - - sf, _, err := fieldDefault(t, props) - if err != nil { - return nil, err - } - - if sf == nil || sf.value == nil { - // There is no default value. - return nil, ErrMissingExtension - } - - if t.Kind() != reflect.Ptr { - // We do not need to return a Ptr, we can directly return sf.value. - return sf.value, nil - } - - // We need to return an interface{} that is a pointer to sf.value. - value := reflect.New(t).Elem() - value.Set(reflect.New(value.Type().Elem())) - if sf.kind == reflect.Int32 { - // We may have an int32 or an enum, but the underlying data is int32. - // Since we can't set an int32 into a non int32 reflect.value directly - // set it as a int32. - value.Elem().SetInt(int64(sf.value.(int32))) - } else { - value.Elem().Set(reflect.ValueOf(sf.value)) - } - return value.Interface(), nil -} - -// decodeExtension decodes an extension encoded in b. -func decodeExtension(b []byte, extension *ExtensionDesc) (interface{}, error) { - t := reflect.TypeOf(extension.ExtensionType) - unmarshal := typeUnmarshaler(t, extension.Tag) - - // t is a pointer to a struct, pointer to basic type or a slice. - // Allocate space to store the pointer/slice. 
- value := reflect.New(t).Elem() - - var err error - for { - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - wire := int(x) & 7 - - b, err = unmarshal(b, valToPointer(value.Addr()), wire) - if err != nil { - return nil, err - } - - if len(b) == 0 { - break - } - } - return value.Interface(), nil -} - -// GetExtensions returns a slice of the extensions present in pb that are also listed in es. -// The returned slice has the same length as es; missing extensions will appear as nil elements. -func GetExtensions(pb Message, es []*ExtensionDesc) (extensions []interface{}, err error) { - epb, err := extendable(pb) - if err != nil { - return nil, err - } - extensions = make([]interface{}, len(es)) - for i, e := range es { - extensions[i], err = GetExtension(epb, e) - if err == ErrMissingExtension { - err = nil - } - if err != nil { - return - } - } - return -} - -// ExtensionDescs returns a new slice containing pb's extension descriptors, in undefined order. -// For non-registered extensions, ExtensionDescs returns an incomplete descriptor containing -// just the Field field, which defines the extension's field number. -func ExtensionDescs(pb Message) ([]*ExtensionDesc, error) { - epb, err := extendable(pb) - if err != nil { - return nil, err - } - registeredExtensions := RegisteredExtensions(pb) - - emap, mu := epb.extensionsRead() - if emap == nil { - return nil, nil - } - mu.Lock() - defer mu.Unlock() - extensions := make([]*ExtensionDesc, 0, len(emap)) - for extid, e := range emap { - desc := e.desc - if desc == nil { - desc = registeredExtensions[extid] - if desc == nil { - desc = &ExtensionDesc{Field: extid} - } - } - - extensions = append(extensions, desc) - } - return extensions, nil -} - -// SetExtension sets the specified extension of pb to the specified value. -func SetExtension(pb Message, extension *ExtensionDesc, value interface{}) error { - if epb, ok := pb.(extensionsBytes); ok { - newb, err := encodeExtension(extension, value) - if err != nil { - return err - } - bb := epb.GetExtensions() - *bb = append(*bb, newb...) - return nil - } - epb, err := extendable(pb) - if err != nil { - return err - } - if err := checkExtensionTypes(epb, extension); err != nil { - return err - } - typ := reflect.TypeOf(extension.ExtensionType) - if typ != reflect.TypeOf(value) { - return fmt.Errorf("proto: bad extension value type. got: %T, want: %T", value, extension.ExtensionType) - } - // nil extension values need to be caught early, because the - // encoder can't distinguish an ErrNil due to a nil extension - // from an ErrNil due to a missing field. Extensions are - // always optional, so the encoder would just swallow the error - // and drop all the extensions from the encoded message. - if reflect.ValueOf(value).IsNil() { - return fmt.Errorf("proto: SetExtension called with nil value of type %T", value) - } - - extmap := epb.extensionsWrite() - extmap[extension.Field] = Extension{desc: extension, value: value} - return nil -} - -// ClearAllExtensions clears all extensions from pb. -func ClearAllExtensions(pb Message) { - if epb, doki := pb.(extensionsBytes); doki { - ext := epb.GetExtensions() - *ext = []byte{} - return - } - epb, err := extendable(pb) - if err != nil { - return - } - m := epb.extensionsWrite() - for k := range m { - delete(m, k) - } -} - -// A global registry of extensions. -// The generated code will register the generated descriptors by calling RegisterExtension. 
- -var extensionMaps = make(map[reflect.Type]map[int32]*ExtensionDesc) - -// RegisterExtension is called from the generated code. -func RegisterExtension(desc *ExtensionDesc) { - st := reflect.TypeOf(desc.ExtendedType).Elem() - m := extensionMaps[st] - if m == nil { - m = make(map[int32]*ExtensionDesc) - extensionMaps[st] = m - } - if _, ok := m[desc.Field]; ok { - panic("proto: duplicate extension registered: " + st.String() + " " + strconv.Itoa(int(desc.Field))) - } - m[desc.Field] = desc -} - -// RegisteredExtensions returns a map of the registered extensions of a -// protocol buffer struct, indexed by the extension number. -// The argument pb should be a nil pointer to the struct type. -func RegisteredExtensions(pb Message) map[int32]*ExtensionDesc { - return extensionMaps[reflect.TypeOf(pb).Elem()] -} diff --git a/vendor/github.com/gogo/protobuf/proto/extensions_gogo.go b/vendor/github.com/gogo/protobuf/proto/extensions_gogo.go deleted file mode 100644 index 53ebd8c..0000000 --- a/vendor/github.com/gogo/protobuf/proto/extensions_gogo.go +++ /dev/null @@ -1,368 +0,0 @@ -// Protocol Buffers for Go with Gadgets -// -// Copyright (c) 2013, The GoGo Authors. All rights reserved. -// http://github.com/gogo/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
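HasExtension above and the byte-slice helpers in the file removed next (size, BytesToExtensionsMap, deleteExtension) all walk raw extension bytes by decoding a tag varint and splitting it into field number and wire type. The split itself, as a tiny worked example:

    package main

    import "fmt"

    func main() {
        // A protobuf key packs the field number and wire type: tag = field<<3 | wire.
        // 0x08 is field 1, wire type 0 (varint); 0x12 is field 2, wire type 2
        // (length-delimited) -- the shapes size() has to measure and skip.
        for _, tag := range []uint64{0x08, 0x12} {
            fieldNum := int32(tag >> 3)
            wireType := int(tag & 0x7)
            fmt.Println(fieldNum, wireType) // 1 0, then 2 2
        }
    }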
- -package proto - -import ( - "bytes" - "errors" - "fmt" - "io" - "reflect" - "sort" - "strings" - "sync" -) - -type extensionsBytes interface { - Message - ExtensionRangeArray() []ExtensionRange - GetExtensions() *[]byte -} - -type slowExtensionAdapter struct { - extensionsBytes -} - -func (s slowExtensionAdapter) extensionsWrite() map[int32]Extension { - panic("Please report a bug to github.com/gogo/protobuf if you see this message: Writing extensions is not supported for extensions stored in a byte slice field.") -} - -func (s slowExtensionAdapter) extensionsRead() (map[int32]Extension, sync.Locker) { - b := s.GetExtensions() - m, err := BytesToExtensionsMap(*b) - if err != nil { - panic(err) - } - return m, notLocker{} -} - -func GetBoolExtension(pb Message, extension *ExtensionDesc, ifnotset bool) bool { - if reflect.ValueOf(pb).IsNil() { - return ifnotset - } - value, err := GetExtension(pb, extension) - if err != nil { - return ifnotset - } - if value == nil { - return ifnotset - } - if value.(*bool) == nil { - return ifnotset - } - return *(value.(*bool)) -} - -func (this *Extension) Equal(that *Extension) bool { - if err := this.Encode(); err != nil { - return false - } - if err := that.Encode(); err != nil { - return false - } - return bytes.Equal(this.enc, that.enc) -} - -func (this *Extension) Compare(that *Extension) int { - if err := this.Encode(); err != nil { - return 1 - } - if err := that.Encode(); err != nil { - return -1 - } - return bytes.Compare(this.enc, that.enc) -} - -func SizeOfInternalExtension(m extendableProto) (n int) { - info := getMarshalInfo(reflect.TypeOf(m)) - return info.sizeV1Extensions(m.extensionsWrite()) -} - -type sortableMapElem struct { - field int32 - ext Extension -} - -func newSortableExtensionsFromMap(m map[int32]Extension) sortableExtensions { - s := make(sortableExtensions, 0, len(m)) - for k, v := range m { - s = append(s, &sortableMapElem{field: k, ext: v}) - } - return s -} - -type sortableExtensions []*sortableMapElem - -func (this sortableExtensions) Len() int { return len(this) } - -func (this sortableExtensions) Swap(i, j int) { this[i], this[j] = this[j], this[i] } - -func (this sortableExtensions) Less(i, j int) bool { return this[i].field < this[j].field } - -func (this sortableExtensions) String() string { - sort.Sort(this) - ss := make([]string, len(this)) - for i := range this { - ss[i] = fmt.Sprintf("%d: %v", this[i].field, this[i].ext) - } - return "map[" + strings.Join(ss, ",") + "]" -} - -func StringFromInternalExtension(m extendableProto) string { - return StringFromExtensionsMap(m.extensionsWrite()) -} - -func StringFromExtensionsMap(m map[int32]Extension) string { - return newSortableExtensionsFromMap(m).String() -} - -func StringFromExtensionsBytes(ext []byte) string { - m, err := BytesToExtensionsMap(ext) - if err != nil { - panic(err) - } - return StringFromExtensionsMap(m) -} - -func EncodeInternalExtension(m extendableProto, data []byte) (n int, err error) { - return EncodeExtensionMap(m.extensionsWrite(), data) -} - -func EncodeExtensionMap(m map[int32]Extension, data []byte) (n int, err error) { - o := 0 - for _, e := range m { - if err := e.Encode(); err != nil { - return 0, err - } - n := copy(data[o:], e.enc) - if n != len(e.enc) { - return 0, io.ErrShortBuffer - } - o += n - } - return o, nil -} - -func GetRawExtension(m map[int32]Extension, id int32) ([]byte, error) { - e := m[id] - if err := e.Encode(); err != nil { - return nil, err - } - return e.enc, nil -} - -func size(buf []byte, wire int) (int, error) 
{ - switch wire { - case WireVarint: - _, n := DecodeVarint(buf) - return n, nil - case WireFixed64: - return 8, nil - case WireBytes: - v, n := DecodeVarint(buf) - return int(v) + n, nil - case WireFixed32: - return 4, nil - case WireStartGroup: - offset := 0 - for { - u, n := DecodeVarint(buf[offset:]) - fwire := int(u & 0x7) - offset += n - if fwire == WireEndGroup { - return offset, nil - } - s, err := size(buf[offset:], wire) - if err != nil { - return 0, err - } - offset += s - } - } - return 0, fmt.Errorf("proto: can't get size for unknown wire type %d", wire) -} - -func BytesToExtensionsMap(buf []byte) (map[int32]Extension, error) { - m := make(map[int32]Extension) - i := 0 - for i < len(buf) { - tag, n := DecodeVarint(buf[i:]) - if n <= 0 { - return nil, fmt.Errorf("unable to decode varint") - } - fieldNum := int32(tag >> 3) - wireType := int(tag & 0x7) - l, err := size(buf[i+n:], wireType) - if err != nil { - return nil, err - } - end := i + int(l) + n - m[int32(fieldNum)] = Extension{enc: buf[i:end]} - i = end - } - return m, nil -} - -func NewExtension(e []byte) Extension { - ee := Extension{enc: make([]byte, len(e))} - copy(ee.enc, e) - return ee -} - -func AppendExtension(e Message, tag int32, buf []byte) { - if ee, eok := e.(extensionsBytes); eok { - ext := ee.GetExtensions() - *ext = append(*ext, buf...) - return - } - if ee, eok := e.(extendableProto); eok { - m := ee.extensionsWrite() - ext := m[int32(tag)] // may be missing - ext.enc = append(ext.enc, buf...) - m[int32(tag)] = ext - } -} - -func encodeExtension(extension *ExtensionDesc, value interface{}) ([]byte, error) { - u := getMarshalInfo(reflect.TypeOf(extension.ExtendedType)) - ei := u.getExtElemInfo(extension) - v := value - p := toAddrPointer(&v, ei.isptr) - siz := ei.sizer(p, SizeVarint(ei.wiretag)) - buf := make([]byte, 0, siz) - return ei.marshaler(buf, p, ei.wiretag, false) -} - -func decodeExtensionFromBytes(extension *ExtensionDesc, buf []byte) (interface{}, error) { - o := 0 - for o < len(buf) { - tag, n := DecodeVarint((buf)[o:]) - fieldNum := int32(tag >> 3) - wireType := int(tag & 0x7) - if o+n > len(buf) { - return nil, fmt.Errorf("unable to decode extension") - } - l, err := size((buf)[o+n:], wireType) - if err != nil { - return nil, err - } - if int32(fieldNum) == extension.Field { - if o+n+l > len(buf) { - return nil, fmt.Errorf("unable to decode extension") - } - v, err := decodeExtension((buf)[o:o+n+l], extension) - if err != nil { - return nil, err - } - return v, nil - } - o += n + l - } - return defaultExtensionValue(extension) -} - -func (this *Extension) Encode() error { - if this.enc == nil { - var err error - this.enc, err = encodeExtension(this.desc, this.value) - if err != nil { - return err - } - } - return nil -} - -func (this Extension) GoString() string { - if err := this.Encode(); err != nil { - return fmt.Sprintf("error encoding extension: %v", err) - } - return fmt.Sprintf("proto.NewExtension(%#v)", this.enc) -} - -func SetUnsafeExtension(pb Message, fieldNum int32, value interface{}) error { - typ := reflect.TypeOf(pb).Elem() - ext, ok := extensionMaps[typ] - if !ok { - return fmt.Errorf("proto: bad extended type; %s is not extendable", typ.String()) - } - desc, ok := ext[fieldNum] - if !ok { - return errors.New("proto: bad extension number; not in declared ranges") - } - return SetExtension(pb, desc, value) -} - -func GetUnsafeExtension(pb Message, fieldNum int32) (interface{}, error) { - typ := reflect.TypeOf(pb).Elem() - ext, ok := extensionMaps[typ] - if !ok { - return nil, 
fmt.Errorf("proto: bad extended type; %s is not extendable", typ.String()) - } - desc, ok := ext[fieldNum] - if !ok { - return nil, fmt.Errorf("unregistered field number %d", fieldNum) - } - return GetExtension(pb, desc) -} - -func NewUnsafeXXX_InternalExtensions(m map[int32]Extension) XXX_InternalExtensions { - x := &XXX_InternalExtensions{ - p: new(struct { - mu sync.Mutex - extensionMap map[int32]Extension - }), - } - x.p.extensionMap = m - return *x -} - -func GetUnsafeExtensionsMap(extendable Message) map[int32]Extension { - pb := extendable.(extendableProto) - return pb.extensionsWrite() -} - -func deleteExtension(pb extensionsBytes, theFieldNum int32, offset int) int { - ext := pb.GetExtensions() - for offset < len(*ext) { - tag, n1 := DecodeVarint((*ext)[offset:]) - fieldNum := int32(tag >> 3) - wireType := int(tag & 0x7) - n2, err := size((*ext)[offset+n1:], wireType) - if err != nil { - panic(err) - } - newOffset := offset + n1 + n2 - if fieldNum == theFieldNum { - *ext = append((*ext)[:offset], (*ext)[newOffset:]...) - return offset - } - offset = newOffset - } - return -1 -} diff --git a/vendor/github.com/gogo/protobuf/proto/lib.go b/vendor/github.com/gogo/protobuf/proto/lib.go deleted file mode 100644 index d17f802..0000000 --- a/vendor/github.com/gogo/protobuf/proto/lib.go +++ /dev/null @@ -1,967 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -/* -Package proto converts data structures to and from the wire format of -protocol buffers. It works in concert with the Go source code generated -for .proto files by the protocol compiler. - -A summary of the properties of the protocol buffer interface -for a protocol buffer variable v: - - - Names are turned from camel_case to CamelCase for export. - - There are no methods on v to set fields; just treat - them as structure fields. 
- - There are getters that return a field's value if set, - and return the field's default value if unset. - The getters work even if the receiver is a nil message. - - The zero value for a struct is its correct initialization state. - All desired fields must be set before marshaling. - - A Reset() method will restore a protobuf struct to its zero state. - - Non-repeated fields are pointers to the values; nil means unset. - That is, optional or required field int32 f becomes F *int32. - - Repeated fields are slices. - - Helper functions are available to aid the setting of fields. - msg.Foo = proto.String("hello") // set field - - Constants are defined to hold the default values of all fields that - have them. They have the form Default_StructName_FieldName. - Because the getter methods handle defaulted values, - direct use of these constants should be rare. - - Enums are given type names and maps from names to values. - Enum values are prefixed by the enclosing message's name, or by the - enum's type name if it is a top-level enum. Enum types have a String - method, and a Enum method to assist in message construction. - - Nested messages, groups and enums have type names prefixed with the name of - the surrounding message type. - - Extensions are given descriptor names that start with E_, - followed by an underscore-delimited list of the nested messages - that contain it (if any) followed by the CamelCased name of the - extension field itself. HasExtension, ClearExtension, GetExtension - and SetExtension are functions for manipulating extensions. - - Oneof field sets are given a single field in their message, - with distinguished wrapper types for each possible field value. - - Marshal and Unmarshal are functions to encode and decode the wire format. - -When the .proto file specifies `syntax="proto3"`, there are some differences: - - - Non-repeated fields of non-message type are values instead of pointers. - - Enum types do not get an Enum method. - -The simplest way to describe this is to see an example. 
-Given file test.proto, containing - - package example; - - enum FOO { X = 17; } - - message Test { - required string label = 1; - optional int32 type = 2 [default=77]; - repeated int64 reps = 3; - optional group OptionalGroup = 4 { - required string RequiredField = 5; - } - oneof union { - int32 number = 6; - string name = 7; - } - } - -The resulting file, test.pb.go, is: - - package example - - import proto "github.com/gogo/protobuf/proto" - import math "math" - - type FOO int32 - const ( - FOO_X FOO = 17 - ) - var FOO_name = map[int32]string{ - 17: "X", - } - var FOO_value = map[string]int32{ - "X": 17, - } - - func (x FOO) Enum() *FOO { - p := new(FOO) - *p = x - return p - } - func (x FOO) String() string { - return proto.EnumName(FOO_name, int32(x)) - } - func (x *FOO) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(FOO_value, data) - if err != nil { - return err - } - *x = FOO(value) - return nil - } - - type Test struct { - Label *string `protobuf:"bytes,1,req,name=label" json:"label,omitempty"` - Type *int32 `protobuf:"varint,2,opt,name=type,def=77" json:"type,omitempty"` - Reps []int64 `protobuf:"varint,3,rep,name=reps" json:"reps,omitempty"` - Optionalgroup *Test_OptionalGroup `protobuf:"group,4,opt,name=OptionalGroup" json:"optionalgroup,omitempty"` - // Types that are valid to be assigned to Union: - // *Test_Number - // *Test_Name - Union isTest_Union `protobuf_oneof:"union"` - XXX_unrecognized []byte `json:"-"` - } - func (m *Test) Reset() { *m = Test{} } - func (m *Test) String() string { return proto.CompactTextString(m) } - func (*Test) ProtoMessage() {} - - type isTest_Union interface { - isTest_Union() - } - - type Test_Number struct { - Number int32 `protobuf:"varint,6,opt,name=number"` - } - type Test_Name struct { - Name string `protobuf:"bytes,7,opt,name=name"` - } - - func (*Test_Number) isTest_Union() {} - func (*Test_Name) isTest_Union() {} - - func (m *Test) GetUnion() isTest_Union { - if m != nil { - return m.Union - } - return nil - } - const Default_Test_Type int32 = 77 - - func (m *Test) GetLabel() string { - if m != nil && m.Label != nil { - return *m.Label - } - return "" - } - - func (m *Test) GetType() int32 { - if m != nil && m.Type != nil { - return *m.Type - } - return Default_Test_Type - } - - func (m *Test) GetOptionalgroup() *Test_OptionalGroup { - if m != nil { - return m.Optionalgroup - } - return nil - } - - type Test_OptionalGroup struct { - RequiredField *string `protobuf:"bytes,5,req" json:"RequiredField,omitempty"` - } - func (m *Test_OptionalGroup) Reset() { *m = Test_OptionalGroup{} } - func (m *Test_OptionalGroup) String() string { return proto.CompactTextString(m) } - - func (m *Test_OptionalGroup) GetRequiredField() string { - if m != nil && m.RequiredField != nil { - return *m.RequiredField - } - return "" - } - - func (m *Test) GetNumber() int32 { - if x, ok := m.GetUnion().(*Test_Number); ok { - return x.Number - } - return 0 - } - - func (m *Test) GetName() string { - if x, ok := m.GetUnion().(*Test_Name); ok { - return x.Name - } - return "" - } - - func init() { - proto.RegisterEnum("example.FOO", FOO_name, FOO_value) - } - -To create and play with a Test object: - - package main - - import ( - "log" - - "github.com/gogo/protobuf/proto" - pb "./example.pb" - ) - - func main() { - test := &pb.Test{ - Label: proto.String("hello"), - Type: proto.Int32(17), - Reps: []int64{1, 2, 3}, - Optionalgroup: &pb.Test_OptionalGroup{ - RequiredField: proto.String("good bye"), - }, - Union: &pb.Test_Name{"fred"}, - } - 
data, err := proto.Marshal(test) - if err != nil { - log.Fatal("marshaling error: ", err) - } - newTest := &pb.Test{} - err = proto.Unmarshal(data, newTest) - if err != nil { - log.Fatal("unmarshaling error: ", err) - } - // Now test and newTest contain the same data. - if test.GetLabel() != newTest.GetLabel() { - log.Fatalf("data mismatch %q != %q", test.GetLabel(), newTest.GetLabel()) - } - // Use a type switch to determine which oneof was set. - switch u := test.Union.(type) { - case *pb.Test_Number: // u.Number contains the number. - case *pb.Test_Name: // u.Name contains the string. - } - // etc. - } -*/ -package proto - -import ( - "encoding/json" - "fmt" - "log" - "reflect" - "sort" - "strconv" - "sync" -) - -// RequiredNotSetError is an error type returned by either Marshal or Unmarshal. -// Marshal reports this when a required field is not initialized. -// Unmarshal reports this when a required field is missing from the wire data. -type RequiredNotSetError struct{ field string } - -func (e *RequiredNotSetError) Error() string { - if e.field == "" { - return fmt.Sprintf("proto: required field not set") - } - return fmt.Sprintf("proto: required field %q not set", e.field) -} -func (e *RequiredNotSetError) RequiredNotSet() bool { - return true -} - -type invalidUTF8Error struct{ field string } - -func (e *invalidUTF8Error) Error() string { - if e.field == "" { - return "proto: invalid UTF-8 detected" - } - return fmt.Sprintf("proto: field %q contains invalid UTF-8", e.field) -} -func (e *invalidUTF8Error) InvalidUTF8() bool { - return true -} - -// errInvalidUTF8 is a sentinel error to identify fields with invalid UTF-8. -// This error should not be exposed to the external API as such errors should -// be recreated with the field information. -var errInvalidUTF8 = &invalidUTF8Error{} - -// isNonFatal reports whether the error is either a RequiredNotSet error -// or a InvalidUTF8 error. -func isNonFatal(err error) bool { - if re, ok := err.(interface{ RequiredNotSet() bool }); ok && re.RequiredNotSet() { - return true - } - if re, ok := err.(interface{ InvalidUTF8() bool }); ok && re.InvalidUTF8() { - return true - } - return false -} - -type nonFatal struct{ E error } - -// Merge merges err into nf and reports whether it was successful. -// Otherwise it returns false for any fatal non-nil errors. -func (nf *nonFatal) Merge(err error) (ok bool) { - if err == nil { - return true // not an error - } - if !isNonFatal(err) { - return false // fatal error - } - if nf.E == nil { - nf.E = err // store first instance of non-fatal error - } - return true -} - -// Message is implemented by generated protocol buffer messages. -type Message interface { - Reset() - String() string - ProtoMessage() -} - -// A Buffer is a buffer manager for marshaling and unmarshaling -// protocol buffers. It may be reused between invocations to -// reduce memory usage. It is not necessary to use a Buffer; -// the global functions Marshal and Unmarshal create a -// temporary Buffer and are fine for most applications. -type Buffer struct { - buf []byte // encode/decode byte stream - index int // read point - - deterministic bool -} - -// NewBuffer allocates a new Buffer and initializes its internal data to -// the contents of the argument slice. -func NewBuffer(e []byte) *Buffer { - return &Buffer{buf: e} -} - -// Reset resets the Buffer, ready for marshaling a new protocol buffer. 
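The Buffer type introduced at the end of this hunk is the reusable workspace behind Marshal and Unmarshal, and the low-level writers from the deleted encode.go can drive it directly. A small sketch, assuming the vendored gogo import path:

    package main

    import (
        "fmt"

        "github.com/gogo/protobuf/proto"
    )

    func main() {
        buf := proto.NewBuffer(nil)
        _ = buf.EncodeVarint(300)        // ac 02
        _ = buf.EncodeStringBytes("hi")  // length prefix then bytes: 02 68 69
        fmt.Printf("% x\n", buf.Bytes()) // ac 02 02 68 69
    }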
-func (p *Buffer) Reset() { - p.buf = p.buf[0:0] // for reading/writing - p.index = 0 // for reading -} - -// SetBuf replaces the internal buffer with the slice, -// ready for unmarshaling the contents of the slice. -func (p *Buffer) SetBuf(s []byte) { - p.buf = s - p.index = 0 -} - -// Bytes returns the contents of the Buffer. -func (p *Buffer) Bytes() []byte { return p.buf } - -// SetDeterministic sets whether to use deterministic serialization. -// -// Deterministic serialization guarantees that for a given binary, equal -// messages will always be serialized to the same bytes. This implies: -// -// - Repeated serialization of a message will return the same bytes. -// - Different processes of the same binary (which may be executing on -// different machines) will serialize equal messages to the same bytes. -// -// Note that the deterministic serialization is NOT canonical across -// languages. It is not guaranteed to remain stable over time. It is unstable -// across different builds with schema changes due to unknown fields. -// Users who need canonical serialization (e.g., persistent storage in a -// canonical form, fingerprinting, etc.) should define their own -// canonicalization specification and implement their own serializer rather -// than relying on this API. -// -// If deterministic serialization is requested, map entries will be sorted -// by keys in lexographical order. This is an implementation detail and -// subject to change. -func (p *Buffer) SetDeterministic(deterministic bool) { - p.deterministic = deterministic -} - -/* - * Helper routines for simplifying the creation of optional fields of basic type. - */ - -// Bool is a helper routine that allocates a new bool value -// to store v and returns a pointer to it. -func Bool(v bool) *bool { - return &v -} - -// Int32 is a helper routine that allocates a new int32 value -// to store v and returns a pointer to it. -func Int32(v int32) *int32 { - return &v -} - -// Int is a helper routine that allocates a new int32 value -// to store v and returns a pointer to it, but unlike Int32 -// its argument value is an int. -func Int(v int) *int32 { - p := new(int32) - *p = int32(v) - return p -} - -// Int64 is a helper routine that allocates a new int64 value -// to store v and returns a pointer to it. -func Int64(v int64) *int64 { - return &v -} - -// Float32 is a helper routine that allocates a new float32 value -// to store v and returns a pointer to it. -func Float32(v float32) *float32 { - return &v -} - -// Float64 is a helper routine that allocates a new float64 value -// to store v and returns a pointer to it. -func Float64(v float64) *float64 { - return &v -} - -// Uint32 is a helper routine that allocates a new uint32 value -// to store v and returns a pointer to it. -func Uint32(v uint32) *uint32 { - return &v -} - -// Uint64 is a helper routine that allocates a new uint64 value -// to store v and returns a pointer to it. -func Uint64(v uint64) *uint64 { - return &v -} - -// String is a helper routine that allocates a new string value -// to store v and returns a pointer to it. -func String(v string) *string { - return &v -} - -// EnumName is a helper function to simplify printing protocol buffer enums -// by name. Given an enum map and a value, it returns a useful string. 
-func EnumName(m map[int32]string, v int32) string { - s, ok := m[v] - if ok { - return s - } - return strconv.Itoa(int(v)) -} - -// UnmarshalJSONEnum is a helper function to simplify recovering enum int values -// from their JSON-encoded representation. Given a map from the enum's symbolic -// names to its int values, and a byte buffer containing the JSON-encoded -// value, it returns an int32 that can be cast to the enum type by the caller. -// -// The function can deal with both JSON representations, numeric and symbolic. -func UnmarshalJSONEnum(m map[string]int32, data []byte, enumName string) (int32, error) { - if data[0] == '"' { - // New style: enums are strings. - var repr string - if err := json.Unmarshal(data, &repr); err != nil { - return -1, err - } - val, ok := m[repr] - if !ok { - return 0, fmt.Errorf("unrecognized enum %s value %q", enumName, repr) - } - return val, nil - } - // Old style: enums are ints. - var val int32 - if err := json.Unmarshal(data, &val); err != nil { - return 0, fmt.Errorf("cannot unmarshal %#q into enum %s", data, enumName) - } - return val, nil -} - -// DebugPrint dumps the encoded data in b in a debugging format with a header -// including the string s. Used in testing but made available for general debugging. -func (p *Buffer) DebugPrint(s string, b []byte) { - var u uint64 - - obuf := p.buf - sindex := p.index - p.buf = b - p.index = 0 - depth := 0 - - fmt.Printf("\n--- %s ---\n", s) - -out: - for { - for i := 0; i < depth; i++ { - fmt.Print(" ") - } - - index := p.index - if index == len(p.buf) { - break - } - - op, err := p.DecodeVarint() - if err != nil { - fmt.Printf("%3d: fetching op err %v\n", index, err) - break out - } - tag := op >> 3 - wire := op & 7 - - switch wire { - default: - fmt.Printf("%3d: t=%3d unknown wire=%d\n", - index, tag, wire) - break out - - case WireBytes: - var r []byte - - r, err = p.DecodeRawBytes(false) - if err != nil { - break out - } - fmt.Printf("%3d: t=%3d bytes [%d]", index, tag, len(r)) - if len(r) <= 6 { - for i := 0; i < len(r); i++ { - fmt.Printf(" %.2x", r[i]) - } - } else { - for i := 0; i < 3; i++ { - fmt.Printf(" %.2x", r[i]) - } - fmt.Printf(" ..") - for i := len(r) - 3; i < len(r); i++ { - fmt.Printf(" %.2x", r[i]) - } - } - fmt.Printf("\n") - - case WireFixed32: - u, err = p.DecodeFixed32() - if err != nil { - fmt.Printf("%3d: t=%3d fix32 err %v\n", index, tag, err) - break out - } - fmt.Printf("%3d: t=%3d fix32 %d\n", index, tag, u) - - case WireFixed64: - u, err = p.DecodeFixed64() - if err != nil { - fmt.Printf("%3d: t=%3d fix64 err %v\n", index, tag, err) - break out - } - fmt.Printf("%3d: t=%3d fix64 %d\n", index, tag, u) - - case WireVarint: - u, err = p.DecodeVarint() - if err != nil { - fmt.Printf("%3d: t=%3d varint err %v\n", index, tag, err) - break out - } - fmt.Printf("%3d: t=%3d varint %d\n", index, tag, u) - - case WireStartGroup: - fmt.Printf("%3d: t=%3d start\n", index, tag) - depth++ - - case WireEndGroup: - depth-- - fmt.Printf("%3d: t=%3d end\n", index, tag) - } - } - - if depth != 0 { - fmt.Printf("%3d: start-end not balanced %d\n", p.index, depth) - } - fmt.Printf("\n") - - p.buf = obuf - p.index = sindex -} - -// SetDefaults sets unset protocol buffer fields to their default values. -// It only modifies fields that are both unset and have defined defaults. -// It recursively sets default values in any non-nil sub-messages. -func SetDefaults(pb Message) { - setDefaults(reflect.ValueOf(pb), true, false) -} - -// v is a struct. 
-func setDefaults(v reflect.Value, recur, zeros bool) { - if v.Kind() == reflect.Ptr { - v = v.Elem() - } - - defaultMu.RLock() - dm, ok := defaults[v.Type()] - defaultMu.RUnlock() - if !ok { - dm = buildDefaultMessage(v.Type()) - defaultMu.Lock() - defaults[v.Type()] = dm - defaultMu.Unlock() - } - - for _, sf := range dm.scalars { - f := v.Field(sf.index) - if !f.IsNil() { - // field already set - continue - } - dv := sf.value - if dv == nil && !zeros { - // no explicit default, and don't want to set zeros - continue - } - fptr := f.Addr().Interface() // **T - // TODO: Consider batching the allocations we do here. - switch sf.kind { - case reflect.Bool: - b := new(bool) - if dv != nil { - *b = dv.(bool) - } - *(fptr.(**bool)) = b - case reflect.Float32: - f := new(float32) - if dv != nil { - *f = dv.(float32) - } - *(fptr.(**float32)) = f - case reflect.Float64: - f := new(float64) - if dv != nil { - *f = dv.(float64) - } - *(fptr.(**float64)) = f - case reflect.Int32: - // might be an enum - if ft := f.Type(); ft != int32PtrType { - // enum - f.Set(reflect.New(ft.Elem())) - if dv != nil { - f.Elem().SetInt(int64(dv.(int32))) - } - } else { - // int32 field - i := new(int32) - if dv != nil { - *i = dv.(int32) - } - *(fptr.(**int32)) = i - } - case reflect.Int64: - i := new(int64) - if dv != nil { - *i = dv.(int64) - } - *(fptr.(**int64)) = i - case reflect.String: - s := new(string) - if dv != nil { - *s = dv.(string) - } - *(fptr.(**string)) = s - case reflect.Uint8: - // exceptional case: []byte - var b []byte - if dv != nil { - db := dv.([]byte) - b = make([]byte, len(db)) - copy(b, db) - } else { - b = []byte{} - } - *(fptr.(*[]byte)) = b - case reflect.Uint32: - u := new(uint32) - if dv != nil { - *u = dv.(uint32) - } - *(fptr.(**uint32)) = u - case reflect.Uint64: - u := new(uint64) - if dv != nil { - *u = dv.(uint64) - } - *(fptr.(**uint64)) = u - default: - log.Printf("proto: can't set default for field %v (sf.kind=%v)", f, sf.kind) - } - } - - for _, ni := range dm.nested { - f := v.Field(ni) - // f is *T or T or []*T or []T - switch f.Kind() { - case reflect.Struct: - setDefaults(f, recur, zeros) - - case reflect.Ptr: - if f.IsNil() { - continue - } - setDefaults(f, recur, zeros) - - case reflect.Slice: - for i := 0; i < f.Len(); i++ { - e := f.Index(i) - if e.Kind() == reflect.Ptr && e.IsNil() { - continue - } - setDefaults(e, recur, zeros) - } - - case reflect.Map: - for _, k := range f.MapKeys() { - e := f.MapIndex(k) - if e.IsNil() { - continue - } - setDefaults(e, recur, zeros) - } - } - } -} - -var ( - // defaults maps a protocol buffer struct type to a slice of the fields, - // with its scalar fields set to their proto-declared non-zero default values. - defaultMu sync.RWMutex - defaults = make(map[reflect.Type]defaultMessage) - - int32PtrType = reflect.TypeOf((*int32)(nil)) -) - -// defaultMessage represents information about the default values of a message. -type defaultMessage struct { - scalars []scalarField - nested []int // struct field index of nested messages -} - -type scalarField struct { - index int // struct field index - kind reflect.Kind // element type (the T in *T or []T) - value interface{} // the proto-declared default value, or nil -} - -// t is a struct type. 
-func buildDefaultMessage(t reflect.Type) (dm defaultMessage) { - sprop := GetProperties(t) - for _, prop := range sprop.Prop { - fi, ok := sprop.decoderTags.get(prop.Tag) - if !ok { - // XXX_unrecognized - continue - } - ft := t.Field(fi).Type - - sf, nested, err := fieldDefault(ft, prop) - switch { - case err != nil: - log.Print(err) - case nested: - dm.nested = append(dm.nested, fi) - case sf != nil: - sf.index = fi - dm.scalars = append(dm.scalars, *sf) - } - } - - return dm -} - -// fieldDefault returns the scalarField for field type ft. -// sf will be nil if the field can not have a default. -// nestedMessage will be true if this is a nested message. -// Note that sf.index is not set on return. -func fieldDefault(ft reflect.Type, prop *Properties) (sf *scalarField, nestedMessage bool, err error) { - var canHaveDefault bool - switch ft.Kind() { - case reflect.Struct: - nestedMessage = true // non-nullable - - case reflect.Ptr: - if ft.Elem().Kind() == reflect.Struct { - nestedMessage = true - } else { - canHaveDefault = true // proto2 scalar field - } - - case reflect.Slice: - switch ft.Elem().Kind() { - case reflect.Ptr, reflect.Struct: - nestedMessage = true // repeated message - case reflect.Uint8: - canHaveDefault = true // bytes field - } - - case reflect.Map: - if ft.Elem().Kind() == reflect.Ptr { - nestedMessage = true // map with message values - } - } - - if !canHaveDefault { - if nestedMessage { - return nil, true, nil - } - return nil, false, nil - } - - // We now know that ft is a pointer or slice. - sf = &scalarField{kind: ft.Elem().Kind()} - - // scalar fields without defaults - if !prop.HasDefault { - return sf, false, nil - } - - // a scalar field: either *T or []byte - switch ft.Elem().Kind() { - case reflect.Bool: - x, err := strconv.ParseBool(prop.Default) - if err != nil { - return nil, false, fmt.Errorf("proto: bad default bool %q: %v", prop.Default, err) - } - sf.value = x - case reflect.Float32: - x, err := strconv.ParseFloat(prop.Default, 32) - if err != nil { - return nil, false, fmt.Errorf("proto: bad default float32 %q: %v", prop.Default, err) - } - sf.value = float32(x) - case reflect.Float64: - x, err := strconv.ParseFloat(prop.Default, 64) - if err != nil { - return nil, false, fmt.Errorf("proto: bad default float64 %q: %v", prop.Default, err) - } - sf.value = x - case reflect.Int32: - x, err := strconv.ParseInt(prop.Default, 10, 32) - if err != nil { - return nil, false, fmt.Errorf("proto: bad default int32 %q: %v", prop.Default, err) - } - sf.value = int32(x) - case reflect.Int64: - x, err := strconv.ParseInt(prop.Default, 10, 64) - if err != nil { - return nil, false, fmt.Errorf("proto: bad default int64 %q: %v", prop.Default, err) - } - sf.value = x - case reflect.String: - sf.value = prop.Default - case reflect.Uint8: - // []byte (not *uint8) - sf.value = []byte(prop.Default) - case reflect.Uint32: - x, err := strconv.ParseUint(prop.Default, 10, 32) - if err != nil { - return nil, false, fmt.Errorf("proto: bad default uint32 %q: %v", prop.Default, err) - } - sf.value = uint32(x) - case reflect.Uint64: - x, err := strconv.ParseUint(prop.Default, 10, 64) - if err != nil { - return nil, false, fmt.Errorf("proto: bad default uint64 %q: %v", prop.Default, err) - } - sf.value = x - default: - return nil, false, fmt.Errorf("proto: unhandled def kind %v", ft.Elem().Kind()) - } - - return sf, false, nil -} - -// mapKeys returns a sort.Interface to be used for sorting the map keys. -// Map fields may have key types of non-float scalars, strings and enums. 
-func mapKeys(vs []reflect.Value) sort.Interface { - s := mapKeySorter{vs: vs} - - // Type specialization per https://developers.google.com/protocol-buffers/docs/proto#maps. - if len(vs) == 0 { - return s - } - switch vs[0].Kind() { - case reflect.Int32, reflect.Int64: - s.less = func(a, b reflect.Value) bool { return a.Int() < b.Int() } - case reflect.Uint32, reflect.Uint64: - s.less = func(a, b reflect.Value) bool { return a.Uint() < b.Uint() } - case reflect.Bool: - s.less = func(a, b reflect.Value) bool { return !a.Bool() && b.Bool() } // false < true - case reflect.String: - s.less = func(a, b reflect.Value) bool { return a.String() < b.String() } - default: - panic(fmt.Sprintf("unsupported map key type: %v", vs[0].Kind())) - } - - return s -} - -type mapKeySorter struct { - vs []reflect.Value - less func(a, b reflect.Value) bool -} - -func (s mapKeySorter) Len() int { return len(s.vs) } -func (s mapKeySorter) Swap(i, j int) { s.vs[i], s.vs[j] = s.vs[j], s.vs[i] } -func (s mapKeySorter) Less(i, j int) bool { - return s.less(s.vs[i], s.vs[j]) -} - -// isProto3Zero reports whether v is a zero proto3 value. -func isProto3Zero(v reflect.Value) bool { - switch v.Kind() { - case reflect.Bool: - return !v.Bool() - case reflect.Int32, reflect.Int64: - return v.Int() == 0 - case reflect.Uint32, reflect.Uint64: - return v.Uint() == 0 - case reflect.Float32, reflect.Float64: - return v.Float() == 0 - case reflect.String: - return v.String() == "" - } - return false -} - -// ProtoPackageIsVersion2 is referenced from generated protocol buffer files -// to assert that that code is compatible with this version of the proto package. -const GoGoProtoPackageIsVersion2 = true - -// ProtoPackageIsVersion1 is referenced from generated protocol buffer files -// to assert that that code is compatible with this version of the proto package. -const GoGoProtoPackageIsVersion1 = true - -// InternalMessageInfo is a type used internally by generated .pb.go files. -// This type is not intended to be used by non-generated code. -// This type is not subject to any compatibility guarantee. -type InternalMessageInfo struct { - marshal *marshalInfo - unmarshal *unmarshalInfo - merge *mergeInfo - discard *discardInfo -} diff --git a/vendor/github.com/gogo/protobuf/proto/lib_gogo.go b/vendor/github.com/gogo/protobuf/proto/lib_gogo.go deleted file mode 100644 index b3aa391..0000000 --- a/vendor/github.com/gogo/protobuf/proto/lib_gogo.go +++ /dev/null @@ -1,50 +0,0 @@ -// Protocol Buffers for Go with Gadgets -// -// Copyright (c) 2013, The GoGo Authors. All rights reserved. -// http://github.com/gogo/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -import ( - "encoding/json" - "strconv" -) - -type Sizer interface { - Size() int -} - -type ProtoSizer interface { - ProtoSize() int -} - -func MarshalJSONEnum(m map[int32]string, value int32) ([]byte, error) { - s, ok := m[value] - if !ok { - s = strconv.Itoa(int(value)) - } - return json.Marshal(s) -} diff --git a/vendor/github.com/gogo/protobuf/proto/message_set.go b/vendor/github.com/gogo/protobuf/proto/message_set.go deleted file mode 100644 index f48a756..0000000 --- a/vendor/github.com/gogo/protobuf/proto/message_set.go +++ /dev/null @@ -1,181 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -/* - * Support for message sets. - */ - -import ( - "errors" -) - -// errNoMessageTypeID occurs when a protocol buffer does not have a message type ID. -// A message type ID is required for storing a protocol buffer in a message set. -var errNoMessageTypeID = errors.New("proto does not have a message type ID") - -// The first two types (_MessageSet_Item and messageSet) -// model what the protocol compiler produces for the following protocol message: -// message MessageSet { -// repeated group Item = 1 { -// required int32 type_id = 2; -// required string message = 3; -// }; -// } -// That is the MessageSet wire format. 
We can't use a proto to generate these -// because that would introduce a circular dependency between it and this package. - -type _MessageSet_Item struct { - TypeId *int32 `protobuf:"varint,2,req,name=type_id"` - Message []byte `protobuf:"bytes,3,req,name=message"` -} - -type messageSet struct { - Item []*_MessageSet_Item `protobuf:"group,1,rep"` - XXX_unrecognized []byte - // TODO: caching? -} - -// Make sure messageSet is a Message. -var _ Message = (*messageSet)(nil) - -// messageTypeIder is an interface satisfied by a protocol buffer type -// that may be stored in a MessageSet. -type messageTypeIder interface { - MessageTypeId() int32 -} - -func (ms *messageSet) find(pb Message) *_MessageSet_Item { - mti, ok := pb.(messageTypeIder) - if !ok { - return nil - } - id := mti.MessageTypeId() - for _, item := range ms.Item { - if *item.TypeId == id { - return item - } - } - return nil -} - -func (ms *messageSet) Has(pb Message) bool { - return ms.find(pb) != nil -} - -func (ms *messageSet) Unmarshal(pb Message) error { - if item := ms.find(pb); item != nil { - return Unmarshal(item.Message, pb) - } - if _, ok := pb.(messageTypeIder); !ok { - return errNoMessageTypeID - } - return nil // TODO: return error instead? -} - -func (ms *messageSet) Marshal(pb Message) error { - msg, err := Marshal(pb) - if err != nil { - return err - } - if item := ms.find(pb); item != nil { - // reuse existing item - item.Message = msg - return nil - } - - mti, ok := pb.(messageTypeIder) - if !ok { - return errNoMessageTypeID - } - - mtid := mti.MessageTypeId() - ms.Item = append(ms.Item, &_MessageSet_Item{ - TypeId: &mtid, - Message: msg, - }) - return nil -} - -func (ms *messageSet) Reset() { *ms = messageSet{} } -func (ms *messageSet) String() string { return CompactTextString(ms) } -func (*messageSet) ProtoMessage() {} - -// Support for the message_set_wire_format message option. - -func skipVarint(buf []byte) []byte { - i := 0 - for ; buf[i]&0x80 != 0; i++ { - } - return buf[i+1:] -} - -// unmarshalMessageSet decodes the extension map encoded in buf in the message set wire format. -// It is called by Unmarshal methods on protocol buffer messages with the message_set_wire_format option. -func unmarshalMessageSet(buf []byte, exts interface{}) error { - var m map[int32]Extension - switch exts := exts.(type) { - case *XXX_InternalExtensions: - m = exts.extensionsWrite() - case map[int32]Extension: - m = exts - default: - return errors.New("proto: not an extension map") - } - - ms := new(messageSet) - if err := Unmarshal(buf, ms); err != nil { - return err - } - for _, item := range ms.Item { - id := *item.TypeId - msg := item.Message - - // Restore wire type and field number varint, plus length varint. - // Be careful to preserve duplicate items. - b := EncodeVarint(uint64(id)<<3 | WireBytes) - if ext, ok := m[id]; ok { - // Existing data; rip off the tag and length varint - // so we join the new data correctly. - // We can assume that ext.enc is set because we are unmarshaling. - o := ext.enc[len(b):] // skip wire type and field number - _, n := DecodeVarint(o) // calculate length of length varint - o = o[n:] // skip length varint - msg = append(o, msg...) // join old data and new data - } - b = append(b, EncodeVarint(uint64(len(msg)))...) - b = append(b, msg...) 
- - m[id] = Extension{enc: b} - } - return nil -} diff --git a/vendor/github.com/gogo/protobuf/proto/pointer_reflect.go b/vendor/github.com/gogo/protobuf/proto/pointer_reflect.go deleted file mode 100644 index b6cad90..0000000 --- a/vendor/github.com/gogo/protobuf/proto/pointer_reflect.go +++ /dev/null @@ -1,357 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2012 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// +build purego appengine js - -// This file contains an implementation of proto field accesses using package reflect. -// It is slower than the code in pointer_unsafe.go but it avoids package unsafe and can -// be used on App Engine. - -package proto - -import ( - "reflect" - "sync" -) - -const unsafeAllowed = false - -// A field identifies a field in a struct, accessible from a pointer. -// In this implementation, a field is identified by the sequence of field indices -// passed to reflect's FieldByIndex. -type field []int - -// toField returns a field equivalent to the given reflect field. -func toField(f *reflect.StructField) field { - return f.Index -} - -// invalidField is an invalid field identifier. -var invalidField = field(nil) - -// zeroField is a noop when calling pointer.offset. -var zeroField = field([]int{}) - -// IsValid reports whether the field identifier is valid. -func (f field) IsValid() bool { return f != nil } - -// The pointer type is for the table-driven decoder. -// The implementation here uses a reflect.Value of pointer type to -// create a generic pointer. In pointer_unsafe.go we use unsafe -// instead of reflect to implement the same (but faster) interface. -type pointer struct { - v reflect.Value -} - -// toPointer converts an interface of pointer type to a pointer -// that points to the same target. -func toPointer(i *Message) pointer { - return pointer{v: reflect.ValueOf(*i)} -} - -// toAddrPointer converts an interface to a pointer that points to -// the interface data. 
-func toAddrPointer(i *interface{}, isptr bool) pointer { - v := reflect.ValueOf(*i) - u := reflect.New(v.Type()) - u.Elem().Set(v) - return pointer{v: u} -} - -// valToPointer converts v to a pointer. v must be of pointer type. -func valToPointer(v reflect.Value) pointer { - return pointer{v: v} -} - -// offset converts from a pointer to a structure to a pointer to -// one of its fields. -func (p pointer) offset(f field) pointer { - return pointer{v: p.v.Elem().FieldByIndex(f).Addr()} -} - -func (p pointer) isNil() bool { - return p.v.IsNil() -} - -// grow updates the slice s in place to make it one element longer. -// s must be addressable. -// Returns the (addressable) new element. -func grow(s reflect.Value) reflect.Value { - n, m := s.Len(), s.Cap() - if n < m { - s.SetLen(n + 1) - } else { - s.Set(reflect.Append(s, reflect.Zero(s.Type().Elem()))) - } - return s.Index(n) -} - -func (p pointer) toInt64() *int64 { - return p.v.Interface().(*int64) -} -func (p pointer) toInt64Ptr() **int64 { - return p.v.Interface().(**int64) -} -func (p pointer) toInt64Slice() *[]int64 { - return p.v.Interface().(*[]int64) -} - -var int32ptr = reflect.TypeOf((*int32)(nil)) - -func (p pointer) toInt32() *int32 { - return p.v.Convert(int32ptr).Interface().(*int32) -} - -// The toInt32Ptr/Slice methods don't work because of enums. -// Instead, we must use set/get methods for the int32ptr/slice case. -/* - func (p pointer) toInt32Ptr() **int32 { - return p.v.Interface().(**int32) -} - func (p pointer) toInt32Slice() *[]int32 { - return p.v.Interface().(*[]int32) -} -*/ -func (p pointer) getInt32Ptr() *int32 { - if p.v.Type().Elem().Elem() == reflect.TypeOf(int32(0)) { - // raw int32 type - return p.v.Elem().Interface().(*int32) - } - // an enum - return p.v.Elem().Convert(int32PtrType).Interface().(*int32) -} -func (p pointer) setInt32Ptr(v int32) { - // Allocate value in a *int32. Possibly convert that to a *enum. - // Then assign it to a **int32 or **enum. - // Note: we can convert *int32 to *enum, but we can't convert - // **int32 to **enum! - p.v.Elem().Set(reflect.ValueOf(&v).Convert(p.v.Type().Elem())) -} - -// getInt32Slice copies []int32 from p as a new slice. -// This behavior differs from the implementation in pointer_unsafe.go. -func (p pointer) getInt32Slice() []int32 { - if p.v.Type().Elem().Elem() == reflect.TypeOf(int32(0)) { - // raw int32 type - return p.v.Elem().Interface().([]int32) - } - // an enum - // Allocate a []int32, then assign []enum's values into it. - // Note: we can't convert []enum to []int32. - slice := p.v.Elem() - s := make([]int32, slice.Len()) - for i := 0; i < slice.Len(); i++ { - s[i] = int32(slice.Index(i).Int()) - } - return s -} - -// setInt32Slice copies []int32 into p as a new slice. -// This behavior differs from the implementation in pointer_unsafe.go. -func (p pointer) setInt32Slice(v []int32) { - if p.v.Type().Elem().Elem() == reflect.TypeOf(int32(0)) { - // raw int32 type - p.v.Elem().Set(reflect.ValueOf(v)) - return - } - // an enum - // Allocate a []enum, then assign []int32's values into it. - // Note: we can't convert []enum to []int32. 
- slice := reflect.MakeSlice(p.v.Type().Elem(), len(v), cap(v)) - for i, x := range v { - slice.Index(i).SetInt(int64(x)) - } - p.v.Elem().Set(slice) -} -func (p pointer) appendInt32Slice(v int32) { - grow(p.v.Elem()).SetInt(int64(v)) -} - -func (p pointer) toUint64() *uint64 { - return p.v.Interface().(*uint64) -} -func (p pointer) toUint64Ptr() **uint64 { - return p.v.Interface().(**uint64) -} -func (p pointer) toUint64Slice() *[]uint64 { - return p.v.Interface().(*[]uint64) -} -func (p pointer) toUint32() *uint32 { - return p.v.Interface().(*uint32) -} -func (p pointer) toUint32Ptr() **uint32 { - return p.v.Interface().(**uint32) -} -func (p pointer) toUint32Slice() *[]uint32 { - return p.v.Interface().(*[]uint32) -} -func (p pointer) toBool() *bool { - return p.v.Interface().(*bool) -} -func (p pointer) toBoolPtr() **bool { - return p.v.Interface().(**bool) -} -func (p pointer) toBoolSlice() *[]bool { - return p.v.Interface().(*[]bool) -} -func (p pointer) toFloat64() *float64 { - return p.v.Interface().(*float64) -} -func (p pointer) toFloat64Ptr() **float64 { - return p.v.Interface().(**float64) -} -func (p pointer) toFloat64Slice() *[]float64 { - return p.v.Interface().(*[]float64) -} -func (p pointer) toFloat32() *float32 { - return p.v.Interface().(*float32) -} -func (p pointer) toFloat32Ptr() **float32 { - return p.v.Interface().(**float32) -} -func (p pointer) toFloat32Slice() *[]float32 { - return p.v.Interface().(*[]float32) -} -func (p pointer) toString() *string { - return p.v.Interface().(*string) -} -func (p pointer) toStringPtr() **string { - return p.v.Interface().(**string) -} -func (p pointer) toStringSlice() *[]string { - return p.v.Interface().(*[]string) -} -func (p pointer) toBytes() *[]byte { - return p.v.Interface().(*[]byte) -} -func (p pointer) toBytesSlice() *[][]byte { - return p.v.Interface().(*[][]byte) -} -func (p pointer) toExtensions() *XXX_InternalExtensions { - return p.v.Interface().(*XXX_InternalExtensions) -} -func (p pointer) toOldExtensions() *map[int32]Extension { - return p.v.Interface().(*map[int32]Extension) -} -func (p pointer) getPointer() pointer { - return pointer{v: p.v.Elem()} -} -func (p pointer) setPointer(q pointer) { - p.v.Elem().Set(q.v) -} -func (p pointer) appendPointer(q pointer) { - grow(p.v.Elem()).Set(q.v) -} - -// getPointerSlice copies []*T from p as a new []pointer. -// This behavior differs from the implementation in pointer_unsafe.go. -func (p pointer) getPointerSlice() []pointer { - if p.v.IsNil() { - return nil - } - n := p.v.Elem().Len() - s := make([]pointer, n) - for i := 0; i < n; i++ { - s[i] = pointer{v: p.v.Elem().Index(i)} - } - return s -} - -// setPointerSlice copies []pointer into p as a new []*T. -// This behavior differs from the implementation in pointer_unsafe.go. -func (p pointer) setPointerSlice(v []pointer) { - if v == nil { - p.v.Elem().Set(reflect.New(p.v.Elem().Type()).Elem()) - return - } - s := reflect.MakeSlice(p.v.Elem().Type(), 0, len(v)) - for _, p := range v { - s = reflect.Append(s, p.v) - } - p.v.Elem().Set(s) -} - -// getInterfacePointer returns a pointer that points to the -// interface data of the interface pointed by p. -func (p pointer) getInterfacePointer() pointer { - if p.v.Elem().IsNil() { - return pointer{v: p.v.Elem()} - } - return pointer{v: p.v.Elem().Elem().Elem().Field(0).Addr()} // *interface -> interface -> *struct -> struct -} - -func (p pointer) asPointerTo(t reflect.Type) reflect.Value { - // TODO: check that p.v.Type().Elem() == t? 
- return p.v -} - -func atomicLoadUnmarshalInfo(p **unmarshalInfo) *unmarshalInfo { - atomicLock.Lock() - defer atomicLock.Unlock() - return *p -} -func atomicStoreUnmarshalInfo(p **unmarshalInfo, v *unmarshalInfo) { - atomicLock.Lock() - defer atomicLock.Unlock() - *p = v -} -func atomicLoadMarshalInfo(p **marshalInfo) *marshalInfo { - atomicLock.Lock() - defer atomicLock.Unlock() - return *p -} -func atomicStoreMarshalInfo(p **marshalInfo, v *marshalInfo) { - atomicLock.Lock() - defer atomicLock.Unlock() - *p = v -} -func atomicLoadMergeInfo(p **mergeInfo) *mergeInfo { - atomicLock.Lock() - defer atomicLock.Unlock() - return *p -} -func atomicStoreMergeInfo(p **mergeInfo, v *mergeInfo) { - atomicLock.Lock() - defer atomicLock.Unlock() - *p = v -} -func atomicLoadDiscardInfo(p **discardInfo) *discardInfo { - atomicLock.Lock() - defer atomicLock.Unlock() - return *p -} -func atomicStoreDiscardInfo(p **discardInfo, v *discardInfo) { - atomicLock.Lock() - defer atomicLock.Unlock() - *p = v -} - -var atomicLock sync.Mutex diff --git a/vendor/github.com/gogo/protobuf/proto/pointer_reflect_gogo.go b/vendor/github.com/gogo/protobuf/proto/pointer_reflect_gogo.go deleted file mode 100644 index 7ffd3c2..0000000 --- a/vendor/github.com/gogo/protobuf/proto/pointer_reflect_gogo.go +++ /dev/null @@ -1,59 +0,0 @@ -// Protocol Buffers for Go with Gadgets -// -// Copyright (c) 2018, The GoGo Authors. All rights reserved. -// http://github.com/gogo/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// +build purego appengine js - -// This file contains an implementation of proto field accesses using package reflect. -// It is slower than the code in pointer_unsafe.go but it avoids package unsafe and can -// be used on App Engine. - -package proto - -import ( - "reflect" -) - -// TODO: untested, so probably incorrect. 
- -func (p pointer) getRef() pointer { - return pointer{v: p.v.Addr()} -} - -func (p pointer) appendRef(v pointer, typ reflect.Type) { - slice := p.getSlice(typ) - elem := v.asPointerTo(typ).Elem() - newSlice := reflect.Append(slice, elem) - slice.Set(newSlice) -} - -func (p pointer) getSlice(typ reflect.Type) reflect.Value { - sliceTyp := reflect.SliceOf(typ) - slice := p.asPointerTo(sliceTyp) - slice = slice.Elem() - return slice -} diff --git a/vendor/github.com/gogo/protobuf/proto/pointer_unsafe.go b/vendor/github.com/gogo/protobuf/proto/pointer_unsafe.go deleted file mode 100644 index d55a335..0000000 --- a/vendor/github.com/gogo/protobuf/proto/pointer_unsafe.go +++ /dev/null @@ -1,308 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2012 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// +build !purego,!appengine,!js - -// This file contains the implementation of the proto field accesses using package unsafe. - -package proto - -import ( - "reflect" - "sync/atomic" - "unsafe" -) - -const unsafeAllowed = true - -// A field identifies a field in a struct, accessible from a pointer. -// In this implementation, a field is identified by its byte offset from the start of the struct. -type field uintptr - -// toField returns a field equivalent to the given reflect field. -func toField(f *reflect.StructField) field { - return field(f.Offset) -} - -// invalidField is an invalid field identifier. -const invalidField = ^field(0) - -// zeroField is a noop when calling pointer.offset. -const zeroField = field(0) - -// IsValid reports whether the field identifier is valid. -func (f field) IsValid() bool { - return f != invalidField -} - -// The pointer type below is for the new table-driven encoder/decoder. -// The implementation here uses unsafe.Pointer to create a generic pointer. -// In pointer_reflect.go we use reflect instead of unsafe to implement -// the same (but slower) interface. 
-type pointer struct { - p unsafe.Pointer -} - -// size of pointer -var ptrSize = unsafe.Sizeof(uintptr(0)) - -// toPointer converts an interface of pointer type to a pointer -// that points to the same target. -func toPointer(i *Message) pointer { - // Super-tricky - read pointer out of data word of interface value. - // Saves ~25ns over the equivalent: - // return valToPointer(reflect.ValueOf(*i)) - return pointer{p: (*[2]unsafe.Pointer)(unsafe.Pointer(i))[1]} -} - -// toAddrPointer converts an interface to a pointer that points to -// the interface data. -func toAddrPointer(i *interface{}, isptr bool) pointer { - // Super-tricky - read or get the address of data word of interface value. - if isptr { - // The interface is of pointer type, thus it is a direct interface. - // The data word is the pointer data itself. We take its address. - return pointer{p: unsafe.Pointer(uintptr(unsafe.Pointer(i)) + ptrSize)} - } - // The interface is not of pointer type. The data word is the pointer - // to the data. - return pointer{p: (*[2]unsafe.Pointer)(unsafe.Pointer(i))[1]} -} - -// valToPointer converts v to a pointer. v must be of pointer type. -func valToPointer(v reflect.Value) pointer { - return pointer{p: unsafe.Pointer(v.Pointer())} -} - -// offset converts from a pointer to a structure to a pointer to -// one of its fields. -func (p pointer) offset(f field) pointer { - // For safety, we should panic if !f.IsValid, however calling panic causes - // this to no longer be inlineable, which is a serious performance cost. - /* - if !f.IsValid() { - panic("invalid field") - } - */ - return pointer{p: unsafe.Pointer(uintptr(p.p) + uintptr(f))} -} - -func (p pointer) isNil() bool { - return p.p == nil -} - -func (p pointer) toInt64() *int64 { - return (*int64)(p.p) -} -func (p pointer) toInt64Ptr() **int64 { - return (**int64)(p.p) -} -func (p pointer) toInt64Slice() *[]int64 { - return (*[]int64)(p.p) -} -func (p pointer) toInt32() *int32 { - return (*int32)(p.p) -} - -// See pointer_reflect.go for why toInt32Ptr/Slice doesn't exist. -/* - func (p pointer) toInt32Ptr() **int32 { - return (**int32)(p.p) - } - func (p pointer) toInt32Slice() *[]int32 { - return (*[]int32)(p.p) - } -*/ -func (p pointer) getInt32Ptr() *int32 { - return *(**int32)(p.p) -} -func (p pointer) setInt32Ptr(v int32) { - *(**int32)(p.p) = &v -} - -// getInt32Slice loads a []int32 from p. -// The value returned is aliased with the original slice. -// This behavior differs from the implementation in pointer_reflect.go. -func (p pointer) getInt32Slice() []int32 { - return *(*[]int32)(p.p) -} - -// setInt32Slice stores a []int32 to p. -// The value set is aliased with the input slice. -// This behavior differs from the implementation in pointer_reflect.go. -func (p pointer) setInt32Slice(v []int32) { - *(*[]int32)(p.p) = v -} - -// TODO: Can we get rid of appendInt32Slice and use setInt32Slice instead? 
-func (p pointer) appendInt32Slice(v int32) { - s := (*[]int32)(p.p) - *s = append(*s, v) -} - -func (p pointer) toUint64() *uint64 { - return (*uint64)(p.p) -} -func (p pointer) toUint64Ptr() **uint64 { - return (**uint64)(p.p) -} -func (p pointer) toUint64Slice() *[]uint64 { - return (*[]uint64)(p.p) -} -func (p pointer) toUint32() *uint32 { - return (*uint32)(p.p) -} -func (p pointer) toUint32Ptr() **uint32 { - return (**uint32)(p.p) -} -func (p pointer) toUint32Slice() *[]uint32 { - return (*[]uint32)(p.p) -} -func (p pointer) toBool() *bool { - return (*bool)(p.p) -} -func (p pointer) toBoolPtr() **bool { - return (**bool)(p.p) -} -func (p pointer) toBoolSlice() *[]bool { - return (*[]bool)(p.p) -} -func (p pointer) toFloat64() *float64 { - return (*float64)(p.p) -} -func (p pointer) toFloat64Ptr() **float64 { - return (**float64)(p.p) -} -func (p pointer) toFloat64Slice() *[]float64 { - return (*[]float64)(p.p) -} -func (p pointer) toFloat32() *float32 { - return (*float32)(p.p) -} -func (p pointer) toFloat32Ptr() **float32 { - return (**float32)(p.p) -} -func (p pointer) toFloat32Slice() *[]float32 { - return (*[]float32)(p.p) -} -func (p pointer) toString() *string { - return (*string)(p.p) -} -func (p pointer) toStringPtr() **string { - return (**string)(p.p) -} -func (p pointer) toStringSlice() *[]string { - return (*[]string)(p.p) -} -func (p pointer) toBytes() *[]byte { - return (*[]byte)(p.p) -} -func (p pointer) toBytesSlice() *[][]byte { - return (*[][]byte)(p.p) -} -func (p pointer) toExtensions() *XXX_InternalExtensions { - return (*XXX_InternalExtensions)(p.p) -} -func (p pointer) toOldExtensions() *map[int32]Extension { - return (*map[int32]Extension)(p.p) -} - -// getPointerSlice loads []*T from p as a []pointer. -// The value returned is aliased with the original slice. -// This behavior differs from the implementation in pointer_reflect.go. -func (p pointer) getPointerSlice() []pointer { - // Super-tricky - p should point to a []*T where T is a - // message type. We load it as []pointer. - return *(*[]pointer)(p.p) -} - -// setPointerSlice stores []pointer into p as a []*T. -// The value set is aliased with the input slice. -// This behavior differs from the implementation in pointer_reflect.go. -func (p pointer) setPointerSlice(v []pointer) { - // Super-tricky - p should point to a []*T where T is a - // message type. We store it as []pointer. - *(*[]pointer)(p.p) = v -} - -// getPointer loads the pointer at p and returns it. -func (p pointer) getPointer() pointer { - return pointer{p: *(*unsafe.Pointer)(p.p)} -} - -// setPointer stores the pointer q at p. -func (p pointer) setPointer(q pointer) { - *(*unsafe.Pointer)(p.p) = q.p -} - -// append q to the slice pointed to by p. -func (p pointer) appendPointer(q pointer) { - s := (*[]unsafe.Pointer)(p.p) - *s = append(*s, q.p) -} - -// getInterfacePointer returns a pointer that points to the -// interface data of the interface pointed by p. -func (p pointer) getInterfacePointer() pointer { - // Super-tricky - read pointer out of data word of interface value. - return pointer{p: (*(*[2]unsafe.Pointer)(p.p))[1]} -} - -// asPointerTo returns a reflect.Value that is a pointer to an -// object of type t stored at p. 
-func (p pointer) asPointerTo(t reflect.Type) reflect.Value { - return reflect.NewAt(t, p.p) -} - -func atomicLoadUnmarshalInfo(p **unmarshalInfo) *unmarshalInfo { - return (*unmarshalInfo)(atomic.LoadPointer((*unsafe.Pointer)(unsafe.Pointer(p)))) -} -func atomicStoreUnmarshalInfo(p **unmarshalInfo, v *unmarshalInfo) { - atomic.StorePointer((*unsafe.Pointer)(unsafe.Pointer(p)), unsafe.Pointer(v)) -} -func atomicLoadMarshalInfo(p **marshalInfo) *marshalInfo { - return (*marshalInfo)(atomic.LoadPointer((*unsafe.Pointer)(unsafe.Pointer(p)))) -} -func atomicStoreMarshalInfo(p **marshalInfo, v *marshalInfo) { - atomic.StorePointer((*unsafe.Pointer)(unsafe.Pointer(p)), unsafe.Pointer(v)) -} -func atomicLoadMergeInfo(p **mergeInfo) *mergeInfo { - return (*mergeInfo)(atomic.LoadPointer((*unsafe.Pointer)(unsafe.Pointer(p)))) -} -func atomicStoreMergeInfo(p **mergeInfo, v *mergeInfo) { - atomic.StorePointer((*unsafe.Pointer)(unsafe.Pointer(p)), unsafe.Pointer(v)) -} -func atomicLoadDiscardInfo(p **discardInfo) *discardInfo { - return (*discardInfo)(atomic.LoadPointer((*unsafe.Pointer)(unsafe.Pointer(p)))) -} -func atomicStoreDiscardInfo(p **discardInfo, v *discardInfo) { - atomic.StorePointer((*unsafe.Pointer)(unsafe.Pointer(p)), unsafe.Pointer(v)) -} diff --git a/vendor/github.com/gogo/protobuf/proto/pointer_unsafe_gogo.go b/vendor/github.com/gogo/protobuf/proto/pointer_unsafe_gogo.go deleted file mode 100644 index aca8eed..0000000 --- a/vendor/github.com/gogo/protobuf/proto/pointer_unsafe_gogo.go +++ /dev/null @@ -1,56 +0,0 @@ -// Protocol Buffers for Go with Gadgets -// -// Copyright (c) 2018, The GoGo Authors. All rights reserved. -// http://github.com/gogo/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// +build !purego,!appengine,!js - -// This file contains the implementation of the proto field accesses using package unsafe. 
- -package proto - -import ( - "reflect" - "unsafe" -) - -func (p pointer) getRef() pointer { - return pointer{p: (unsafe.Pointer)(&p.p)} -} - -func (p pointer) appendRef(v pointer, typ reflect.Type) { - slice := p.getSlice(typ) - elem := v.asPointerTo(typ).Elem() - newSlice := reflect.Append(slice, elem) - slice.Set(newSlice) -} - -func (p pointer) getSlice(typ reflect.Type) reflect.Value { - sliceTyp := reflect.SliceOf(typ) - slice := p.asPointerTo(sliceTyp) - slice = slice.Elem() - return slice -} diff --git a/vendor/github.com/gogo/protobuf/proto/properties.go b/vendor/github.com/gogo/protobuf/proto/properties.go deleted file mode 100644 index c9e5fa0..0000000 --- a/vendor/github.com/gogo/protobuf/proto/properties.go +++ /dev/null @@ -1,599 +0,0 @@ -// Protocol Buffers for Go with Gadgets -// -// Copyright (c) 2013, The GoGo Authors. All rights reserved. -// http://github.com/gogo/protobuf -// -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -/* - * Routines for encoding data into the wire format for protocol buffers. - */ - -import ( - "fmt" - "log" - "os" - "reflect" - "sort" - "strconv" - "strings" - "sync" -) - -const debug bool = false - -// Constants that identify the encoding of a value on the wire. -const ( - WireVarint = 0 - WireFixed64 = 1 - WireBytes = 2 - WireStartGroup = 3 - WireEndGroup = 4 - WireFixed32 = 5 -) - -// tagMap is an optimization over map[int]int for typical protocol buffer -// use-cases. Encoded protocol buffers are often in tag order with small tag -// numbers. -type tagMap struct { - fastTags []int - slowTags map[int]int -} - -// tagMapFastLimit is the upper bound on the tag number that will be stored in -// the tagMap slice rather than its map. 
-const tagMapFastLimit = 1024 - -func (p *tagMap) get(t int) (int, bool) { - if t > 0 && t < tagMapFastLimit { - if t >= len(p.fastTags) { - return 0, false - } - fi := p.fastTags[t] - return fi, fi >= 0 - } - fi, ok := p.slowTags[t] - return fi, ok -} - -func (p *tagMap) put(t int, fi int) { - if t > 0 && t < tagMapFastLimit { - for len(p.fastTags) < t+1 { - p.fastTags = append(p.fastTags, -1) - } - p.fastTags[t] = fi - return - } - if p.slowTags == nil { - p.slowTags = make(map[int]int) - } - p.slowTags[t] = fi -} - -// StructProperties represents properties for all the fields of a struct. -// decoderTags and decoderOrigNames should only be used by the decoder. -type StructProperties struct { - Prop []*Properties // properties for each field - reqCount int // required count - decoderTags tagMap // map from proto tag to struct field number - decoderOrigNames map[string]int // map from original name to struct field number - order []int // list of struct field numbers in tag order - - // OneofTypes contains information about the oneof fields in this message. - // It is keyed by the original name of a field. - OneofTypes map[string]*OneofProperties -} - -// OneofProperties represents information about a specific field in a oneof. -type OneofProperties struct { - Type reflect.Type // pointer to generated struct type for this oneof field - Field int // struct field number of the containing oneof in the message - Prop *Properties -} - -// Implement the sorting interface so we can sort the fields in tag order, as recommended by the spec. -// See encode.go, (*Buffer).enc_struct. - -func (sp *StructProperties) Len() int { return len(sp.order) } -func (sp *StructProperties) Less(i, j int) bool { - return sp.Prop[sp.order[i]].Tag < sp.Prop[sp.order[j]].Tag -} -func (sp *StructProperties) Swap(i, j int) { sp.order[i], sp.order[j] = sp.order[j], sp.order[i] } - -// Properties represents the protocol-specific behavior of a single struct field. -type Properties struct { - Name string // name of the field, for error messages - OrigName string // original name before protocol compiler (always set) - JSONName string // name to use for JSON; determined by protoc - Wire string - WireType int - Tag int - Required bool - Optional bool - Repeated bool - Packed bool // relevant for repeated primitives only - Enum string // set for enum types only - proto3 bool // whether this is known to be a proto3 field - oneof bool // whether this is a oneof field - - Default string // default value - HasDefault bool // whether an explicit default was provided - CustomType string - CastType string - StdTime bool - StdDuration bool - WktPointer bool - - stype reflect.Type // set for struct types only - ctype reflect.Type // set for custom types only - sprop *StructProperties // set for struct types only - - mtype reflect.Type // set for map types only - MapKeyProp *Properties // set for map types only - MapValProp *Properties // set for map types only -} - -// String formats the properties in the protobuf struct field tag style. 
-func (p *Properties) String() string { - s := p.Wire - s += "," - s += strconv.Itoa(p.Tag) - if p.Required { - s += ",req" - } - if p.Optional { - s += ",opt" - } - if p.Repeated { - s += ",rep" - } - if p.Packed { - s += ",packed" - } - s += ",name=" + p.OrigName - if p.JSONName != p.OrigName { - s += ",json=" + p.JSONName - } - if p.proto3 { - s += ",proto3" - } - if p.oneof { - s += ",oneof" - } - if len(p.Enum) > 0 { - s += ",enum=" + p.Enum - } - if p.HasDefault { - s += ",def=" + p.Default - } - return s -} - -// Parse populates p by parsing a string in the protobuf struct field tag style. -func (p *Properties) Parse(s string) { - // "bytes,49,opt,name=foo,def=hello!" - fields := strings.Split(s, ",") // breaks def=, but handled below. - if len(fields) < 2 { - fmt.Fprintf(os.Stderr, "proto: tag has too few fields: %q\n", s) - return - } - - p.Wire = fields[0] - switch p.Wire { - case "varint": - p.WireType = WireVarint - case "fixed32": - p.WireType = WireFixed32 - case "fixed64": - p.WireType = WireFixed64 - case "zigzag32": - p.WireType = WireVarint - case "zigzag64": - p.WireType = WireVarint - case "bytes", "group": - p.WireType = WireBytes - // no numeric converter for non-numeric types - default: - fmt.Fprintf(os.Stderr, "proto: tag has unknown wire type: %q\n", s) - return - } - - var err error - p.Tag, err = strconv.Atoi(fields[1]) - if err != nil { - return - } - -outer: - for i := 2; i < len(fields); i++ { - f := fields[i] - switch { - case f == "req": - p.Required = true - case f == "opt": - p.Optional = true - case f == "rep": - p.Repeated = true - case f == "packed": - p.Packed = true - case strings.HasPrefix(f, "name="): - p.OrigName = f[5:] - case strings.HasPrefix(f, "json="): - p.JSONName = f[5:] - case strings.HasPrefix(f, "enum="): - p.Enum = f[5:] - case f == "proto3": - p.proto3 = true - case f == "oneof": - p.oneof = true - case strings.HasPrefix(f, "def="): - p.HasDefault = true - p.Default = f[4:] // rest of string - if i+1 < len(fields) { - // Commas aren't escaped, and def is always last. - p.Default += "," + strings.Join(fields[i+1:], ",") - break outer - } - case strings.HasPrefix(f, "embedded="): - p.OrigName = strings.Split(f, "=")[1] - case strings.HasPrefix(f, "customtype="): - p.CustomType = strings.Split(f, "=")[1] - case strings.HasPrefix(f, "casttype="): - p.CastType = strings.Split(f, "=")[1] - case f == "stdtime": - p.StdTime = true - case f == "stdduration": - p.StdDuration = true - case f == "wktptr": - p.WktPointer = true - } - } -} - -var protoMessageType = reflect.TypeOf((*Message)(nil)).Elem() - -// setFieldProps initializes the field properties for submessages and maps. 
-func (p *Properties) setFieldProps(typ reflect.Type, f *reflect.StructField, lockGetProp bool) { - isMap := typ.Kind() == reflect.Map - if len(p.CustomType) > 0 && !isMap { - p.ctype = typ - p.setTag(lockGetProp) - return - } - if p.StdTime && !isMap { - p.setTag(lockGetProp) - return - } - if p.StdDuration && !isMap { - p.setTag(lockGetProp) - return - } - if p.WktPointer && !isMap { - p.setTag(lockGetProp) - return - } - switch t1 := typ; t1.Kind() { - case reflect.Struct: - p.stype = typ - case reflect.Ptr: - if t1.Elem().Kind() == reflect.Struct { - p.stype = t1.Elem() - } - case reflect.Slice: - switch t2 := t1.Elem(); t2.Kind() { - case reflect.Ptr: - switch t3 := t2.Elem(); t3.Kind() { - case reflect.Struct: - p.stype = t3 - } - case reflect.Struct: - p.stype = t2 - } - - case reflect.Map: - - p.mtype = t1 - p.MapKeyProp = &Properties{} - p.MapKeyProp.init(reflect.PtrTo(p.mtype.Key()), "Key", f.Tag.Get("protobuf_key"), nil, lockGetProp) - p.MapValProp = &Properties{} - vtype := p.mtype.Elem() - if vtype.Kind() != reflect.Ptr && vtype.Kind() != reflect.Slice { - // The value type is not a message (*T) or bytes ([]byte), - // so we need encoders for the pointer to this type. - vtype = reflect.PtrTo(vtype) - } - - p.MapValProp.CustomType = p.CustomType - p.MapValProp.StdDuration = p.StdDuration - p.MapValProp.StdTime = p.StdTime - p.MapValProp.WktPointer = p.WktPointer - p.MapValProp.init(vtype, "Value", f.Tag.Get("protobuf_val"), nil, lockGetProp) - } - p.setTag(lockGetProp) -} - -func (p *Properties) setTag(lockGetProp bool) { - if p.stype != nil { - if lockGetProp { - p.sprop = GetProperties(p.stype) - } else { - p.sprop = getPropertiesLocked(p.stype) - } - } -} - -var ( - marshalerType = reflect.TypeOf((*Marshaler)(nil)).Elem() -) - -// Init populates the properties from a protocol buffer struct tag. -func (p *Properties) Init(typ reflect.Type, name, tag string, f *reflect.StructField) { - p.init(typ, name, tag, f, true) -} - -func (p *Properties) init(typ reflect.Type, name, tag string, f *reflect.StructField, lockGetProp bool) { - // "bytes,49,opt,def=hello!" - p.Name = name - p.OrigName = name - if tag == "" { - return - } - p.Parse(tag) - p.setFieldProps(typ, f, lockGetProp) -} - -var ( - propertiesMu sync.RWMutex - propertiesMap = make(map[reflect.Type]*StructProperties) -) - -// GetProperties returns the list of properties for the type represented by t. -// t must represent a generated struct type of a protocol message. -func GetProperties(t reflect.Type) *StructProperties { - if t.Kind() != reflect.Struct { - panic("proto: type must have kind struct") - } - - // Most calls to GetProperties in a long-running program will be - // retrieving details for types we have seen before. - propertiesMu.RLock() - sprop, ok := propertiesMap[t] - propertiesMu.RUnlock() - if ok { - return sprop - } - - propertiesMu.Lock() - sprop = getPropertiesLocked(t) - propertiesMu.Unlock() - return sprop -} - -// getPropertiesLocked requires that propertiesMu is held. -func getPropertiesLocked(t reflect.Type) *StructProperties { - if prop, ok := propertiesMap[t]; ok { - return prop - } - - prop := new(StructProperties) - // in case of recursive protos, fill this in now. 
- propertiesMap[t] = prop - - // build properties - prop.Prop = make([]*Properties, t.NumField()) - prop.order = make([]int, t.NumField()) - - isOneofMessage := false - for i := 0; i < t.NumField(); i++ { - f := t.Field(i) - p := new(Properties) - name := f.Name - p.init(f.Type, name, f.Tag.Get("protobuf"), &f, false) - - oneof := f.Tag.Get("protobuf_oneof") // special case - if oneof != "" { - isOneofMessage = true - // Oneof fields don't use the traditional protobuf tag. - p.OrigName = oneof - } - prop.Prop[i] = p - prop.order[i] = i - if debug { - print(i, " ", f.Name, " ", t.String(), " ") - if p.Tag > 0 { - print(p.String()) - } - print("\n") - } - } - - // Re-order prop.order. - sort.Sort(prop) - - type oneofMessage interface { - XXX_OneofFuncs() (func(Message, *Buffer) error, func(Message, int, int, *Buffer) (bool, error), func(Message) int, []interface{}) - } - if om, ok := reflect.Zero(reflect.PtrTo(t)).Interface().(oneofMessage); isOneofMessage && ok { - var oots []interface{} - _, _, _, oots = om.XXX_OneofFuncs() - - // Interpret oneof metadata. - prop.OneofTypes = make(map[string]*OneofProperties) - for _, oot := range oots { - oop := &OneofProperties{ - Type: reflect.ValueOf(oot).Type(), // *T - Prop: new(Properties), - } - sft := oop.Type.Elem().Field(0) - oop.Prop.Name = sft.Name - oop.Prop.Parse(sft.Tag.Get("protobuf")) - // There will be exactly one interface field that - // this new value is assignable to. - for i := 0; i < t.NumField(); i++ { - f := t.Field(i) - if f.Type.Kind() != reflect.Interface { - continue - } - if !oop.Type.AssignableTo(f.Type) { - continue - } - oop.Field = i - break - } - prop.OneofTypes[oop.Prop.OrigName] = oop - } - } - - // build required counts - // build tags - reqCount := 0 - prop.decoderOrigNames = make(map[string]int) - for i, p := range prop.Prop { - if strings.HasPrefix(p.Name, "XXX_") { - // Internal fields should not appear in tags/origNames maps. - // They are handled specially when encoding and decoding. - continue - } - if p.Required { - reqCount++ - } - prop.decoderTags.put(p.Tag, i) - prop.decoderOrigNames[p.OrigName] = i - } - prop.reqCount = reqCount - - return prop -} - -// A global registry of enum types. -// The generated code will register the generated maps by calling RegisterEnum. - -var enumValueMaps = make(map[string]map[string]int32) -var enumStringMaps = make(map[string]map[int32]string) - -// RegisterEnum is called from the generated code to install the enum descriptor -// maps into the global table to aid parsing text format protocol buffers. -func RegisterEnum(typeName string, unusedNameMap map[int32]string, valueMap map[string]int32) { - if _, ok := enumValueMaps[typeName]; ok { - panic("proto: duplicate enum registered: " + typeName) - } - enumValueMaps[typeName] = valueMap - if _, ok := enumStringMaps[typeName]; ok { - panic("proto: duplicate enum registered: " + typeName) - } - enumStringMaps[typeName] = unusedNameMap -} - -// EnumValueMap returns the mapping from names to integers of the -// enum type enumType, or a nil if not found. -func EnumValueMap(enumType string) map[string]int32 { - return enumValueMaps[enumType] -} - -// A registry of all linked message types. -// The string is a fully-qualified proto name ("pkg.Message"). 
-var ( - protoTypedNils = make(map[string]Message) // a map from proto names to typed nil pointers - protoMapTypes = make(map[string]reflect.Type) // a map from proto names to map types - revProtoTypes = make(map[reflect.Type]string) -) - -// RegisterType is called from generated code and maps from the fully qualified -// proto name to the type (pointer to struct) of the protocol buffer. -func RegisterType(x Message, name string) { - if _, ok := protoTypedNils[name]; ok { - // TODO: Some day, make this a panic. - log.Printf("proto: duplicate proto type registered: %s", name) - return - } - t := reflect.TypeOf(x) - if v := reflect.ValueOf(x); v.Kind() == reflect.Ptr && v.Pointer() == 0 { - // Generated code always calls RegisterType with nil x. - // This check is just for extra safety. - protoTypedNils[name] = x - } else { - protoTypedNils[name] = reflect.Zero(t).Interface().(Message) - } - revProtoTypes[t] = name -} - -// RegisterMapType is called from generated code and maps from the fully qualified -// proto name to the native map type of the proto map definition. -func RegisterMapType(x interface{}, name string) { - if reflect.TypeOf(x).Kind() != reflect.Map { - panic(fmt.Sprintf("RegisterMapType(%T, %q); want map", x, name)) - } - if _, ok := protoMapTypes[name]; ok { - log.Printf("proto: duplicate proto type registered: %s", name) - return - } - t := reflect.TypeOf(x) - protoMapTypes[name] = t - revProtoTypes[t] = name -} - -// MessageName returns the fully-qualified proto name for the given message type. -func MessageName(x Message) string { - type xname interface { - XXX_MessageName() string - } - if m, ok := x.(xname); ok { - return m.XXX_MessageName() - } - return revProtoTypes[reflect.TypeOf(x)] -} - -// MessageType returns the message type (pointer to struct) for a named message. -// The type is not guaranteed to implement proto.Message if the name refers to a -// map entry. -func MessageType(name string) reflect.Type { - if t, ok := protoTypedNils[name]; ok { - return reflect.TypeOf(t) - } - return protoMapTypes[name] -} - -// A registry of all linked proto files. -var ( - protoFiles = make(map[string][]byte) // file name => fileDescriptor -) - -// RegisterFile is called from generated code and maps from the -// full file name of a .proto file to its compressed FileDescriptorProto. -func RegisterFile(filename string, fileDescriptor []byte) { - protoFiles[filename] = fileDescriptor -} - -// FileDescriptor returns the compressed FileDescriptorProto for a .proto file. -func FileDescriptor(filename string) []byte { return protoFiles[filename] } diff --git a/vendor/github.com/gogo/protobuf/proto/properties_gogo.go b/vendor/github.com/gogo/protobuf/proto/properties_gogo.go deleted file mode 100644 index 40ea3dd..0000000 --- a/vendor/github.com/gogo/protobuf/proto/properties_gogo.go +++ /dev/null @@ -1,36 +0,0 @@ -// Protocol Buffers for Go with Gadgets -// -// Copyright (c) 2018, The GoGo Authors. All rights reserved. -// http://github.com/gogo/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -import ( - "reflect" -) - -var sizerType = reflect.TypeOf((*Sizer)(nil)).Elem() -var protosizerType = reflect.TypeOf((*ProtoSizer)(nil)).Elem() diff --git a/vendor/github.com/gogo/protobuf/proto/skip_gogo.go b/vendor/github.com/gogo/protobuf/proto/skip_gogo.go deleted file mode 100644 index 5a5fd93..0000000 --- a/vendor/github.com/gogo/protobuf/proto/skip_gogo.go +++ /dev/null @@ -1,119 +0,0 @@ -// Protocol Buffers for Go with Gadgets -// -// Copyright (c) 2013, The GoGo Authors. All rights reserved. -// http://github.com/gogo/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -package proto - -import ( - "fmt" - "io" -) - -func Skip(data []byte) (n int, err error) { - l := len(data) - index := 0 - for index < l { - var wire uint64 - for shift := uint(0); ; shift += 7 { - if index >= l { - return 0, io.ErrUnexpectedEOF - } - b := data[index] - index++ - wire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - wireType := int(wire & 0x7) - switch wireType { - case 0: - for { - if index >= l { - return 0, io.ErrUnexpectedEOF - } - index++ - if data[index-1] < 0x80 { - break - } - } - return index, nil - case 1: - index += 8 - return index, nil - case 2: - var length int - for shift := uint(0); ; shift += 7 { - if index >= l { - return 0, io.ErrUnexpectedEOF - } - b := data[index] - index++ - length |= (int(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - index += length - return index, nil - case 3: - for { - var innerWire uint64 - var start int = index - for shift := uint(0); ; shift += 7 { - if index >= l { - return 0, io.ErrUnexpectedEOF - } - b := data[index] - index++ - innerWire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - innerWireType := int(innerWire & 0x7) - if innerWireType == 4 { - break - } - next, err := Skip(data[start:]) - if err != nil { - return 0, err - } - index = start + next - } - return index, nil - case 4: - return index, nil - case 5: - index += 4 - return index, nil - default: - return 0, fmt.Errorf("proto: illegal wireType %d", wireType) - } - } - panic("unreachable") -} diff --git a/vendor/github.com/gogo/protobuf/proto/table_marshal.go b/vendor/github.com/gogo/protobuf/proto/table_marshal.go deleted file mode 100644 index 9b1538d..0000000 --- a/vendor/github.com/gogo/protobuf/proto/table_marshal.go +++ /dev/null @@ -1,3006 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2016 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -package proto - -import ( - "errors" - "fmt" - "math" - "reflect" - "sort" - "strconv" - "strings" - "sync" - "sync/atomic" - "unicode/utf8" -) - -// a sizer takes a pointer to a field and the size of its tag, computes the size of -// the encoded data. -type sizer func(pointer, int) int - -// a marshaler takes a byte slice, a pointer to a field, and its tag (in wire format), -// marshals the field to the end of the slice, returns the slice and error (if any). -type marshaler func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) - -// marshalInfo is the information used for marshaling a message. -type marshalInfo struct { - typ reflect.Type - fields []*marshalFieldInfo - unrecognized field // offset of XXX_unrecognized - extensions field // offset of XXX_InternalExtensions - v1extensions field // offset of XXX_extensions - sizecache field // offset of XXX_sizecache - initialized int32 // 0 -- only typ is set, 1 -- fully initialized - messageset bool // uses message set wire format - hasmarshaler bool // has custom marshaler - sync.RWMutex // protect extElems map, also for initialization - extElems map[int32]*marshalElemInfo // info of extension elements - - hassizer bool // has custom sizer - hasprotosizer bool // has custom protosizer - - bytesExtensions field // offset of XXX_extensions where the field type is []byte -} - -// marshalFieldInfo is the information used for marshaling a field of a message. -type marshalFieldInfo struct { - field field - wiretag uint64 // tag in wire format - tagsize int // size of tag in wire format - sizer sizer - marshaler marshaler - isPointer bool - required bool // field is required - name string // name of the field, for error reporting - oneofElems map[reflect.Type]*marshalElemInfo // info of oneof elements -} - -// marshalElemInfo is the information used for marshaling an extension or oneof element. -type marshalElemInfo struct { - wiretag uint64 // tag in wire format - tagsize int // size of tag in wire format - sizer sizer - marshaler marshaler - isptr bool // elem is pointer typed, thus interface of this type is a direct interface (extension only) -} - -var ( - marshalInfoMap = map[reflect.Type]*marshalInfo{} - marshalInfoLock sync.Mutex - - uint8SliceType = reflect.TypeOf(([]uint8)(nil)).Kind() -) - -// getMarshalInfo returns the information to marshal a given type of message. -// The info it returns may not necessarily initialized. -// t is the type of the message (NOT the pointer to it). -func getMarshalInfo(t reflect.Type) *marshalInfo { - marshalInfoLock.Lock() - u, ok := marshalInfoMap[t] - if !ok { - u = &marshalInfo{typ: t} - marshalInfoMap[t] = u - } - marshalInfoLock.Unlock() - return u -} - -// Size is the entry point from generated code, -// and should be ONLY called by generated code. -// It computes the size of encoded data of msg. -// a is a pointer to a place to store cached marshal info. -func (a *InternalMessageInfo) Size(msg Message) int { - u := getMessageMarshalInfo(msg, a) - ptr := toPointer(&msg) - if ptr.isNil() { - // We get here if msg is a typed nil ((*SomeMessage)(nil)), - // so it satisfies the interface, and msg == nil wouldn't - // catch it. We don't want crash in this case. - return 0 - } - return u.size(ptr) -} - -// Marshal is the entry point from generated code, -// and should be ONLY called by generated code. -// It marshals msg to the end of b. -// a is a pointer to a place to store cached marshal info. 
-func (a *InternalMessageInfo) Marshal(b []byte, msg Message, deterministic bool) ([]byte, error) { - u := getMessageMarshalInfo(msg, a) - ptr := toPointer(&msg) - if ptr.isNil() { - // We get here if msg is a typed nil ((*SomeMessage)(nil)), - // so it satisfies the interface, and msg == nil wouldn't - // catch it. We don't want crash in this case. - return b, ErrNil - } - return u.marshal(b, ptr, deterministic) -} - -func getMessageMarshalInfo(msg interface{}, a *InternalMessageInfo) *marshalInfo { - // u := a.marshal, but atomically. - // We use an atomic here to ensure memory consistency. - u := atomicLoadMarshalInfo(&a.marshal) - if u == nil { - // Get marshal information from type of message. - t := reflect.ValueOf(msg).Type() - if t.Kind() != reflect.Ptr { - panic(fmt.Sprintf("cannot handle non-pointer message type %v", t)) - } - u = getMarshalInfo(t.Elem()) - // Store it in the cache for later users. - // a.marshal = u, but atomically. - atomicStoreMarshalInfo(&a.marshal, u) - } - return u -} - -// size is the main function to compute the size of the encoded data of a message. -// ptr is the pointer to the message. -func (u *marshalInfo) size(ptr pointer) int { - if atomic.LoadInt32(&u.initialized) == 0 { - u.computeMarshalInfo() - } - - // If the message can marshal itself, let it do it, for compatibility. - // NOTE: This is not efficient. - if u.hasmarshaler { - // Uses the message's Size method if available - if u.hassizer { - s := ptr.asPointerTo(u.typ).Interface().(Sizer) - return s.Size() - } - // Uses the message's ProtoSize method if available - if u.hasprotosizer { - s := ptr.asPointerTo(u.typ).Interface().(ProtoSizer) - return s.ProtoSize() - } - - m := ptr.asPointerTo(u.typ).Interface().(Marshaler) - b, _ := m.Marshal() - return len(b) - } - - n := 0 - for _, f := range u.fields { - if f.isPointer && ptr.offset(f.field).getPointer().isNil() { - // nil pointer always marshals to nothing - continue - } - n += f.sizer(ptr.offset(f.field), f.tagsize) - } - if u.extensions.IsValid() { - e := ptr.offset(u.extensions).toExtensions() - if u.messageset { - n += u.sizeMessageSet(e) - } else { - n += u.sizeExtensions(e) - } - } - if u.v1extensions.IsValid() { - m := *ptr.offset(u.v1extensions).toOldExtensions() - n += u.sizeV1Extensions(m) - } - if u.bytesExtensions.IsValid() { - s := *ptr.offset(u.bytesExtensions).toBytes() - n += len(s) - } - if u.unrecognized.IsValid() { - s := *ptr.offset(u.unrecognized).toBytes() - n += len(s) - } - - // cache the result for use in marshal - if u.sizecache.IsValid() { - atomic.StoreInt32(ptr.offset(u.sizecache).toInt32(), int32(n)) - } - return n -} - -// cachedsize gets the size from cache. If there is no cache (i.e. message is not generated), -// fall back to compute the size. -func (u *marshalInfo) cachedsize(ptr pointer) int { - if u.sizecache.IsValid() { - return int(atomic.LoadInt32(ptr.offset(u.sizecache).toInt32())) - } - return u.size(ptr) -} - -// marshal is the main function to marshal a message. It takes a byte slice and appends -// the encoded data to the end of the slice, returns the slice and error (if any). -// ptr is the pointer to the message. -// If deterministic is true, map is marshaled in deterministic order. -func (u *marshalInfo) marshal(b []byte, ptr pointer, deterministic bool) ([]byte, error) { - if atomic.LoadInt32(&u.initialized) == 0 { - u.computeMarshalInfo() - } - - // If the message can marshal itself, let it do it, for compatibility. - // NOTE: This is not efficient. 
- if u.hasmarshaler { - m := ptr.asPointerTo(u.typ).Interface().(Marshaler) - b1, err := m.Marshal() - b = append(b, b1...) - return b, err - } - - var err, errLater error - // The old marshaler encodes extensions at beginning. - if u.extensions.IsValid() { - e := ptr.offset(u.extensions).toExtensions() - if u.messageset { - b, err = u.appendMessageSet(b, e, deterministic) - } else { - b, err = u.appendExtensions(b, e, deterministic) - } - if err != nil { - return b, err - } - } - if u.v1extensions.IsValid() { - m := *ptr.offset(u.v1extensions).toOldExtensions() - b, err = u.appendV1Extensions(b, m, deterministic) - if err != nil { - return b, err - } - } - if u.bytesExtensions.IsValid() { - s := *ptr.offset(u.bytesExtensions).toBytes() - b = append(b, s...) - } - for _, f := range u.fields { - if f.required { - if f.isPointer && ptr.offset(f.field).getPointer().isNil() { - // Required field is not set. - // We record the error but keep going, to give a complete marshaling. - if errLater == nil { - errLater = &RequiredNotSetError{f.name} - } - continue - } - } - if f.isPointer && ptr.offset(f.field).getPointer().isNil() { - // nil pointer always marshals to nothing - continue - } - b, err = f.marshaler(b, ptr.offset(f.field), f.wiretag, deterministic) - if err != nil { - if err1, ok := err.(*RequiredNotSetError); ok { - // Required field in submessage is not set. - // We record the error but keep going, to give a complete marshaling. - if errLater == nil { - errLater = &RequiredNotSetError{f.name + "." + err1.field} - } - continue - } - if err == errRepeatedHasNil { - err = errors.New("proto: repeated field " + f.name + " has nil element") - } - if err == errInvalidUTF8 { - if errLater == nil { - fullName := revProtoTypes[reflect.PtrTo(u.typ)] + "." + f.name - errLater = &invalidUTF8Error{fullName} - } - continue - } - return b, err - } - } - if u.unrecognized.IsValid() { - s := *ptr.offset(u.unrecognized).toBytes() - b = append(b, s...) - } - return b, errLater -} - -// computeMarshalInfo initializes the marshal info. -func (u *marshalInfo) computeMarshalInfo() { - u.Lock() - defer u.Unlock() - if u.initialized != 0 { // non-atomic read is ok as it is protected by the lock - return - } - - t := u.typ - u.unrecognized = invalidField - u.extensions = invalidField - u.v1extensions = invalidField - u.bytesExtensions = invalidField - u.sizecache = invalidField - isOneofMessage := false - - if reflect.PtrTo(t).Implements(sizerType) { - u.hassizer = true - } - if reflect.PtrTo(t).Implements(protosizerType) { - u.hasprotosizer = true - } - // If the message can marshal itself, let it do it, for compatibility. - // NOTE: This is not efficient. 
- if reflect.PtrTo(t).Implements(marshalerType) { - u.hasmarshaler = true - atomic.StoreInt32(&u.initialized, 1) - return - } - - n := t.NumField() - - // deal with XXX fields first - for i := 0; i < t.NumField(); i++ { - f := t.Field(i) - if f.Tag.Get("protobuf_oneof") != "" { - isOneofMessage = true - } - if !strings.HasPrefix(f.Name, "XXX_") { - continue - } - switch f.Name { - case "XXX_sizecache": - u.sizecache = toField(&f) - case "XXX_unrecognized": - u.unrecognized = toField(&f) - case "XXX_InternalExtensions": - u.extensions = toField(&f) - u.messageset = f.Tag.Get("protobuf_messageset") == "1" - case "XXX_extensions": - if f.Type.Kind() == reflect.Map { - u.v1extensions = toField(&f) - } else { - u.bytesExtensions = toField(&f) - } - case "XXX_NoUnkeyedLiteral": - // nothing to do - default: - panic("unknown XXX field: " + f.Name) - } - n-- - } - - // get oneof implementers - var oneofImplementers []interface{} - // gogo: isOneofMessage is needed for embedded oneof messages, without a marshaler and unmarshaler - if m, ok := reflect.Zero(reflect.PtrTo(t)).Interface().(oneofMessage); ok && isOneofMessage { - _, _, _, oneofImplementers = m.XXX_OneofFuncs() - } - - // normal fields - fields := make([]marshalFieldInfo, n) // batch allocation - u.fields = make([]*marshalFieldInfo, 0, n) - for i, j := 0, 0; i < t.NumField(); i++ { - f := t.Field(i) - - if strings.HasPrefix(f.Name, "XXX_") { - continue - } - field := &fields[j] - j++ - field.name = f.Name - u.fields = append(u.fields, field) - if f.Tag.Get("protobuf_oneof") != "" { - field.computeOneofFieldInfo(&f, oneofImplementers) - continue - } - if f.Tag.Get("protobuf") == "" { - // field has no tag (not in generated message), ignore it - u.fields = u.fields[:len(u.fields)-1] - j-- - continue - } - field.computeMarshalFieldInfo(&f) - } - - // fields are marshaled in tag order on the wire. - sort.Sort(byTag(u.fields)) - - atomic.StoreInt32(&u.initialized, 1) -} - -// helper for sorting fields by tag -type byTag []*marshalFieldInfo - -func (a byTag) Len() int { return len(a) } -func (a byTag) Swap(i, j int) { a[i], a[j] = a[j], a[i] } -func (a byTag) Less(i, j int) bool { return a[i].wiretag < a[j].wiretag } - -// getExtElemInfo returns the information to marshal an extension element. -// The info it returns is initialized. -func (u *marshalInfo) getExtElemInfo(desc *ExtensionDesc) *marshalElemInfo { - // get from cache first - u.RLock() - e, ok := u.extElems[desc.Field] - u.RUnlock() - if ok { - return e - } - - t := reflect.TypeOf(desc.ExtensionType) // pointer or slice to basic type or struct - tags := strings.Split(desc.Tag, ",") - tag, err := strconv.Atoi(tags[1]) - if err != nil { - panic("tag is not an integer") - } - wt := wiretype(tags[0]) - sizr, marshalr := typeMarshaler(t, tags, false, false) - e = &marshalElemInfo{ - wiretag: uint64(tag)<<3 | wt, - tagsize: SizeVarint(uint64(tag) << 3), - sizer: sizr, - marshaler: marshalr, - isptr: t.Kind() == reflect.Ptr, - } - - // update cache - u.Lock() - if u.extElems == nil { - u.extElems = make(map[int32]*marshalElemInfo) - } - u.extElems[desc.Field] = e - u.Unlock() - return e -} - -// computeMarshalFieldInfo fills up the information to marshal a field. -func (fi *marshalFieldInfo) computeMarshalFieldInfo(f *reflect.StructField) { - // parse protobuf tag of the field. - // tag has format of "bytes,49,opt,name=foo,def=hello!" 
- tags := strings.Split(f.Tag.Get("protobuf"), ",") - if tags[0] == "" { - return - } - tag, err := strconv.Atoi(tags[1]) - if err != nil { - panic("tag is not an integer") - } - wt := wiretype(tags[0]) - if tags[2] == "req" { - fi.required = true - } - fi.setTag(f, tag, wt) - fi.setMarshaler(f, tags) -} - -func (fi *marshalFieldInfo) computeOneofFieldInfo(f *reflect.StructField, oneofImplementers []interface{}) { - fi.field = toField(f) - fi.wiretag = math.MaxInt32 // Use a large tag number, make oneofs sorted at the end. This tag will not appear on the wire. - fi.isPointer = true - fi.sizer, fi.marshaler = makeOneOfMarshaler(fi, f) - fi.oneofElems = make(map[reflect.Type]*marshalElemInfo) - - ityp := f.Type // interface type - for _, o := range oneofImplementers { - t := reflect.TypeOf(o) - if !t.Implements(ityp) { - continue - } - sf := t.Elem().Field(0) // oneof implementer is a struct with a single field - tags := strings.Split(sf.Tag.Get("protobuf"), ",") - tag, err := strconv.Atoi(tags[1]) - if err != nil { - panic("tag is not an integer") - } - wt := wiretype(tags[0]) - sizr, marshalr := typeMarshaler(sf.Type, tags, false, true) // oneof should not omit any zero value - fi.oneofElems[t.Elem()] = &marshalElemInfo{ - wiretag: uint64(tag)<<3 | wt, - tagsize: SizeVarint(uint64(tag) << 3), - sizer: sizr, - marshaler: marshalr, - } - } -} - -type oneofMessage interface { - XXX_OneofFuncs() (func(Message, *Buffer) error, func(Message, int, int, *Buffer) (bool, error), func(Message) int, []interface{}) -} - -// wiretype returns the wire encoding of the type. -func wiretype(encoding string) uint64 { - switch encoding { - case "fixed32": - return WireFixed32 - case "fixed64": - return WireFixed64 - case "varint", "zigzag32", "zigzag64": - return WireVarint - case "bytes": - return WireBytes - case "group": - return WireStartGroup - } - panic("unknown wire type " + encoding) -} - -// setTag fills up the tag (in wire format) and its size in the info of a field. -func (fi *marshalFieldInfo) setTag(f *reflect.StructField, tag int, wt uint64) { - fi.field = toField(f) - fi.wiretag = uint64(tag)<<3 | wt - fi.tagsize = SizeVarint(uint64(tag) << 3) -} - -// setMarshaler fills up the sizer and marshaler in the info of a field. -func (fi *marshalFieldInfo) setMarshaler(f *reflect.StructField, tags []string) { - switch f.Type.Kind() { - case reflect.Map: - // map field - fi.isPointer = true - fi.sizer, fi.marshaler = makeMapMarshaler(f) - return - case reflect.Ptr, reflect.Slice: - fi.isPointer = true - } - fi.sizer, fi.marshaler = typeMarshaler(f.Type, tags, true, false) -} - -// typeMarshaler returns the sizer and marshaler of a given field. -// t is the type of the field. -// tags is the generated "protobuf" tag of the field. -// If nozero is true, zero value is not marshaled to the wire. -// If oneof is true, it is a oneof field. 
-func typeMarshaler(t reflect.Type, tags []string, nozero, oneof bool) (sizer, marshaler) { - encoding := tags[0] - - pointer := false - slice := false - if t.Kind() == reflect.Slice && t.Elem().Kind() != reflect.Uint8 { - slice = true - t = t.Elem() - } - if t.Kind() == reflect.Ptr { - pointer = true - t = t.Elem() - } - - packed := false - proto3 := false - ctype := false - isTime := false - isDuration := false - isWktPointer := false - validateUTF8 := true - for i := 2; i < len(tags); i++ { - if tags[i] == "packed" { - packed = true - } - if tags[i] == "proto3" { - proto3 = true - } - if strings.HasPrefix(tags[i], "customtype=") { - ctype = true - } - if tags[i] == "stdtime" { - isTime = true - } - if tags[i] == "stdduration" { - isDuration = true - } - if tags[i] == "wktptr" { - isWktPointer = true - } - } - validateUTF8 = validateUTF8 && proto3 - if !proto3 && !pointer && !slice { - nozero = false - } - - if ctype { - if reflect.PtrTo(t).Implements(customType) { - if slice { - return makeMessageRefSliceMarshaler(getMarshalInfo(t)) - } - if pointer { - return makeCustomPtrMarshaler(getMarshalInfo(t)) - } - return makeCustomMarshaler(getMarshalInfo(t)) - } else { - panic(fmt.Sprintf("custom type: type: %v, does not implement the proto.custom interface", t)) - } - } - - if isTime { - if pointer { - if slice { - return makeTimePtrSliceMarshaler(getMarshalInfo(t)) - } - return makeTimePtrMarshaler(getMarshalInfo(t)) - } - if slice { - return makeTimeSliceMarshaler(getMarshalInfo(t)) - } - return makeTimeMarshaler(getMarshalInfo(t)) - } - - if isDuration { - if pointer { - if slice { - return makeDurationPtrSliceMarshaler(getMarshalInfo(t)) - } - return makeDurationPtrMarshaler(getMarshalInfo(t)) - } - if slice { - return makeDurationSliceMarshaler(getMarshalInfo(t)) - } - return makeDurationMarshaler(getMarshalInfo(t)) - } - - if isWktPointer { - switch t.Kind() { - case reflect.Float64: - if pointer { - if slice { - return makeStdDoubleValuePtrSliceMarshaler(getMarshalInfo(t)) - } - return makeStdDoubleValuePtrMarshaler(getMarshalInfo(t)) - } - if slice { - return makeStdDoubleValueSliceMarshaler(getMarshalInfo(t)) - } - return makeStdDoubleValueMarshaler(getMarshalInfo(t)) - case reflect.Float32: - if pointer { - if slice { - return makeStdFloatValuePtrSliceMarshaler(getMarshalInfo(t)) - } - return makeStdFloatValuePtrMarshaler(getMarshalInfo(t)) - } - if slice { - return makeStdFloatValueSliceMarshaler(getMarshalInfo(t)) - } - return makeStdFloatValueMarshaler(getMarshalInfo(t)) - case reflect.Int64: - if pointer { - if slice { - return makeStdInt64ValuePtrSliceMarshaler(getMarshalInfo(t)) - } - return makeStdInt64ValuePtrMarshaler(getMarshalInfo(t)) - } - if slice { - return makeStdInt64ValueSliceMarshaler(getMarshalInfo(t)) - } - return makeStdInt64ValueMarshaler(getMarshalInfo(t)) - case reflect.Uint64: - if pointer { - if slice { - return makeStdUInt64ValuePtrSliceMarshaler(getMarshalInfo(t)) - } - return makeStdUInt64ValuePtrMarshaler(getMarshalInfo(t)) - } - if slice { - return makeStdUInt64ValueSliceMarshaler(getMarshalInfo(t)) - } - return makeStdUInt64ValueMarshaler(getMarshalInfo(t)) - case reflect.Int32: - if pointer { - if slice { - return makeStdInt32ValuePtrSliceMarshaler(getMarshalInfo(t)) - } - return makeStdInt32ValuePtrMarshaler(getMarshalInfo(t)) - } - if slice { - return makeStdInt32ValueSliceMarshaler(getMarshalInfo(t)) - } - return makeStdInt32ValueMarshaler(getMarshalInfo(t)) - case reflect.Uint32: - if pointer { - if slice { - return 
makeStdUInt32ValuePtrSliceMarshaler(getMarshalInfo(t)) - } - return makeStdUInt32ValuePtrMarshaler(getMarshalInfo(t)) - } - if slice { - return makeStdUInt32ValueSliceMarshaler(getMarshalInfo(t)) - } - return makeStdUInt32ValueMarshaler(getMarshalInfo(t)) - case reflect.Bool: - if pointer { - if slice { - return makeStdBoolValuePtrSliceMarshaler(getMarshalInfo(t)) - } - return makeStdBoolValuePtrMarshaler(getMarshalInfo(t)) - } - if slice { - return makeStdBoolValueSliceMarshaler(getMarshalInfo(t)) - } - return makeStdBoolValueMarshaler(getMarshalInfo(t)) - case reflect.String: - if pointer { - if slice { - return makeStdStringValuePtrSliceMarshaler(getMarshalInfo(t)) - } - return makeStdStringValuePtrMarshaler(getMarshalInfo(t)) - } - if slice { - return makeStdStringValueSliceMarshaler(getMarshalInfo(t)) - } - return makeStdStringValueMarshaler(getMarshalInfo(t)) - case uint8SliceType: - if pointer { - if slice { - return makeStdBytesValuePtrSliceMarshaler(getMarshalInfo(t)) - } - return makeStdBytesValuePtrMarshaler(getMarshalInfo(t)) - } - if slice { - return makeStdBytesValueSliceMarshaler(getMarshalInfo(t)) - } - return makeStdBytesValueMarshaler(getMarshalInfo(t)) - default: - panic(fmt.Sprintf("unknown wktpointer type %#v", t)) - } - } - - switch t.Kind() { - case reflect.Bool: - if pointer { - return sizeBoolPtr, appendBoolPtr - } - if slice { - if packed { - return sizeBoolPackedSlice, appendBoolPackedSlice - } - return sizeBoolSlice, appendBoolSlice - } - if nozero { - return sizeBoolValueNoZero, appendBoolValueNoZero - } - return sizeBoolValue, appendBoolValue - case reflect.Uint32: - switch encoding { - case "fixed32": - if pointer { - return sizeFixed32Ptr, appendFixed32Ptr - } - if slice { - if packed { - return sizeFixed32PackedSlice, appendFixed32PackedSlice - } - return sizeFixed32Slice, appendFixed32Slice - } - if nozero { - return sizeFixed32ValueNoZero, appendFixed32ValueNoZero - } - return sizeFixed32Value, appendFixed32Value - case "varint": - if pointer { - return sizeVarint32Ptr, appendVarint32Ptr - } - if slice { - if packed { - return sizeVarint32PackedSlice, appendVarint32PackedSlice - } - return sizeVarint32Slice, appendVarint32Slice - } - if nozero { - return sizeVarint32ValueNoZero, appendVarint32ValueNoZero - } - return sizeVarint32Value, appendVarint32Value - } - case reflect.Int32: - switch encoding { - case "fixed32": - if pointer { - return sizeFixedS32Ptr, appendFixedS32Ptr - } - if slice { - if packed { - return sizeFixedS32PackedSlice, appendFixedS32PackedSlice - } - return sizeFixedS32Slice, appendFixedS32Slice - } - if nozero { - return sizeFixedS32ValueNoZero, appendFixedS32ValueNoZero - } - return sizeFixedS32Value, appendFixedS32Value - case "varint": - if pointer { - return sizeVarintS32Ptr, appendVarintS32Ptr - } - if slice { - if packed { - return sizeVarintS32PackedSlice, appendVarintS32PackedSlice - } - return sizeVarintS32Slice, appendVarintS32Slice - } - if nozero { - return sizeVarintS32ValueNoZero, appendVarintS32ValueNoZero - } - return sizeVarintS32Value, appendVarintS32Value - case "zigzag32": - if pointer { - return sizeZigzag32Ptr, appendZigzag32Ptr - } - if slice { - if packed { - return sizeZigzag32PackedSlice, appendZigzag32PackedSlice - } - return sizeZigzag32Slice, appendZigzag32Slice - } - if nozero { - return sizeZigzag32ValueNoZero, appendZigzag32ValueNoZero - } - return sizeZigzag32Value, appendZigzag32Value - } - case reflect.Uint64: - switch encoding { - case "fixed64": - if pointer { - return sizeFixed64Ptr, 
appendFixed64Ptr - } - if slice { - if packed { - return sizeFixed64PackedSlice, appendFixed64PackedSlice - } - return sizeFixed64Slice, appendFixed64Slice - } - if nozero { - return sizeFixed64ValueNoZero, appendFixed64ValueNoZero - } - return sizeFixed64Value, appendFixed64Value - case "varint": - if pointer { - return sizeVarint64Ptr, appendVarint64Ptr - } - if slice { - if packed { - return sizeVarint64PackedSlice, appendVarint64PackedSlice - } - return sizeVarint64Slice, appendVarint64Slice - } - if nozero { - return sizeVarint64ValueNoZero, appendVarint64ValueNoZero - } - return sizeVarint64Value, appendVarint64Value - } - case reflect.Int64: - switch encoding { - case "fixed64": - if pointer { - return sizeFixedS64Ptr, appendFixedS64Ptr - } - if slice { - if packed { - return sizeFixedS64PackedSlice, appendFixedS64PackedSlice - } - return sizeFixedS64Slice, appendFixedS64Slice - } - if nozero { - return sizeFixedS64ValueNoZero, appendFixedS64ValueNoZero - } - return sizeFixedS64Value, appendFixedS64Value - case "varint": - if pointer { - return sizeVarintS64Ptr, appendVarintS64Ptr - } - if slice { - if packed { - return sizeVarintS64PackedSlice, appendVarintS64PackedSlice - } - return sizeVarintS64Slice, appendVarintS64Slice - } - if nozero { - return sizeVarintS64ValueNoZero, appendVarintS64ValueNoZero - } - return sizeVarintS64Value, appendVarintS64Value - case "zigzag64": - if pointer { - return sizeZigzag64Ptr, appendZigzag64Ptr - } - if slice { - if packed { - return sizeZigzag64PackedSlice, appendZigzag64PackedSlice - } - return sizeZigzag64Slice, appendZigzag64Slice - } - if nozero { - return sizeZigzag64ValueNoZero, appendZigzag64ValueNoZero - } - return sizeZigzag64Value, appendZigzag64Value - } - case reflect.Float32: - if pointer { - return sizeFloat32Ptr, appendFloat32Ptr - } - if slice { - if packed { - return sizeFloat32PackedSlice, appendFloat32PackedSlice - } - return sizeFloat32Slice, appendFloat32Slice - } - if nozero { - return sizeFloat32ValueNoZero, appendFloat32ValueNoZero - } - return sizeFloat32Value, appendFloat32Value - case reflect.Float64: - if pointer { - return sizeFloat64Ptr, appendFloat64Ptr - } - if slice { - if packed { - return sizeFloat64PackedSlice, appendFloat64PackedSlice - } - return sizeFloat64Slice, appendFloat64Slice - } - if nozero { - return sizeFloat64ValueNoZero, appendFloat64ValueNoZero - } - return sizeFloat64Value, appendFloat64Value - case reflect.String: - if validateUTF8 { - if pointer { - return sizeStringPtr, appendUTF8StringPtr - } - if slice { - return sizeStringSlice, appendUTF8StringSlice - } - if nozero { - return sizeStringValueNoZero, appendUTF8StringValueNoZero - } - return sizeStringValue, appendUTF8StringValue - } - if pointer { - return sizeStringPtr, appendStringPtr - } - if slice { - return sizeStringSlice, appendStringSlice - } - if nozero { - return sizeStringValueNoZero, appendStringValueNoZero - } - return sizeStringValue, appendStringValue - case reflect.Slice: - if slice { - return sizeBytesSlice, appendBytesSlice - } - if oneof { - // Oneof bytes field may also have "proto3" tag. - // We want to marshal it as a oneof field. Do this - // check before the proto3 check. 
- return sizeBytesOneof, appendBytesOneof - } - if proto3 { - return sizeBytes3, appendBytes3 - } - return sizeBytes, appendBytes - case reflect.Struct: - switch encoding { - case "group": - if slice { - return makeGroupSliceMarshaler(getMarshalInfo(t)) - } - return makeGroupMarshaler(getMarshalInfo(t)) - case "bytes": - if pointer { - if slice { - return makeMessageSliceMarshaler(getMarshalInfo(t)) - } - return makeMessageMarshaler(getMarshalInfo(t)) - } else { - if slice { - return makeMessageRefSliceMarshaler(getMarshalInfo(t)) - } - return makeMessageRefMarshaler(getMarshalInfo(t)) - } - } - } - panic(fmt.Sprintf("unknown or mismatched type: type: %v, wire type: %v", t, encoding)) -} - -// Below are functions to size/marshal a specific type of a field. -// They are stored in the field's info, and called by function pointers. -// They have type sizer or marshaler. - -func sizeFixed32Value(_ pointer, tagsize int) int { - return 4 + tagsize -} -func sizeFixed32ValueNoZero(ptr pointer, tagsize int) int { - v := *ptr.toUint32() - if v == 0 { - return 0 - } - return 4 + tagsize -} -func sizeFixed32Ptr(ptr pointer, tagsize int) int { - p := *ptr.toUint32Ptr() - if p == nil { - return 0 - } - return 4 + tagsize -} -func sizeFixed32Slice(ptr pointer, tagsize int) int { - s := *ptr.toUint32Slice() - return (4 + tagsize) * len(s) -} -func sizeFixed32PackedSlice(ptr pointer, tagsize int) int { - s := *ptr.toUint32Slice() - if len(s) == 0 { - return 0 - } - return 4*len(s) + SizeVarint(uint64(4*len(s))) + tagsize -} -func sizeFixedS32Value(_ pointer, tagsize int) int { - return 4 + tagsize -} -func sizeFixedS32ValueNoZero(ptr pointer, tagsize int) int { - v := *ptr.toInt32() - if v == 0 { - return 0 - } - return 4 + tagsize -} -func sizeFixedS32Ptr(ptr pointer, tagsize int) int { - p := ptr.getInt32Ptr() - if p == nil { - return 0 - } - return 4 + tagsize -} -func sizeFixedS32Slice(ptr pointer, tagsize int) int { - s := ptr.getInt32Slice() - return (4 + tagsize) * len(s) -} -func sizeFixedS32PackedSlice(ptr pointer, tagsize int) int { - s := ptr.getInt32Slice() - if len(s) == 0 { - return 0 - } - return 4*len(s) + SizeVarint(uint64(4*len(s))) + tagsize -} -func sizeFloat32Value(_ pointer, tagsize int) int { - return 4 + tagsize -} -func sizeFloat32ValueNoZero(ptr pointer, tagsize int) int { - v := math.Float32bits(*ptr.toFloat32()) - if v == 0 { - return 0 - } - return 4 + tagsize -} -func sizeFloat32Ptr(ptr pointer, tagsize int) int { - p := *ptr.toFloat32Ptr() - if p == nil { - return 0 - } - return 4 + tagsize -} -func sizeFloat32Slice(ptr pointer, tagsize int) int { - s := *ptr.toFloat32Slice() - return (4 + tagsize) * len(s) -} -func sizeFloat32PackedSlice(ptr pointer, tagsize int) int { - s := *ptr.toFloat32Slice() - if len(s) == 0 { - return 0 - } - return 4*len(s) + SizeVarint(uint64(4*len(s))) + tagsize -} -func sizeFixed64Value(_ pointer, tagsize int) int { - return 8 + tagsize -} -func sizeFixed64ValueNoZero(ptr pointer, tagsize int) int { - v := *ptr.toUint64() - if v == 0 { - return 0 - } - return 8 + tagsize -} -func sizeFixed64Ptr(ptr pointer, tagsize int) int { - p := *ptr.toUint64Ptr() - if p == nil { - return 0 - } - return 8 + tagsize -} -func sizeFixed64Slice(ptr pointer, tagsize int) int { - s := *ptr.toUint64Slice() - return (8 + tagsize) * len(s) -} -func sizeFixed64PackedSlice(ptr pointer, tagsize int) int { - s := *ptr.toUint64Slice() - if len(s) == 0 { - return 0 - } - return 8*len(s) + SizeVarint(uint64(8*len(s))) + tagsize -} -func sizeFixedS64Value(_ pointer, tagsize 
int) int { - return 8 + tagsize -} -func sizeFixedS64ValueNoZero(ptr pointer, tagsize int) int { - v := *ptr.toInt64() - if v == 0 { - return 0 - } - return 8 + tagsize -} -func sizeFixedS64Ptr(ptr pointer, tagsize int) int { - p := *ptr.toInt64Ptr() - if p == nil { - return 0 - } - return 8 + tagsize -} -func sizeFixedS64Slice(ptr pointer, tagsize int) int { - s := *ptr.toInt64Slice() - return (8 + tagsize) * len(s) -} -func sizeFixedS64PackedSlice(ptr pointer, tagsize int) int { - s := *ptr.toInt64Slice() - if len(s) == 0 { - return 0 - } - return 8*len(s) + SizeVarint(uint64(8*len(s))) + tagsize -} -func sizeFloat64Value(_ pointer, tagsize int) int { - return 8 + tagsize -} -func sizeFloat64ValueNoZero(ptr pointer, tagsize int) int { - v := math.Float64bits(*ptr.toFloat64()) - if v == 0 { - return 0 - } - return 8 + tagsize -} -func sizeFloat64Ptr(ptr pointer, tagsize int) int { - p := *ptr.toFloat64Ptr() - if p == nil { - return 0 - } - return 8 + tagsize -} -func sizeFloat64Slice(ptr pointer, tagsize int) int { - s := *ptr.toFloat64Slice() - return (8 + tagsize) * len(s) -} -func sizeFloat64PackedSlice(ptr pointer, tagsize int) int { - s := *ptr.toFloat64Slice() - if len(s) == 0 { - return 0 - } - return 8*len(s) + SizeVarint(uint64(8*len(s))) + tagsize -} -func sizeVarint32Value(ptr pointer, tagsize int) int { - v := *ptr.toUint32() - return SizeVarint(uint64(v)) + tagsize -} -func sizeVarint32ValueNoZero(ptr pointer, tagsize int) int { - v := *ptr.toUint32() - if v == 0 { - return 0 - } - return SizeVarint(uint64(v)) + tagsize -} -func sizeVarint32Ptr(ptr pointer, tagsize int) int { - p := *ptr.toUint32Ptr() - if p == nil { - return 0 - } - return SizeVarint(uint64(*p)) + tagsize -} -func sizeVarint32Slice(ptr pointer, tagsize int) int { - s := *ptr.toUint32Slice() - n := 0 - for _, v := range s { - n += SizeVarint(uint64(v)) + tagsize - } - return n -} -func sizeVarint32PackedSlice(ptr pointer, tagsize int) int { - s := *ptr.toUint32Slice() - if len(s) == 0 { - return 0 - } - n := 0 - for _, v := range s { - n += SizeVarint(uint64(v)) - } - return n + SizeVarint(uint64(n)) + tagsize -} -func sizeVarintS32Value(ptr pointer, tagsize int) int { - v := *ptr.toInt32() - return SizeVarint(uint64(v)) + tagsize -} -func sizeVarintS32ValueNoZero(ptr pointer, tagsize int) int { - v := *ptr.toInt32() - if v == 0 { - return 0 - } - return SizeVarint(uint64(v)) + tagsize -} -func sizeVarintS32Ptr(ptr pointer, tagsize int) int { - p := ptr.getInt32Ptr() - if p == nil { - return 0 - } - return SizeVarint(uint64(*p)) + tagsize -} -func sizeVarintS32Slice(ptr pointer, tagsize int) int { - s := ptr.getInt32Slice() - n := 0 - for _, v := range s { - n += SizeVarint(uint64(v)) + tagsize - } - return n -} -func sizeVarintS32PackedSlice(ptr pointer, tagsize int) int { - s := ptr.getInt32Slice() - if len(s) == 0 { - return 0 - } - n := 0 - for _, v := range s { - n += SizeVarint(uint64(v)) - } - return n + SizeVarint(uint64(n)) + tagsize -} -func sizeVarint64Value(ptr pointer, tagsize int) int { - v := *ptr.toUint64() - return SizeVarint(v) + tagsize -} -func sizeVarint64ValueNoZero(ptr pointer, tagsize int) int { - v := *ptr.toUint64() - if v == 0 { - return 0 - } - return SizeVarint(v) + tagsize -} -func sizeVarint64Ptr(ptr pointer, tagsize int) int { - p := *ptr.toUint64Ptr() - if p == nil { - return 0 - } - return SizeVarint(*p) + tagsize -} -func sizeVarint64Slice(ptr pointer, tagsize int) int { - s := *ptr.toUint64Slice() - n := 0 - for _, v := range s { - n += SizeVarint(v) + tagsize - } - return 
n -} -func sizeVarint64PackedSlice(ptr pointer, tagsize int) int { - s := *ptr.toUint64Slice() - if len(s) == 0 { - return 0 - } - n := 0 - for _, v := range s { - n += SizeVarint(v) - } - return n + SizeVarint(uint64(n)) + tagsize -} -func sizeVarintS64Value(ptr pointer, tagsize int) int { - v := *ptr.toInt64() - return SizeVarint(uint64(v)) + tagsize -} -func sizeVarintS64ValueNoZero(ptr pointer, tagsize int) int { - v := *ptr.toInt64() - if v == 0 { - return 0 - } - return SizeVarint(uint64(v)) + tagsize -} -func sizeVarintS64Ptr(ptr pointer, tagsize int) int { - p := *ptr.toInt64Ptr() - if p == nil { - return 0 - } - return SizeVarint(uint64(*p)) + tagsize -} -func sizeVarintS64Slice(ptr pointer, tagsize int) int { - s := *ptr.toInt64Slice() - n := 0 - for _, v := range s { - n += SizeVarint(uint64(v)) + tagsize - } - return n -} -func sizeVarintS64PackedSlice(ptr pointer, tagsize int) int { - s := *ptr.toInt64Slice() - if len(s) == 0 { - return 0 - } - n := 0 - for _, v := range s { - n += SizeVarint(uint64(v)) - } - return n + SizeVarint(uint64(n)) + tagsize -} -func sizeZigzag32Value(ptr pointer, tagsize int) int { - v := *ptr.toInt32() - return SizeVarint(uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + tagsize -} -func sizeZigzag32ValueNoZero(ptr pointer, tagsize int) int { - v := *ptr.toInt32() - if v == 0 { - return 0 - } - return SizeVarint(uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + tagsize -} -func sizeZigzag32Ptr(ptr pointer, tagsize int) int { - p := ptr.getInt32Ptr() - if p == nil { - return 0 - } - v := *p - return SizeVarint(uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + tagsize -} -func sizeZigzag32Slice(ptr pointer, tagsize int) int { - s := ptr.getInt32Slice() - n := 0 - for _, v := range s { - n += SizeVarint(uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + tagsize - } - return n -} -func sizeZigzag32PackedSlice(ptr pointer, tagsize int) int { - s := ptr.getInt32Slice() - if len(s) == 0 { - return 0 - } - n := 0 - for _, v := range s { - n += SizeVarint(uint64((uint32(v) << 1) ^ uint32((int32(v) >> 31)))) - } - return n + SizeVarint(uint64(n)) + tagsize -} -func sizeZigzag64Value(ptr pointer, tagsize int) int { - v := *ptr.toInt64() - return SizeVarint(uint64(v<<1)^uint64((int64(v)>>63))) + tagsize -} -func sizeZigzag64ValueNoZero(ptr pointer, tagsize int) int { - v := *ptr.toInt64() - if v == 0 { - return 0 - } - return SizeVarint(uint64(v<<1)^uint64((int64(v)>>63))) + tagsize -} -func sizeZigzag64Ptr(ptr pointer, tagsize int) int { - p := *ptr.toInt64Ptr() - if p == nil { - return 0 - } - v := *p - return SizeVarint(uint64(v<<1)^uint64((int64(v)>>63))) + tagsize -} -func sizeZigzag64Slice(ptr pointer, tagsize int) int { - s := *ptr.toInt64Slice() - n := 0 - for _, v := range s { - n += SizeVarint(uint64(v<<1)^uint64((int64(v)>>63))) + tagsize - } - return n -} -func sizeZigzag64PackedSlice(ptr pointer, tagsize int) int { - s := *ptr.toInt64Slice() - if len(s) == 0 { - return 0 - } - n := 0 - for _, v := range s { - n += SizeVarint(uint64(v<<1) ^ uint64((int64(v) >> 63))) - } - return n + SizeVarint(uint64(n)) + tagsize -} -func sizeBoolValue(_ pointer, tagsize int) int { - return 1 + tagsize -} -func sizeBoolValueNoZero(ptr pointer, tagsize int) int { - v := *ptr.toBool() - if !v { - return 0 - } - return 1 + tagsize -} -func sizeBoolPtr(ptr pointer, tagsize int) int { - p := *ptr.toBoolPtr() - if p == nil { - return 0 - } - return 1 + tagsize -} -func sizeBoolSlice(ptr pointer, tagsize int) int { - s := *ptr.toBoolSlice() - return (1 + tagsize) * len(s) -} 
-func sizeBoolPackedSlice(ptr pointer, tagsize int) int { - s := *ptr.toBoolSlice() - if len(s) == 0 { - return 0 - } - return len(s) + SizeVarint(uint64(len(s))) + tagsize -} -func sizeStringValue(ptr pointer, tagsize int) int { - v := *ptr.toString() - return len(v) + SizeVarint(uint64(len(v))) + tagsize -} -func sizeStringValueNoZero(ptr pointer, tagsize int) int { - v := *ptr.toString() - if v == "" { - return 0 - } - return len(v) + SizeVarint(uint64(len(v))) + tagsize -} -func sizeStringPtr(ptr pointer, tagsize int) int { - p := *ptr.toStringPtr() - if p == nil { - return 0 - } - v := *p - return len(v) + SizeVarint(uint64(len(v))) + tagsize -} -func sizeStringSlice(ptr pointer, tagsize int) int { - s := *ptr.toStringSlice() - n := 0 - for _, v := range s { - n += len(v) + SizeVarint(uint64(len(v))) + tagsize - } - return n -} -func sizeBytes(ptr pointer, tagsize int) int { - v := *ptr.toBytes() - if v == nil { - return 0 - } - return len(v) + SizeVarint(uint64(len(v))) + tagsize -} -func sizeBytes3(ptr pointer, tagsize int) int { - v := *ptr.toBytes() - if len(v) == 0 { - return 0 - } - return len(v) + SizeVarint(uint64(len(v))) + tagsize -} -func sizeBytesOneof(ptr pointer, tagsize int) int { - v := *ptr.toBytes() - return len(v) + SizeVarint(uint64(len(v))) + tagsize -} -func sizeBytesSlice(ptr pointer, tagsize int) int { - s := *ptr.toBytesSlice() - n := 0 - for _, v := range s { - n += len(v) + SizeVarint(uint64(len(v))) + tagsize - } - return n -} - -// appendFixed32 appends an encoded fixed32 to b. -func appendFixed32(b []byte, v uint32) []byte { - b = append(b, - byte(v), - byte(v>>8), - byte(v>>16), - byte(v>>24)) - return b -} - -// appendFixed64 appends an encoded fixed64 to b. -func appendFixed64(b []byte, v uint64) []byte { - b = append(b, - byte(v), - byte(v>>8), - byte(v>>16), - byte(v>>24), - byte(v>>32), - byte(v>>40), - byte(v>>48), - byte(v>>56)) - return b -} - -// appendVarint appends an encoded varint to b. -func appendVarint(b []byte, v uint64) []byte { - // TODO: make 1-byte (maybe 2-byte) case inline-able, once we - // have non-leaf inliner. 
- switch { - case v < 1<<7: - b = append(b, byte(v)) - case v < 1<<14: - b = append(b, - byte(v&0x7f|0x80), - byte(v>>7)) - case v < 1<<21: - b = append(b, - byte(v&0x7f|0x80), - byte((v>>7)&0x7f|0x80), - byte(v>>14)) - case v < 1<<28: - b = append(b, - byte(v&0x7f|0x80), - byte((v>>7)&0x7f|0x80), - byte((v>>14)&0x7f|0x80), - byte(v>>21)) - case v < 1<<35: - b = append(b, - byte(v&0x7f|0x80), - byte((v>>7)&0x7f|0x80), - byte((v>>14)&0x7f|0x80), - byte((v>>21)&0x7f|0x80), - byte(v>>28)) - case v < 1<<42: - b = append(b, - byte(v&0x7f|0x80), - byte((v>>7)&0x7f|0x80), - byte((v>>14)&0x7f|0x80), - byte((v>>21)&0x7f|0x80), - byte((v>>28)&0x7f|0x80), - byte(v>>35)) - case v < 1<<49: - b = append(b, - byte(v&0x7f|0x80), - byte((v>>7)&0x7f|0x80), - byte((v>>14)&0x7f|0x80), - byte((v>>21)&0x7f|0x80), - byte((v>>28)&0x7f|0x80), - byte((v>>35)&0x7f|0x80), - byte(v>>42)) - case v < 1<<56: - b = append(b, - byte(v&0x7f|0x80), - byte((v>>7)&0x7f|0x80), - byte((v>>14)&0x7f|0x80), - byte((v>>21)&0x7f|0x80), - byte((v>>28)&0x7f|0x80), - byte((v>>35)&0x7f|0x80), - byte((v>>42)&0x7f|0x80), - byte(v>>49)) - case v < 1<<63: - b = append(b, - byte(v&0x7f|0x80), - byte((v>>7)&0x7f|0x80), - byte((v>>14)&0x7f|0x80), - byte((v>>21)&0x7f|0x80), - byte((v>>28)&0x7f|0x80), - byte((v>>35)&0x7f|0x80), - byte((v>>42)&0x7f|0x80), - byte((v>>49)&0x7f|0x80), - byte(v>>56)) - default: - b = append(b, - byte(v&0x7f|0x80), - byte((v>>7)&0x7f|0x80), - byte((v>>14)&0x7f|0x80), - byte((v>>21)&0x7f|0x80), - byte((v>>28)&0x7f|0x80), - byte((v>>35)&0x7f|0x80), - byte((v>>42)&0x7f|0x80), - byte((v>>49)&0x7f|0x80), - byte((v>>56)&0x7f|0x80), - 1) - } - return b -} - -func appendFixed32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toUint32() - b = appendVarint(b, wiretag) - b = appendFixed32(b, v) - return b, nil -} -func appendFixed32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toUint32() - if v == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendFixed32(b, v) - return b, nil -} -func appendFixed32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := *ptr.toUint32Ptr() - if p == nil { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendFixed32(b, *p) - return b, nil -} -func appendFixed32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toUint32Slice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendFixed32(b, v) - } - return b, nil -} -func appendFixed32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toUint32Slice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - b = appendVarint(b, uint64(4*len(s))) - for _, v := range s { - b = appendFixed32(b, v) - } - return b, nil -} -func appendFixedS32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toInt32() - b = appendVarint(b, wiretag) - b = appendFixed32(b, uint32(v)) - return b, nil -} -func appendFixedS32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toInt32() - if v == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendFixed32(b, uint32(v)) - return b, nil -} -func appendFixedS32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := ptr.getInt32Ptr() - if p == nil { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendFixed32(b, uint32(*p)) - return b, nil -} -func appendFixedS32Slice(b []byte, ptr pointer, 
wiretag uint64, _ bool) ([]byte, error) { - s := ptr.getInt32Slice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendFixed32(b, uint32(v)) - } - return b, nil -} -func appendFixedS32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := ptr.getInt32Slice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - b = appendVarint(b, uint64(4*len(s))) - for _, v := range s { - b = appendFixed32(b, uint32(v)) - } - return b, nil -} -func appendFloat32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := math.Float32bits(*ptr.toFloat32()) - b = appendVarint(b, wiretag) - b = appendFixed32(b, v) - return b, nil -} -func appendFloat32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := math.Float32bits(*ptr.toFloat32()) - if v == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendFixed32(b, v) - return b, nil -} -func appendFloat32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := *ptr.toFloat32Ptr() - if p == nil { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendFixed32(b, math.Float32bits(*p)) - return b, nil -} -func appendFloat32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toFloat32Slice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendFixed32(b, math.Float32bits(v)) - } - return b, nil -} -func appendFloat32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toFloat32Slice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - b = appendVarint(b, uint64(4*len(s))) - for _, v := range s { - b = appendFixed32(b, math.Float32bits(v)) - } - return b, nil -} -func appendFixed64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toUint64() - b = appendVarint(b, wiretag) - b = appendFixed64(b, v) - return b, nil -} -func appendFixed64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toUint64() - if v == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendFixed64(b, v) - return b, nil -} -func appendFixed64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := *ptr.toUint64Ptr() - if p == nil { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendFixed64(b, *p) - return b, nil -} -func appendFixed64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toUint64Slice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendFixed64(b, v) - } - return b, nil -} -func appendFixed64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toUint64Slice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - b = appendVarint(b, uint64(8*len(s))) - for _, v := range s { - b = appendFixed64(b, v) - } - return b, nil -} -func appendFixedS64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toInt64() - b = appendVarint(b, wiretag) - b = appendFixed64(b, uint64(v)) - return b, nil -} -func appendFixedS64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toInt64() - if v == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendFixed64(b, uint64(v)) - return b, nil -} -func appendFixedS64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := *ptr.toInt64Ptr() - if p == nil { - return b, 
nil - } - b = appendVarint(b, wiretag) - b = appendFixed64(b, uint64(*p)) - return b, nil -} -func appendFixedS64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toInt64Slice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendFixed64(b, uint64(v)) - } - return b, nil -} -func appendFixedS64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toInt64Slice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - b = appendVarint(b, uint64(8*len(s))) - for _, v := range s { - b = appendFixed64(b, uint64(v)) - } - return b, nil -} -func appendFloat64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := math.Float64bits(*ptr.toFloat64()) - b = appendVarint(b, wiretag) - b = appendFixed64(b, v) - return b, nil -} -func appendFloat64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := math.Float64bits(*ptr.toFloat64()) - if v == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendFixed64(b, v) - return b, nil -} -func appendFloat64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := *ptr.toFloat64Ptr() - if p == nil { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendFixed64(b, math.Float64bits(*p)) - return b, nil -} -func appendFloat64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toFloat64Slice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendFixed64(b, math.Float64bits(v)) - } - return b, nil -} -func appendFloat64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toFloat64Slice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - b = appendVarint(b, uint64(8*len(s))) - for _, v := range s { - b = appendFixed64(b, math.Float64bits(v)) - } - return b, nil -} -func appendVarint32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toUint32() - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(v)) - return b, nil -} -func appendVarint32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toUint32() - if v == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(v)) - return b, nil -} -func appendVarint32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := *ptr.toUint32Ptr() - if p == nil { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(*p)) - return b, nil -} -func appendVarint32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toUint32Slice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(v)) - } - return b, nil -} -func appendVarint32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toUint32Slice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - // compute size - n := 0 - for _, v := range s { - n += SizeVarint(uint64(v)) - } - b = appendVarint(b, uint64(n)) - for _, v := range s { - b = appendVarint(b, uint64(v)) - } - return b, nil -} -func appendVarintS32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toInt32() - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(v)) - return b, nil -} -func appendVarintS32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toInt32() 
- if v == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(v)) - return b, nil -} -func appendVarintS32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := ptr.getInt32Ptr() - if p == nil { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(*p)) - return b, nil -} -func appendVarintS32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := ptr.getInt32Slice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(v)) - } - return b, nil -} -func appendVarintS32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := ptr.getInt32Slice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - // compute size - n := 0 - for _, v := range s { - n += SizeVarint(uint64(v)) - } - b = appendVarint(b, uint64(n)) - for _, v := range s { - b = appendVarint(b, uint64(v)) - } - return b, nil -} -func appendVarint64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toUint64() - b = appendVarint(b, wiretag) - b = appendVarint(b, v) - return b, nil -} -func appendVarint64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toUint64() - if v == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, v) - return b, nil -} -func appendVarint64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := *ptr.toUint64Ptr() - if p == nil { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, *p) - return b, nil -} -func appendVarint64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toUint64Slice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendVarint(b, v) - } - return b, nil -} -func appendVarint64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toUint64Slice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - // compute size - n := 0 - for _, v := range s { - n += SizeVarint(v) - } - b = appendVarint(b, uint64(n)) - for _, v := range s { - b = appendVarint(b, v) - } - return b, nil -} -func appendVarintS64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toInt64() - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(v)) - return b, nil -} -func appendVarintS64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toInt64() - if v == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(v)) - return b, nil -} -func appendVarintS64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := *ptr.toInt64Ptr() - if p == nil { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(*p)) - return b, nil -} -func appendVarintS64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toInt64Slice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(v)) - } - return b, nil -} -func appendVarintS64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toInt64Slice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - // compute size - n := 0 - for _, v := range s { - n += SizeVarint(uint64(v)) - } - b = appendVarint(b, uint64(n)) - for _, v := range s { - b = appendVarint(b, uint64(v)) - } - return b, nil -} -func 
appendZigzag32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toInt32() - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) - return b, nil -} -func appendZigzag32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toInt32() - if v == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) - return b, nil -} -func appendZigzag32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := ptr.getInt32Ptr() - if p == nil { - return b, nil - } - b = appendVarint(b, wiretag) - v := *p - b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) - return b, nil -} -func appendZigzag32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := ptr.getInt32Slice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) - } - return b, nil -} -func appendZigzag32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := ptr.getInt32Slice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - // compute size - n := 0 - for _, v := range s { - n += SizeVarint(uint64((uint32(v) << 1) ^ uint32((int32(v) >> 31)))) - } - b = appendVarint(b, uint64(n)) - for _, v := range s { - b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) - } - return b, nil -} -func appendZigzag64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toInt64() - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) - return b, nil -} -func appendZigzag64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toInt64() - if v == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) - return b, nil -} -func appendZigzag64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := *ptr.toInt64Ptr() - if p == nil { - return b, nil - } - b = appendVarint(b, wiretag) - v := *p - b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) - return b, nil -} -func appendZigzag64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toInt64Slice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) - } - return b, nil -} -func appendZigzag64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toInt64Slice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - // compute size - n := 0 - for _, v := range s { - n += SizeVarint(uint64(v<<1) ^ uint64((int64(v) >> 63))) - } - b = appendVarint(b, uint64(n)) - for _, v := range s { - b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) - } - return b, nil -} -func appendBoolValue(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toBool() - b = appendVarint(b, wiretag) - if v { - b = append(b, 1) - } else { - b = append(b, 0) - } - return b, nil -} -func appendBoolValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toBool() - if !v { - return b, nil - } - b = appendVarint(b, wiretag) - b = append(b, 1) - return b, nil -} - -func appendBoolPtr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := *ptr.toBoolPtr() - if p == nil { - 
return b, nil - } - b = appendVarint(b, wiretag) - if *p { - b = append(b, 1) - } else { - b = append(b, 0) - } - return b, nil -} -func appendBoolSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toBoolSlice() - for _, v := range s { - b = appendVarint(b, wiretag) - if v { - b = append(b, 1) - } else { - b = append(b, 0) - } - } - return b, nil -} -func appendBoolPackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toBoolSlice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - b = appendVarint(b, uint64(len(s))) - for _, v := range s { - if v { - b = append(b, 1) - } else { - b = append(b, 0) - } - } - return b, nil -} -func appendStringValue(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toString() - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(v))) - b = append(b, v...) - return b, nil -} -func appendStringValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toString() - if v == "" { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(v))) - b = append(b, v...) - return b, nil -} -func appendStringPtr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := *ptr.toStringPtr() - if p == nil { - return b, nil - } - v := *p - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(v))) - b = append(b, v...) - return b, nil -} -func appendStringSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toStringSlice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(v))) - b = append(b, v...) - } - return b, nil -} -func appendUTF8StringValue(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - var invalidUTF8 bool - v := *ptr.toString() - if !utf8.ValidString(v) { - invalidUTF8 = true - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(v))) - b = append(b, v...) - if invalidUTF8 { - return b, errInvalidUTF8 - } - return b, nil -} -func appendUTF8StringValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - var invalidUTF8 bool - v := *ptr.toString() - if v == "" { - return b, nil - } - if !utf8.ValidString(v) { - invalidUTF8 = true - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(v))) - b = append(b, v...) - if invalidUTF8 { - return b, errInvalidUTF8 - } - return b, nil -} -func appendUTF8StringPtr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - var invalidUTF8 bool - p := *ptr.toStringPtr() - if p == nil { - return b, nil - } - v := *p - if !utf8.ValidString(v) { - invalidUTF8 = true - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(v))) - b = append(b, v...) - if invalidUTF8 { - return b, errInvalidUTF8 - } - return b, nil -} -func appendUTF8StringSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - var invalidUTF8 bool - s := *ptr.toStringSlice() - for _, v := range s { - if !utf8.ValidString(v) { - invalidUTF8 = true - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(v))) - b = append(b, v...) - } - if invalidUTF8 { - return b, errInvalidUTF8 - } - return b, nil -} -func appendBytes(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toBytes() - if v == nil { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(v))) - b = append(b, v...) 
- return b, nil -} -func appendBytes3(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toBytes() - if len(v) == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(v))) - b = append(b, v...) - return b, nil -} -func appendBytesOneof(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toBytes() - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(v))) - b = append(b, v...) - return b, nil -} -func appendBytesSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toBytesSlice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(v))) - b = append(b, v...) - } - return b, nil -} - -// makeGroupMarshaler returns the sizer and marshaler for a group. -// u is the marshal info of the underlying message. -func makeGroupMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - p := ptr.getPointer() - if p.isNil() { - return 0 - } - return u.size(p) + 2*tagsize - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - p := ptr.getPointer() - if p.isNil() { - return b, nil - } - var err error - b = appendVarint(b, wiretag) // start group - b, err = u.marshal(b, p, deterministic) - b = appendVarint(b, wiretag+(WireEndGroup-WireStartGroup)) // end group - return b, err - } -} - -// makeGroupSliceMarshaler returns the sizer and marshaler for a group slice. -// u is the marshal info of the underlying message. -func makeGroupSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getPointerSlice() - n := 0 - for _, v := range s { - if v.isNil() { - continue - } - n += u.size(v) + 2*tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getPointerSlice() - var err error - var nerr nonFatal - for _, v := range s { - if v.isNil() { - return b, errRepeatedHasNil - } - b = appendVarint(b, wiretag) // start group - b, err = u.marshal(b, v, deterministic) - b = appendVarint(b, wiretag+(WireEndGroup-WireStartGroup)) // end group - if !nerr.Merge(err) { - if err == ErrNil { - err = errRepeatedHasNil - } - return b, err - } - } - return b, nerr.E - } -} - -// makeMessageMarshaler returns the sizer and marshaler for a message field. -// u is the marshal info of the message. -func makeMessageMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - p := ptr.getPointer() - if p.isNil() { - return 0 - } - siz := u.size(p) - return siz + SizeVarint(uint64(siz)) + tagsize - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - p := ptr.getPointer() - if p.isNil() { - return b, nil - } - b = appendVarint(b, wiretag) - siz := u.cachedsize(p) - b = appendVarint(b, uint64(siz)) - return u.marshal(b, p, deterministic) - } -} - -// makeMessageSliceMarshaler returns the sizer and marshaler for a message slice. -// u is the marshal info of the message. 
-func makeMessageSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getPointerSlice() - n := 0 - for _, v := range s { - if v.isNil() { - continue - } - siz := u.size(v) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getPointerSlice() - var err error - var nerr nonFatal - for _, v := range s { - if v.isNil() { - return b, errRepeatedHasNil - } - b = appendVarint(b, wiretag) - siz := u.cachedsize(v) - b = appendVarint(b, uint64(siz)) - b, err = u.marshal(b, v, deterministic) - - if !nerr.Merge(err) { - if err == ErrNil { - err = errRepeatedHasNil - } - return b, err - } - } - return b, nerr.E - } -} - -// makeMapMarshaler returns the sizer and marshaler for a map field. -// f is the pointer to the reflect data structure of the field. -func makeMapMarshaler(f *reflect.StructField) (sizer, marshaler) { - // figure out key and value type - t := f.Type - keyType := t.Key() - valType := t.Elem() - tags := strings.Split(f.Tag.Get("protobuf"), ",") - keyTags := strings.Split(f.Tag.Get("protobuf_key"), ",") - valTags := strings.Split(f.Tag.Get("protobuf_val"), ",") - stdOptions := false - for _, t := range tags { - if strings.HasPrefix(t, "customtype=") { - valTags = append(valTags, t) - } - if t == "stdtime" { - valTags = append(valTags, t) - stdOptions = true - } - if t == "stdduration" { - valTags = append(valTags, t) - stdOptions = true - } - if t == "wktptr" { - valTags = append(valTags, t) - } - } - keySizer, keyMarshaler := typeMarshaler(keyType, keyTags, false, false) // don't omit zero value in map - valSizer, valMarshaler := typeMarshaler(valType, valTags, false, false) // don't omit zero value in map - keyWireTag := 1<<3 | wiretype(keyTags[0]) - valWireTag := 2<<3 | wiretype(valTags[0]) - - // We create an interface to get the addresses of the map key and value. - // If value is pointer-typed, the interface is a direct interface, the - // idata itself is the value. Otherwise, the idata is the pointer to the - // value. - // Key cannot be pointer-typed. - valIsPtr := valType.Kind() == reflect.Ptr - - // If value is a message with nested maps, calling - // valSizer in marshal may be quadratic. We should use - // cached version in marshal (but not in size). - // If value is not message type, we don't have size cache, - // but it cannot be nested either. Just use valSizer. - valCachedSizer := valSizer - if valIsPtr && !stdOptions && valType.Elem().Kind() == reflect.Struct { - u := getMarshalInfo(valType.Elem()) - valCachedSizer = func(ptr pointer, tagsize int) int { - // Same as message sizer, but use cache. 
- p := ptr.getPointer() - if p.isNil() { - return 0 - } - siz := u.cachedsize(p) - return siz + SizeVarint(uint64(siz)) + tagsize - } - } - return func(ptr pointer, tagsize int) int { - m := ptr.asPointerTo(t).Elem() // the map - n := 0 - for _, k := range m.MapKeys() { - ki := k.Interface() - vi := m.MapIndex(k).Interface() - kaddr := toAddrPointer(&ki, false) // pointer to key - vaddr := toAddrPointer(&vi, valIsPtr) // pointer to value - siz := keySizer(kaddr, 1) + valSizer(vaddr, 1) // tag of key = 1 (size=1), tag of val = 2 (size=1) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, tag uint64, deterministic bool) ([]byte, error) { - m := ptr.asPointerTo(t).Elem() // the map - var err error - keys := m.MapKeys() - if len(keys) > 1 && deterministic { - sort.Sort(mapKeys(keys)) - } - - var nerr nonFatal - for _, k := range keys { - ki := k.Interface() - vi := m.MapIndex(k).Interface() - kaddr := toAddrPointer(&ki, false) // pointer to key - vaddr := toAddrPointer(&vi, valIsPtr) // pointer to value - b = appendVarint(b, tag) - siz := keySizer(kaddr, 1) + valCachedSizer(vaddr, 1) // tag of key = 1 (size=1), tag of val = 2 (size=1) - b = appendVarint(b, uint64(siz)) - b, err = keyMarshaler(b, kaddr, keyWireTag, deterministic) - if !nerr.Merge(err) { - return b, err - } - b, err = valMarshaler(b, vaddr, valWireTag, deterministic) - if err != ErrNil && !nerr.Merge(err) { // allow nil value in map - return b, err - } - } - return b, nerr.E - } -} - -// makeOneOfMarshaler returns the sizer and marshaler for a oneof field. -// fi is the marshal info of the field. -// f is the pointer to the reflect data structure of the field. -func makeOneOfMarshaler(fi *marshalFieldInfo, f *reflect.StructField) (sizer, marshaler) { - // Oneof field is an interface. We need to get the actual data type on the fly. - t := f.Type - return func(ptr pointer, _ int) int { - p := ptr.getInterfacePointer() - if p.isNil() { - return 0 - } - v := ptr.asPointerTo(t).Elem().Elem().Elem() // *interface -> interface -> *struct -> struct - telem := v.Type() - e := fi.oneofElems[telem] - return e.sizer(p, e.tagsize) - }, - func(b []byte, ptr pointer, _ uint64, deterministic bool) ([]byte, error) { - p := ptr.getInterfacePointer() - if p.isNil() { - return b, nil - } - v := ptr.asPointerTo(t).Elem().Elem().Elem() // *interface -> interface -> *struct -> struct - telem := v.Type() - if telem.Field(0).Type.Kind() == reflect.Ptr && p.getPointer().isNil() { - return b, errOneofHasNil - } - e := fi.oneofElems[telem] - return e.marshaler(b, p, e.wiretag, deterministic) - } -} - -// sizeExtensions computes the size of encoded data for a XXX_InternalExtensions field. -func (u *marshalInfo) sizeExtensions(ext *XXX_InternalExtensions) int { - m, mu := ext.extensionsRead() - if m == nil { - return 0 - } - mu.Lock() - - n := 0 - for _, e := range m { - if e.value == nil || e.desc == nil { - // Extension is only in its encoded form. - n += len(e.enc) - continue - } - - // We don't skip extensions that have an encoded form set, - // because the extension value may have been mutated after - // the last time this function was called. - ei := u.getExtElemInfo(e.desc) - v := e.value - p := toAddrPointer(&v, ei.isptr) - n += ei.sizer(p, ei.tagsize) - } - mu.Unlock() - return n -} - -// appendExtensions marshals a XXX_InternalExtensions field to the end of byte slice b. 
-func (u *marshalInfo) appendExtensions(b []byte, ext *XXX_InternalExtensions, deterministic bool) ([]byte, error) { - m, mu := ext.extensionsRead() - if m == nil { - return b, nil - } - mu.Lock() - defer mu.Unlock() - - var err error - var nerr nonFatal - - // Fast-path for common cases: zero or one extensions. - // Don't bother sorting the keys. - if len(m) <= 1 { - for _, e := range m { - if e.value == nil || e.desc == nil { - // Extension is only in its encoded form. - b = append(b, e.enc...) - continue - } - - // We don't skip extensions that have an encoded form set, - // because the extension value may have been mutated after - // the last time this function was called. - - ei := u.getExtElemInfo(e.desc) - v := e.value - p := toAddrPointer(&v, ei.isptr) - b, err = ei.marshaler(b, p, ei.wiretag, deterministic) - if !nerr.Merge(err) { - return b, err - } - } - return b, nerr.E - } - - // Sort the keys to provide a deterministic encoding. - // Not sure this is required, but the old code does it. - keys := make([]int, 0, len(m)) - for k := range m { - keys = append(keys, int(k)) - } - sort.Ints(keys) - - for _, k := range keys { - e := m[int32(k)] - if e.value == nil || e.desc == nil { - // Extension is only in its encoded form. - b = append(b, e.enc...) - continue - } - - // We don't skip extensions that have an encoded form set, - // because the extension value may have been mutated after - // the last time this function was called. - - ei := u.getExtElemInfo(e.desc) - v := e.value - p := toAddrPointer(&v, ei.isptr) - b, err = ei.marshaler(b, p, ei.wiretag, deterministic) - if !nerr.Merge(err) { - return b, err - } - } - return b, nerr.E -} - -// message set format is: -// message MessageSet { -// repeated group Item = 1 { -// required int32 type_id = 2; -// required string message = 3; -// }; -// } - -// sizeMessageSet computes the size of encoded data for a XXX_InternalExtensions field -// in message set format (above). -func (u *marshalInfo) sizeMessageSet(ext *XXX_InternalExtensions) int { - m, mu := ext.extensionsRead() - if m == nil { - return 0 - } - mu.Lock() - - n := 0 - for id, e := range m { - n += 2 // start group, end group. tag = 1 (size=1) - n += SizeVarint(uint64(id)) + 1 // type_id, tag = 2 (size=1) - - if e.value == nil || e.desc == nil { - // Extension is only in its encoded form. - msgWithLen := skipVarint(e.enc) // skip old tag, but leave the length varint - siz := len(msgWithLen) - n += siz + 1 // message, tag = 3 (size=1) - continue - } - - // We don't skip extensions that have an encoded form set, - // because the extension value may have been mutated after - // the last time this function was called. - - ei := u.getExtElemInfo(e.desc) - v := e.value - p := toAddrPointer(&v, ei.isptr) - n += ei.sizer(p, 1) // message, tag = 3 (size=1) - } - mu.Unlock() - return n -} - -// appendMessageSet marshals a XXX_InternalExtensions field in message set format (above) -// to the end of byte slice b. -func (u *marshalInfo) appendMessageSet(b []byte, ext *XXX_InternalExtensions, deterministic bool) ([]byte, error) { - m, mu := ext.extensionsRead() - if m == nil { - return b, nil - } - mu.Lock() - defer mu.Unlock() - - var err error - var nerr nonFatal - - // Fast-path for common cases: zero or one extensions. - // Don't bother sorting the keys. - if len(m) <= 1 { - for id, e := range m { - b = append(b, 1<<3|WireStartGroup) - b = append(b, 2<<3|WireVarint) - b = appendVarint(b, uint64(id)) - - if e.value == nil || e.desc == nil { - // Extension is only in its encoded form. 
- msgWithLen := skipVarint(e.enc) // skip old tag, but leave the length varint - b = append(b, 3<<3|WireBytes) - b = append(b, msgWithLen...) - b = append(b, 1<<3|WireEndGroup) - continue - } - - // We don't skip extensions that have an encoded form set, - // because the extension value may have been mutated after - // the last time this function was called. - - ei := u.getExtElemInfo(e.desc) - v := e.value - p := toAddrPointer(&v, ei.isptr) - b, err = ei.marshaler(b, p, 3<<3|WireBytes, deterministic) - if !nerr.Merge(err) { - return b, err - } - b = append(b, 1<<3|WireEndGroup) - } - return b, nerr.E - } - - // Sort the keys to provide a deterministic encoding. - keys := make([]int, 0, len(m)) - for k := range m { - keys = append(keys, int(k)) - } - sort.Ints(keys) - - for _, id := range keys { - e := m[int32(id)] - b = append(b, 1<<3|WireStartGroup) - b = append(b, 2<<3|WireVarint) - b = appendVarint(b, uint64(id)) - - if e.value == nil || e.desc == nil { - // Extension is only in its encoded form. - msgWithLen := skipVarint(e.enc) // skip old tag, but leave the length varint - b = append(b, 3<<3|WireBytes) - b = append(b, msgWithLen...) - b = append(b, 1<<3|WireEndGroup) - continue - } - - // We don't skip extensions that have an encoded form set, - // because the extension value may have been mutated after - // the last time this function was called. - - ei := u.getExtElemInfo(e.desc) - v := e.value - p := toAddrPointer(&v, ei.isptr) - b, err = ei.marshaler(b, p, 3<<3|WireBytes, deterministic) - b = append(b, 1<<3|WireEndGroup) - if !nerr.Merge(err) { - return b, err - } - } - return b, nerr.E -} - -// sizeV1Extensions computes the size of encoded data for a V1-API extension field. -func (u *marshalInfo) sizeV1Extensions(m map[int32]Extension) int { - if m == nil { - return 0 - } - - n := 0 - for _, e := range m { - if e.value == nil || e.desc == nil { - // Extension is only in its encoded form. - n += len(e.enc) - continue - } - - // We don't skip extensions that have an encoded form set, - // because the extension value may have been mutated after - // the last time this function was called. - - ei := u.getExtElemInfo(e.desc) - v := e.value - p := toAddrPointer(&v, ei.isptr) - n += ei.sizer(p, ei.tagsize) - } - return n -} - -// appendV1Extensions marshals a V1-API extension field to the end of byte slice b. -func (u *marshalInfo) appendV1Extensions(b []byte, m map[int32]Extension, deterministic bool) ([]byte, error) { - if m == nil { - return b, nil - } - - // Sort the keys to provide a deterministic encoding. - keys := make([]int, 0, len(m)) - for k := range m { - keys = append(keys, int(k)) - } - sort.Ints(keys) - - var err error - var nerr nonFatal - for _, k := range keys { - e := m[int32(k)] - if e.value == nil || e.desc == nil { - // Extension is only in its encoded form. - b = append(b, e.enc...) - continue - } - - // We don't skip extensions that have an encoded form set, - // because the extension value may have been mutated after - // the last time this function was called. - - ei := u.getExtElemInfo(e.desc) - v := e.value - p := toAddrPointer(&v, ei.isptr) - b, err = ei.marshaler(b, p, ei.wiretag, deterministic) - if !nerr.Merge(err) { - return b, err - } - } - return b, nerr.E -} - -// newMarshaler is the interface representing objects that can marshal themselves. -// -// This exists to support protoc-gen-go generated messages. -// The proto package will stop type-asserting to this interface in the future. -// -// DO NOT DEPEND ON THIS. 
-type newMarshaler interface { - XXX_Size() int - XXX_Marshal(b []byte, deterministic bool) ([]byte, error) -} - -// Size returns the encoded size of a protocol buffer message. -// This is the main entry point. -func Size(pb Message) int { - if m, ok := pb.(newMarshaler); ok { - return m.XXX_Size() - } - if m, ok := pb.(Marshaler); ok { - // If the message can marshal itself, let it do it, for compatibility. - // NOTE: This is not efficient. - b, _ := m.Marshal() - return len(b) - } - // in case somehow we didn't generate the wrapper - if pb == nil { - return 0 - } - var info InternalMessageInfo - return info.Size(pb) -} - -// Marshal takes a protocol buffer message -// and encodes it into the wire format, returning the data. -// This is the main entry point. -func Marshal(pb Message) ([]byte, error) { - if m, ok := pb.(newMarshaler); ok { - siz := m.XXX_Size() - b := make([]byte, 0, siz) - return m.XXX_Marshal(b, false) - } - if m, ok := pb.(Marshaler); ok { - // If the message can marshal itself, let it do it, for compatibility. - // NOTE: This is not efficient. - return m.Marshal() - } - // in case somehow we didn't generate the wrapper - if pb == nil { - return nil, ErrNil - } - var info InternalMessageInfo - siz := info.Size(pb) - b := make([]byte, 0, siz) - return info.Marshal(b, pb, false) -} - -// Marshal takes a protocol buffer message -// and encodes it into the wire format, writing the result to the -// Buffer. -// This is an alternative entry point. It is not necessary to use -// a Buffer for most applications. -func (p *Buffer) Marshal(pb Message) error { - var err error - if p.deterministic { - if _, ok := pb.(Marshaler); ok { - return fmt.Errorf("proto: deterministic not supported by the Marshal method of %T", pb) - } - } - if m, ok := pb.(newMarshaler); ok { - siz := m.XXX_Size() - p.grow(siz) // make sure buf has enough capacity - p.buf, err = m.XXX_Marshal(p.buf, p.deterministic) - return err - } - if m, ok := pb.(Marshaler); ok { - // If the message can marshal itself, let it do it, for compatibility. - // NOTE: This is not efficient. - var b []byte - b, err = m.Marshal() - p.buf = append(p.buf, b...) - return err - } - // in case somehow we didn't generate the wrapper - if pb == nil { - return ErrNil - } - var info InternalMessageInfo - siz := info.Size(pb) - p.grow(siz) // make sure buf has enough capacity - p.buf, err = info.Marshal(p.buf, pb, p.deterministic) - return err -} - -// grow grows the buffer's capacity, if necessary, to guarantee space for -// another n bytes. After grow(n), at least n bytes can be written to the -// buffer without another allocation. -func (p *Buffer) grow(n int) { - need := len(p.buf) + n - if need <= cap(p.buf) { - return - } - newCap := len(p.buf) * 2 - if newCap < need { - newCap = need - } - p.buf = append(make([]byte, 0, newCap), p.buf...) -} diff --git a/vendor/github.com/gogo/protobuf/proto/table_marshal_gogo.go b/vendor/github.com/gogo/protobuf/proto/table_marshal_gogo.go deleted file mode 100644 index 997f57c..0000000 --- a/vendor/github.com/gogo/protobuf/proto/table_marshal_gogo.go +++ /dev/null @@ -1,388 +0,0 @@ -// Protocol Buffers for Go with Gadgets -// -// Copyright (c) 2018, The GoGo Authors. All rights reserved. 
-// http://github.com/gogo/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -import ( - "reflect" - "time" -) - -// makeMessageRefMarshaler differs a bit from makeMessageMarshaler -// It marshal a message T instead of a *T -func makeMessageRefMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - siz := u.size(ptr) - return siz + SizeVarint(uint64(siz)) + tagsize - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - b = appendVarint(b, wiretag) - siz := u.cachedsize(ptr) - b = appendVarint(b, uint64(siz)) - return u.marshal(b, ptr, deterministic) - } -} - -// makeMessageRefSliceMarshaler differs quite a lot from makeMessageSliceMarshaler -// It marshals a slice of messages []T instead of []*T -func makeMessageRefSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getSlice(u.typ) - n := 0 - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - e := elem.Interface() - v := toAddrPointer(&e, false) - siz := u.size(v) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getSlice(u.typ) - var err, errreq error - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - e := elem.Interface() - v := toAddrPointer(&e, false) - b = appendVarint(b, wiretag) - siz := u.size(v) - b = appendVarint(b, uint64(siz)) - b, err = u.marshal(b, v, deterministic) - - if err != nil { - if _, ok := err.(*RequiredNotSetError); ok { - // Required field in submessage is not set. - // We record the error but keep going, to give a complete marshaling. 
- if errreq == nil { - errreq = err - } - continue - } - if err == ErrNil { - err = errRepeatedHasNil - } - return b, err - } - } - - return b, errreq - } -} - -func makeCustomPtrMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - if ptr.isNil() { - return 0 - } - m := ptr.asPointerTo(reflect.PtrTo(u.typ)).Elem().Interface().(custom) - siz := m.Size() - return tagsize + SizeVarint(uint64(siz)) + siz - }, func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - if ptr.isNil() { - return b, nil - } - m := ptr.asPointerTo(reflect.PtrTo(u.typ)).Elem().Interface().(custom) - siz := m.Size() - buf, err := m.Marshal() - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(siz)) - b = append(b, buf...) - return b, nil - } -} - -func makeCustomMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - m := ptr.asPointerTo(u.typ).Interface().(custom) - siz := m.Size() - return tagsize + SizeVarint(uint64(siz)) + siz - }, func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - m := ptr.asPointerTo(u.typ).Interface().(custom) - siz := m.Size() - buf, err := m.Marshal() - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(siz)) - b = append(b, buf...) - return b, nil - } -} - -func makeTimeMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - t := ptr.asPointerTo(u.typ).Interface().(*time.Time) - ts, err := timestampProto(*t) - if err != nil { - return 0 - } - siz := Size(ts) - return tagsize + SizeVarint(uint64(siz)) + siz - }, func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - t := ptr.asPointerTo(u.typ).Interface().(*time.Time) - ts, err := timestampProto(*t) - if err != nil { - return nil, err - } - buf, err := Marshal(ts) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(buf))) - b = append(b, buf...) - return b, nil - } -} - -func makeTimePtrMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - if ptr.isNil() { - return 0 - } - t := ptr.asPointerTo(reflect.PtrTo(u.typ)).Elem().Interface().(*time.Time) - ts, err := timestampProto(*t) - if err != nil { - return 0 - } - siz := Size(ts) - return tagsize + SizeVarint(uint64(siz)) + siz - }, func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - if ptr.isNil() { - return b, nil - } - t := ptr.asPointerTo(reflect.PtrTo(u.typ)).Elem().Interface().(*time.Time) - ts, err := timestampProto(*t) - if err != nil { - return nil, err - } - buf, err := Marshal(ts) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(buf))) - b = append(b, buf...) 
- return b, nil - } -} - -func makeTimeSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getSlice(u.typ) - n := 0 - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(time.Time) - ts, err := timestampProto(t) - if err != nil { - return 0 - } - siz := Size(ts) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getSlice(u.typ) - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(time.Time) - ts, err := timestampProto(t) - if err != nil { - return nil, err - } - siz := Size(ts) - buf, err := Marshal(ts) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(siz)) - b = append(b, buf...) - } - - return b, nil - } -} - -func makeTimePtrSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getSlice(reflect.PtrTo(u.typ)) - n := 0 - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(*time.Time) - ts, err := timestampProto(*t) - if err != nil { - return 0 - } - siz := Size(ts) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getSlice(reflect.PtrTo(u.typ)) - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(*time.Time) - ts, err := timestampProto(*t) - if err != nil { - return nil, err - } - siz := Size(ts) - buf, err := Marshal(ts) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(siz)) - b = append(b, buf...) - } - - return b, nil - } -} - -func makeDurationMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - d := ptr.asPointerTo(u.typ).Interface().(*time.Duration) - dur := durationProto(*d) - siz := Size(dur) - return tagsize + SizeVarint(uint64(siz)) + siz - }, func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - d := ptr.asPointerTo(u.typ).Interface().(*time.Duration) - dur := durationProto(*d) - buf, err := Marshal(dur) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(buf))) - b = append(b, buf...) - return b, nil - } -} - -func makeDurationPtrMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - if ptr.isNil() { - return 0 - } - d := ptr.asPointerTo(reflect.PtrTo(u.typ)).Elem().Interface().(*time.Duration) - dur := durationProto(*d) - siz := Size(dur) - return tagsize + SizeVarint(uint64(siz)) + siz - }, func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - if ptr.isNil() { - return b, nil - } - d := ptr.asPointerTo(reflect.PtrTo(u.typ)).Elem().Interface().(*time.Duration) - dur := durationProto(*d) - buf, err := Marshal(dur) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(buf))) - b = append(b, buf...) 
- return b, nil - } -} - -func makeDurationSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getSlice(u.typ) - n := 0 - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - d := elem.Interface().(time.Duration) - dur := durationProto(d) - siz := Size(dur) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getSlice(u.typ) - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - d := elem.Interface().(time.Duration) - dur := durationProto(d) - siz := Size(dur) - buf, err := Marshal(dur) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(siz)) - b = append(b, buf...) - } - - return b, nil - } -} - -func makeDurationPtrSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getSlice(reflect.PtrTo(u.typ)) - n := 0 - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - d := elem.Interface().(*time.Duration) - dur := durationProto(*d) - siz := Size(dur) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getSlice(reflect.PtrTo(u.typ)) - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - d := elem.Interface().(*time.Duration) - dur := durationProto(*d) - siz := Size(dur) - buf, err := Marshal(dur) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(siz)) - b = append(b, buf...) - } - - return b, nil - } -} diff --git a/vendor/github.com/gogo/protobuf/proto/table_merge.go b/vendor/github.com/gogo/protobuf/proto/table_merge.go deleted file mode 100644 index f520106..0000000 --- a/vendor/github.com/gogo/protobuf/proto/table_merge.go +++ /dev/null @@ -1,657 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2016 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -import ( - "fmt" - "reflect" - "strings" - "sync" - "sync/atomic" -) - -// Merge merges the src message into dst. -// This assumes that dst and src of the same type and are non-nil. -func (a *InternalMessageInfo) Merge(dst, src Message) { - mi := atomicLoadMergeInfo(&a.merge) - if mi == nil { - mi = getMergeInfo(reflect.TypeOf(dst).Elem()) - atomicStoreMergeInfo(&a.merge, mi) - } - mi.merge(toPointer(&dst), toPointer(&src)) -} - -type mergeInfo struct { - typ reflect.Type - - initialized int32 // 0: only typ is valid, 1: everything is valid - lock sync.Mutex - - fields []mergeFieldInfo - unrecognized field // Offset of XXX_unrecognized -} - -type mergeFieldInfo struct { - field field // Offset of field, guaranteed to be valid - - // isPointer reports whether the value in the field is a pointer. - // This is true for the following situations: - // * Pointer to struct - // * Pointer to basic type (proto2 only) - // * Slice (first value in slice header is a pointer) - // * String (first value in string header is a pointer) - isPointer bool - - // basicWidth reports the width of the field assuming that it is directly - // embedded in the struct (as is the case for basic types in proto3). - // The possible values are: - // 0: invalid - // 1: bool - // 4: int32, uint32, float32 - // 8: int64, uint64, float64 - basicWidth int - - // Where dst and src are pointers to the types being merged. - merge func(dst, src pointer) -} - -var ( - mergeInfoMap = map[reflect.Type]*mergeInfo{} - mergeInfoLock sync.Mutex -) - -func getMergeInfo(t reflect.Type) *mergeInfo { - mergeInfoLock.Lock() - defer mergeInfoLock.Unlock() - mi := mergeInfoMap[t] - if mi == nil { - mi = &mergeInfo{typ: t} - mergeInfoMap[t] = mi - } - return mi -} - -// merge merges src into dst assuming they are both of type *mi.typ. -func (mi *mergeInfo) merge(dst, src pointer) { - if dst.isNil() { - panic("proto: nil destination") - } - if src.isNil() { - return // Nothing to do. - } - - if atomic.LoadInt32(&mi.initialized) == 0 { - mi.computeMergeInfo() - } - - for _, fi := range mi.fields { - sfp := src.offset(fi.field) - - // As an optimization, we can avoid the merge function call cost - // if we know for sure that the source will have no effect - // by checking if it is the zero value. - if unsafeAllowed { - if fi.isPointer && sfp.getPointer().isNil() { // Could be slice or string - continue - } - if fi.basicWidth > 0 { - switch { - case fi.basicWidth == 1 && !*sfp.toBool(): - continue - case fi.basicWidth == 4 && *sfp.toUint32() == 0: - continue - case fi.basicWidth == 8 && *sfp.toUint64() == 0: - continue - } - } - } - - dfp := dst.offset(fi.field) - fi.merge(dfp, sfp) - } - - // TODO: Make this faster? 
- out := dst.asPointerTo(mi.typ).Elem() - in := src.asPointerTo(mi.typ).Elem() - if emIn, err := extendable(in.Addr().Interface()); err == nil { - emOut, _ := extendable(out.Addr().Interface()) - mIn, muIn := emIn.extensionsRead() - if mIn != nil { - mOut := emOut.extensionsWrite() - muIn.Lock() - mergeExtension(mOut, mIn) - muIn.Unlock() - } - } - - if mi.unrecognized.IsValid() { - if b := *src.offset(mi.unrecognized).toBytes(); len(b) > 0 { - *dst.offset(mi.unrecognized).toBytes() = append([]byte(nil), b...) - } - } -} - -func (mi *mergeInfo) computeMergeInfo() { - mi.lock.Lock() - defer mi.lock.Unlock() - if mi.initialized != 0 { - return - } - t := mi.typ - n := t.NumField() - - props := GetProperties(t) - for i := 0; i < n; i++ { - f := t.Field(i) - if strings.HasPrefix(f.Name, "XXX_") { - continue - } - - mfi := mergeFieldInfo{field: toField(&f)} - tf := f.Type - - // As an optimization, we can avoid the merge function call cost - // if we know for sure that the source will have no effect - // by checking if it is the zero value. - if unsafeAllowed { - switch tf.Kind() { - case reflect.Ptr, reflect.Slice, reflect.String: - // As a special case, we assume slices and strings are pointers - // since we know that the first field in the SliceSlice or - // StringHeader is a data pointer. - mfi.isPointer = true - case reflect.Bool: - mfi.basicWidth = 1 - case reflect.Int32, reflect.Uint32, reflect.Float32: - mfi.basicWidth = 4 - case reflect.Int64, reflect.Uint64, reflect.Float64: - mfi.basicWidth = 8 - } - } - - // Unwrap tf to get at its most basic type. - var isPointer, isSlice bool - if tf.Kind() == reflect.Slice && tf.Elem().Kind() != reflect.Uint8 { - isSlice = true - tf = tf.Elem() - } - if tf.Kind() == reflect.Ptr { - isPointer = true - tf = tf.Elem() - } - if isPointer && isSlice && tf.Kind() != reflect.Struct { - panic("both pointer and slice for basic type in " + tf.Name()) - } - - switch tf.Kind() { - case reflect.Int32: - switch { - case isSlice: // E.g., []int32 - mfi.merge = func(dst, src pointer) { - // NOTE: toInt32Slice is not defined (see pointer_reflect.go). - /* - sfsp := src.toInt32Slice() - if *sfsp != nil { - dfsp := dst.toInt32Slice() - *dfsp = append(*dfsp, *sfsp...) - if *dfsp == nil { - *dfsp = []int64{} - } - } - */ - sfs := src.getInt32Slice() - if sfs != nil { - dfs := dst.getInt32Slice() - dfs = append(dfs, sfs...) - if dfs == nil { - dfs = []int32{} - } - dst.setInt32Slice(dfs) - } - } - case isPointer: // E.g., *int32 - mfi.merge = func(dst, src pointer) { - // NOTE: toInt32Ptr is not defined (see pointer_reflect.go). - /* - sfpp := src.toInt32Ptr() - if *sfpp != nil { - dfpp := dst.toInt32Ptr() - if *dfpp == nil { - *dfpp = Int32(**sfpp) - } else { - **dfpp = **sfpp - } - } - */ - sfp := src.getInt32Ptr() - if sfp != nil { - dfp := dst.getInt32Ptr() - if dfp == nil { - dst.setInt32Ptr(*sfp) - } else { - *dfp = *sfp - } - } - } - default: // E.g., int32 - mfi.merge = func(dst, src pointer) { - if v := *src.toInt32(); v != 0 { - *dst.toInt32() = v - } - } - } - case reflect.Int64: - switch { - case isSlice: // E.g., []int64 - mfi.merge = func(dst, src pointer) { - sfsp := src.toInt64Slice() - if *sfsp != nil { - dfsp := dst.toInt64Slice() - *dfsp = append(*dfsp, *sfsp...) 
- if *dfsp == nil { - *dfsp = []int64{} - } - } - } - case isPointer: // E.g., *int64 - mfi.merge = func(dst, src pointer) { - sfpp := src.toInt64Ptr() - if *sfpp != nil { - dfpp := dst.toInt64Ptr() - if *dfpp == nil { - *dfpp = Int64(**sfpp) - } else { - **dfpp = **sfpp - } - } - } - default: // E.g., int64 - mfi.merge = func(dst, src pointer) { - if v := *src.toInt64(); v != 0 { - *dst.toInt64() = v - } - } - } - case reflect.Uint32: - switch { - case isSlice: // E.g., []uint32 - mfi.merge = func(dst, src pointer) { - sfsp := src.toUint32Slice() - if *sfsp != nil { - dfsp := dst.toUint32Slice() - *dfsp = append(*dfsp, *sfsp...) - if *dfsp == nil { - *dfsp = []uint32{} - } - } - } - case isPointer: // E.g., *uint32 - mfi.merge = func(dst, src pointer) { - sfpp := src.toUint32Ptr() - if *sfpp != nil { - dfpp := dst.toUint32Ptr() - if *dfpp == nil { - *dfpp = Uint32(**sfpp) - } else { - **dfpp = **sfpp - } - } - } - default: // E.g., uint32 - mfi.merge = func(dst, src pointer) { - if v := *src.toUint32(); v != 0 { - *dst.toUint32() = v - } - } - } - case reflect.Uint64: - switch { - case isSlice: // E.g., []uint64 - mfi.merge = func(dst, src pointer) { - sfsp := src.toUint64Slice() - if *sfsp != nil { - dfsp := dst.toUint64Slice() - *dfsp = append(*dfsp, *sfsp...) - if *dfsp == nil { - *dfsp = []uint64{} - } - } - } - case isPointer: // E.g., *uint64 - mfi.merge = func(dst, src pointer) { - sfpp := src.toUint64Ptr() - if *sfpp != nil { - dfpp := dst.toUint64Ptr() - if *dfpp == nil { - *dfpp = Uint64(**sfpp) - } else { - **dfpp = **sfpp - } - } - } - default: // E.g., uint64 - mfi.merge = func(dst, src pointer) { - if v := *src.toUint64(); v != 0 { - *dst.toUint64() = v - } - } - } - case reflect.Float32: - switch { - case isSlice: // E.g., []float32 - mfi.merge = func(dst, src pointer) { - sfsp := src.toFloat32Slice() - if *sfsp != nil { - dfsp := dst.toFloat32Slice() - *dfsp = append(*dfsp, *sfsp...) - if *dfsp == nil { - *dfsp = []float32{} - } - } - } - case isPointer: // E.g., *float32 - mfi.merge = func(dst, src pointer) { - sfpp := src.toFloat32Ptr() - if *sfpp != nil { - dfpp := dst.toFloat32Ptr() - if *dfpp == nil { - *dfpp = Float32(**sfpp) - } else { - **dfpp = **sfpp - } - } - } - default: // E.g., float32 - mfi.merge = func(dst, src pointer) { - if v := *src.toFloat32(); v != 0 { - *dst.toFloat32() = v - } - } - } - case reflect.Float64: - switch { - case isSlice: // E.g., []float64 - mfi.merge = func(dst, src pointer) { - sfsp := src.toFloat64Slice() - if *sfsp != nil { - dfsp := dst.toFloat64Slice() - *dfsp = append(*dfsp, *sfsp...) - if *dfsp == nil { - *dfsp = []float64{} - } - } - } - case isPointer: // E.g., *float64 - mfi.merge = func(dst, src pointer) { - sfpp := src.toFloat64Ptr() - if *sfpp != nil { - dfpp := dst.toFloat64Ptr() - if *dfpp == nil { - *dfpp = Float64(**sfpp) - } else { - **dfpp = **sfpp - } - } - } - default: // E.g., float64 - mfi.merge = func(dst, src pointer) { - if v := *src.toFloat64(); v != 0 { - *dst.toFloat64() = v - } - } - } - case reflect.Bool: - switch { - case isSlice: // E.g., []bool - mfi.merge = func(dst, src pointer) { - sfsp := src.toBoolSlice() - if *sfsp != nil { - dfsp := dst.toBoolSlice() - *dfsp = append(*dfsp, *sfsp...) 
- if *dfsp == nil { - *dfsp = []bool{} - } - } - } - case isPointer: // E.g., *bool - mfi.merge = func(dst, src pointer) { - sfpp := src.toBoolPtr() - if *sfpp != nil { - dfpp := dst.toBoolPtr() - if *dfpp == nil { - *dfpp = Bool(**sfpp) - } else { - **dfpp = **sfpp - } - } - } - default: // E.g., bool - mfi.merge = func(dst, src pointer) { - if v := *src.toBool(); v { - *dst.toBool() = v - } - } - } - case reflect.String: - switch { - case isSlice: // E.g., []string - mfi.merge = func(dst, src pointer) { - sfsp := src.toStringSlice() - if *sfsp != nil { - dfsp := dst.toStringSlice() - *dfsp = append(*dfsp, *sfsp...) - if *dfsp == nil { - *dfsp = []string{} - } - } - } - case isPointer: // E.g., *string - mfi.merge = func(dst, src pointer) { - sfpp := src.toStringPtr() - if *sfpp != nil { - dfpp := dst.toStringPtr() - if *dfpp == nil { - *dfpp = String(**sfpp) - } else { - **dfpp = **sfpp - } - } - } - default: // E.g., string - mfi.merge = func(dst, src pointer) { - if v := *src.toString(); v != "" { - *dst.toString() = v - } - } - } - case reflect.Slice: - isProto3 := props.Prop[i].proto3 - switch { - case isPointer: - panic("bad pointer in byte slice case in " + tf.Name()) - case tf.Elem().Kind() != reflect.Uint8: - panic("bad element kind in byte slice case in " + tf.Name()) - case isSlice: // E.g., [][]byte - mfi.merge = func(dst, src pointer) { - sbsp := src.toBytesSlice() - if *sbsp != nil { - dbsp := dst.toBytesSlice() - for _, sb := range *sbsp { - if sb == nil { - *dbsp = append(*dbsp, nil) - } else { - *dbsp = append(*dbsp, append([]byte{}, sb...)) - } - } - if *dbsp == nil { - *dbsp = [][]byte{} - } - } - } - default: // E.g., []byte - mfi.merge = func(dst, src pointer) { - sbp := src.toBytes() - if *sbp != nil { - dbp := dst.toBytes() - if !isProto3 || len(*sbp) > 0 { - *dbp = append([]byte{}, *sbp...) - } - } - } - } - case reflect.Struct: - switch { - case !isPointer: - mergeInfo := getMergeInfo(tf) - mfi.merge = func(dst, src pointer) { - mergeInfo.merge(dst, src) - } - case isSlice: // E.g., []*pb.T - mergeInfo := getMergeInfo(tf) - mfi.merge = func(dst, src pointer) { - sps := src.getPointerSlice() - if sps != nil { - dps := dst.getPointerSlice() - for _, sp := range sps { - var dp pointer - if !sp.isNil() { - dp = valToPointer(reflect.New(tf)) - mergeInfo.merge(dp, sp) - } - dps = append(dps, dp) - } - if dps == nil { - dps = []pointer{} - } - dst.setPointerSlice(dps) - } - } - default: // E.g., *pb.T - mergeInfo := getMergeInfo(tf) - mfi.merge = func(dst, src pointer) { - sp := src.getPointer() - if !sp.isNil() { - dp := dst.getPointer() - if dp.isNil() { - dp = valToPointer(reflect.New(tf)) - dst.setPointer(dp) - } - mergeInfo.merge(dp, sp) - } - } - } - case reflect.Map: - switch { - case isPointer || isSlice: - panic("bad pointer or slice in map case in " + tf.Name()) - default: // E.g., map[K]V - mfi.merge = func(dst, src pointer) { - sm := src.asPointerTo(tf).Elem() - if sm.Len() == 0 { - return - } - dm := dst.asPointerTo(tf).Elem() - if dm.IsNil() { - dm.Set(reflect.MakeMap(tf)) - } - - switch tf.Elem().Kind() { - case reflect.Ptr: // Proto struct (e.g., *T) - for _, key := range sm.MapKeys() { - val := sm.MapIndex(key) - val = reflect.ValueOf(Clone(val.Interface().(Message))) - dm.SetMapIndex(key, val) - } - case reflect.Slice: // E.g. 
Bytes type (e.g., []byte) - for _, key := range sm.MapKeys() { - val := sm.MapIndex(key) - val = reflect.ValueOf(append([]byte{}, val.Bytes()...)) - dm.SetMapIndex(key, val) - } - default: // Basic type (e.g., string) - for _, key := range sm.MapKeys() { - val := sm.MapIndex(key) - dm.SetMapIndex(key, val) - } - } - } - } - case reflect.Interface: - // Must be oneof field. - switch { - case isPointer || isSlice: - panic("bad pointer or slice in interface case in " + tf.Name()) - default: // E.g., interface{} - // TODO: Make this faster? - mfi.merge = func(dst, src pointer) { - su := src.asPointerTo(tf).Elem() - if !su.IsNil() { - du := dst.asPointerTo(tf).Elem() - typ := su.Elem().Type() - if du.IsNil() || du.Elem().Type() != typ { - du.Set(reflect.New(typ.Elem())) // Initialize interface if empty - } - sv := su.Elem().Elem().Field(0) - if sv.Kind() == reflect.Ptr && sv.IsNil() { - return - } - dv := du.Elem().Elem().Field(0) - if dv.Kind() == reflect.Ptr && dv.IsNil() { - dv.Set(reflect.New(sv.Type().Elem())) // Initialize proto message if empty - } - switch sv.Type().Kind() { - case reflect.Ptr: // Proto struct (e.g., *T) - Merge(dv.Interface().(Message), sv.Interface().(Message)) - case reflect.Slice: // E.g. Bytes type (e.g., []byte) - dv.Set(reflect.ValueOf(append([]byte{}, sv.Bytes()...))) - default: // Basic type (e.g., string) - dv.Set(sv) - } - } - } - } - default: - panic(fmt.Sprintf("merger not found for type:%s", tf)) - } - mi.fields = append(mi.fields, mfi) - } - - mi.unrecognized = invalidField - if f, ok := t.FieldByName("XXX_unrecognized"); ok { - if f.Type != reflect.TypeOf([]byte{}) { - panic("expected XXX_unrecognized to be of type []byte") - } - mi.unrecognized = toField(&f) - } - - atomic.StoreInt32(&mi.initialized, 1) -} diff --git a/vendor/github.com/gogo/protobuf/proto/table_unmarshal.go b/vendor/github.com/gogo/protobuf/proto/table_unmarshal.go deleted file mode 100644 index bb2622f..0000000 --- a/vendor/github.com/gogo/protobuf/proto/table_unmarshal.go +++ /dev/null @@ -1,2245 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2016 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -import ( - "errors" - "fmt" - "io" - "math" - "reflect" - "strconv" - "strings" - "sync" - "sync/atomic" - "unicode/utf8" -) - -// Unmarshal is the entry point from the generated .pb.go files. -// This function is not intended to be used by non-generated code. -// This function is not subject to any compatibility guarantee. -// msg contains a pointer to a protocol buffer struct. -// b is the data to be unmarshaled into the protocol buffer. -// a is a pointer to a place to store cached unmarshal information. -func (a *InternalMessageInfo) Unmarshal(msg Message, b []byte) error { - // Load the unmarshal information for this message type. - // The atomic load ensures memory consistency. - u := atomicLoadUnmarshalInfo(&a.unmarshal) - if u == nil { - // Slow path: find unmarshal info for msg, update a with it. - u = getUnmarshalInfo(reflect.TypeOf(msg).Elem()) - atomicStoreUnmarshalInfo(&a.unmarshal, u) - } - // Then do the unmarshaling. - err := u.unmarshal(toPointer(&msg), b) - return err -} - -type unmarshalInfo struct { - typ reflect.Type // type of the protobuf struct - - // 0 = only typ field is initialized - // 1 = completely initialized - initialized int32 - lock sync.Mutex // prevents double initialization - dense []unmarshalFieldInfo // fields indexed by tag # - sparse map[uint64]unmarshalFieldInfo // fields indexed by tag # - reqFields []string // names of required fields - reqMask uint64 // 1< 0 { - // Read tag and wire type. - // Special case 1 and 2 byte varints. - var x uint64 - if b[0] < 128 { - x = uint64(b[0]) - b = b[1:] - } else if len(b) >= 2 && b[1] < 128 { - x = uint64(b[0]&0x7f) + uint64(b[1])<<7 - b = b[2:] - } else { - var n int - x, n = decodeVarint(b) - if n == 0 { - return io.ErrUnexpectedEOF - } - b = b[n:] - } - tag := x >> 3 - wire := int(x) & 7 - - // Dispatch on the tag to one of the unmarshal* functions below. - var f unmarshalFieldInfo - if tag < uint64(len(u.dense)) { - f = u.dense[tag] - } else { - f = u.sparse[tag] - } - if fn := f.unmarshal; fn != nil { - var err error - b, err = fn(b, m.offset(f.field), wire) - if err == nil { - reqMask |= f.reqMask - continue - } - if r, ok := err.(*RequiredNotSetError); ok { - // Remember this error, but keep parsing. We need to produce - // a full parse even if a required field is missing. - if errLater == nil { - errLater = r - } - reqMask |= f.reqMask - continue - } - if err != errInternalBadWireType { - if err == errInvalidUTF8 { - if errLater == nil { - fullName := revProtoTypes[reflect.PtrTo(u.typ)] + "." + f.name - errLater = &invalidUTF8Error{fullName} - } - continue - } - return err - } - // Fragments with bad wire type are treated as unknown fields. - } - - // Unknown tag. - if !u.unrecognized.IsValid() { - // Don't keep unrecognized data; just skip it. - var err error - b, err = skipField(b, wire) - if err != nil { - return err - } - continue - } - // Keep unrecognized data around. - // maybe in extensions, maybe in the unrecognized field. 
- z := m.offset(u.unrecognized).toBytes() - var emap map[int32]Extension - var e Extension - for _, r := range u.extensionRanges { - if uint64(r.Start) <= tag && tag <= uint64(r.End) { - if u.extensions.IsValid() { - mp := m.offset(u.extensions).toExtensions() - emap = mp.extensionsWrite() - e = emap[int32(tag)] - z = &e.enc - break - } - if u.oldExtensions.IsValid() { - p := m.offset(u.oldExtensions).toOldExtensions() - emap = *p - if emap == nil { - emap = map[int32]Extension{} - *p = emap - } - e = emap[int32(tag)] - z = &e.enc - break - } - if u.bytesExtensions.IsValid() { - z = m.offset(u.bytesExtensions).toBytes() - break - } - panic("no extensions field available") - } - } - // Use wire type to skip data. - var err error - b0 := b - b, err = skipField(b, wire) - if err != nil { - return err - } - *z = encodeVarint(*z, tag<<3|uint64(wire)) - *z = append(*z, b0[:len(b0)-len(b)]...) - - if emap != nil { - emap[int32(tag)] = e - } - } - if reqMask != u.reqMask && errLater == nil { - // A required field of this message is missing. - for _, n := range u.reqFields { - if reqMask&1 == 0 { - errLater = &RequiredNotSetError{n} - } - reqMask >>= 1 - } - } - return errLater -} - -// computeUnmarshalInfo fills in u with information for use -// in unmarshaling protocol buffers of type u.typ. -func (u *unmarshalInfo) computeUnmarshalInfo() { - u.lock.Lock() - defer u.lock.Unlock() - if u.initialized != 0 { - return - } - t := u.typ - n := t.NumField() - - // Set up the "not found" value for the unrecognized byte buffer. - // This is the default for proto3. - u.unrecognized = invalidField - u.extensions = invalidField - u.oldExtensions = invalidField - u.bytesExtensions = invalidField - - // List of the generated type and offset for each oneof field. - type oneofField struct { - ityp reflect.Type // interface type of oneof field - field field // offset in containing message - } - var oneofFields []oneofField - - for i := 0; i < n; i++ { - f := t.Field(i) - if f.Name == "XXX_unrecognized" { - // The byte slice used to hold unrecognized input is special. - if f.Type != reflect.TypeOf(([]byte)(nil)) { - panic("bad type for XXX_unrecognized field: " + f.Type.Name()) - } - u.unrecognized = toField(&f) - continue - } - if f.Name == "XXX_InternalExtensions" { - // Ditto here. - if f.Type != reflect.TypeOf(XXX_InternalExtensions{}) { - panic("bad type for XXX_InternalExtensions field: " + f.Type.Name()) - } - u.extensions = toField(&f) - if f.Tag.Get("protobuf_messageset") == "1" { - u.isMessageSet = true - } - continue - } - if f.Name == "XXX_extensions" { - // An older form of the extensions field. - if f.Type == reflect.TypeOf((map[int32]Extension)(nil)) { - u.oldExtensions = toField(&f) - continue - } else if f.Type == reflect.TypeOf(([]byte)(nil)) { - u.bytesExtensions = toField(&f) - continue - } - panic("bad type for XXX_extensions field: " + f.Type.Name()) - } - if f.Name == "XXX_NoUnkeyedLiteral" || f.Name == "XXX_sizecache" { - continue - } - - oneof := f.Tag.Get("protobuf_oneof") - if oneof != "" { - oneofFields = append(oneofFields, oneofField{f.Type, toField(&f)}) - // The rest of oneof processing happens below. - continue - } - - tags := f.Tag.Get("protobuf") - tagArray := strings.Split(tags, ",") - if len(tagArray) < 2 { - panic("protobuf tag not enough fields in " + t.Name() + "." 
+ f.Name + ": " + tags) - } - tag, err := strconv.Atoi(tagArray[1]) - if err != nil { - panic("protobuf tag field not an integer: " + tagArray[1]) - } - - name := "" - for _, tag := range tagArray[3:] { - if strings.HasPrefix(tag, "name=") { - name = tag[5:] - } - } - - // Extract unmarshaling function from the field (its type and tags). - unmarshal := fieldUnmarshaler(&f) - - // Required field? - var reqMask uint64 - if tagArray[2] == "req" { - bit := len(u.reqFields) - u.reqFields = append(u.reqFields, name) - reqMask = uint64(1) << uint(bit) - // TODO: if we have more than 64 required fields, we end up - // not verifying that all required fields are present. - // Fix this, perhaps using a count of required fields? - } - - // Store the info in the correct slot in the message. - u.setTag(tag, toField(&f), unmarshal, reqMask, name) - } - - // Find any types associated with oneof fields. - // TODO: XXX_OneofFuncs returns more info than we need. Get rid of some of it? - fn := reflect.Zero(reflect.PtrTo(t)).MethodByName("XXX_OneofFuncs") - // gogo: len(oneofFields) > 0 is needed for embedded oneof messages, without a marshaler and unmarshaler - if fn.IsValid() && len(oneofFields) > 0 { - res := fn.Call(nil)[3] // last return value from XXX_OneofFuncs: []interface{} - for i := res.Len() - 1; i >= 0; i-- { - v := res.Index(i) // interface{} - tptr := reflect.ValueOf(v.Interface()).Type() // *Msg_X - typ := tptr.Elem() // Msg_X - - f := typ.Field(0) // oneof implementers have one field - baseUnmarshal := fieldUnmarshaler(&f) - tags := strings.Split(f.Tag.Get("protobuf"), ",") - fieldNum, err := strconv.Atoi(tags[1]) - if err != nil { - panic("protobuf tag field not an integer: " + tags[1]) - } - var name string - for _, tag := range tags { - if strings.HasPrefix(tag, "name=") { - name = strings.TrimPrefix(tag, "name=") - break - } - } - - // Find the oneof field that this struct implements. - // Might take O(n^2) to process all of the oneofs, but who cares. - for _, of := range oneofFields { - if tptr.Implements(of.ityp) { - // We have found the corresponding interface for this struct. - // That lets us know where this struct should be stored - // when we encounter it during unmarshaling. - unmarshal := makeUnmarshalOneof(typ, of.ityp, baseUnmarshal) - u.setTag(fieldNum, of.field, unmarshal, 0, name) - } - } - } - } - - // Get extension ranges, if any. - fn = reflect.Zero(reflect.PtrTo(t)).MethodByName("ExtensionRangeArray") - if fn.IsValid() { - if !u.extensions.IsValid() && !u.oldExtensions.IsValid() && !u.bytesExtensions.IsValid() { - panic("a message with extensions, but no extensions field in " + t.Name()) - } - u.extensionRanges = fn.Call(nil)[0].Interface().([]ExtensionRange) - } - - // Explicitly disallow tag 0. This will ensure we flag an error - // when decoding a buffer of all zeros. Without this code, we - // would decode and skip an all-zero buffer of even length. - // [0 0] is [tag=0/wiretype=varint varint-encoded-0]. - u.setTag(0, zeroField, func(b []byte, f pointer, w int) ([]byte, error) { - return nil, fmt.Errorf("proto: %s: illegal tag 0 (wire type %d)", t, w) - }, 0, "") - - // Set mask for required field check. - u.reqMask = uint64(1)<= 0 && (tag < 16 || tag < 2*n) { // TODO: what are the right numbers here? 
- for len(u.dense) <= tag { - u.dense = append(u.dense, unmarshalFieldInfo{}) - } - u.dense[tag] = i - return - } - if u.sparse == nil { - u.sparse = map[uint64]unmarshalFieldInfo{} - } - u.sparse[uint64(tag)] = i -} - -// fieldUnmarshaler returns an unmarshaler for the given field. -func fieldUnmarshaler(f *reflect.StructField) unmarshaler { - if f.Type.Kind() == reflect.Map { - return makeUnmarshalMap(f) - } - return typeUnmarshaler(f.Type, f.Tag.Get("protobuf")) -} - -// typeUnmarshaler returns an unmarshaler for the given field type / field tag pair. -func typeUnmarshaler(t reflect.Type, tags string) unmarshaler { - tagArray := strings.Split(tags, ",") - encoding := tagArray[0] - name := "unknown" - ctype := false - isTime := false - isDuration := false - isWktPointer := false - proto3 := false - validateUTF8 := true - for _, tag := range tagArray[3:] { - if strings.HasPrefix(tag, "name=") { - name = tag[5:] - } - if tag == "proto3" { - proto3 = true - } - if strings.HasPrefix(tag, "customtype=") { - ctype = true - } - if tag == "stdtime" { - isTime = true - } - if tag == "stdduration" { - isDuration = true - } - if tag == "wktptr" { - isWktPointer = true - } - } - validateUTF8 = validateUTF8 && proto3 - - // Figure out packaging (pointer, slice, or both) - slice := false - pointer := false - if t.Kind() == reflect.Slice && t.Elem().Kind() != reflect.Uint8 { - slice = true - t = t.Elem() - } - if t.Kind() == reflect.Ptr { - pointer = true - t = t.Elem() - } - - if ctype { - if reflect.PtrTo(t).Implements(customType) { - if slice { - return makeUnmarshalCustomSlice(getUnmarshalInfo(t), name) - } - if pointer { - return makeUnmarshalCustomPtr(getUnmarshalInfo(t), name) - } - return makeUnmarshalCustom(getUnmarshalInfo(t), name) - } else { - panic(fmt.Sprintf("custom type: type: %v, does not implement the proto.custom interface", t)) - } - } - - if isTime { - if pointer { - if slice { - return makeUnmarshalTimePtrSlice(getUnmarshalInfo(t), name) - } - return makeUnmarshalTimePtr(getUnmarshalInfo(t), name) - } - if slice { - return makeUnmarshalTimeSlice(getUnmarshalInfo(t), name) - } - return makeUnmarshalTime(getUnmarshalInfo(t), name) - } - - if isDuration { - if pointer { - if slice { - return makeUnmarshalDurationPtrSlice(getUnmarshalInfo(t), name) - } - return makeUnmarshalDurationPtr(getUnmarshalInfo(t), name) - } - if slice { - return makeUnmarshalDurationSlice(getUnmarshalInfo(t), name) - } - return makeUnmarshalDuration(getUnmarshalInfo(t), name) - } - - if isWktPointer { - switch t.Kind() { - case reflect.Float64: - if pointer { - if slice { - return makeStdDoubleValuePtrSliceUnmarshaler(getUnmarshalInfo(t), name) - } - return makeStdDoubleValuePtrUnmarshaler(getUnmarshalInfo(t), name) - } - if slice { - return makeStdDoubleValueSliceUnmarshaler(getUnmarshalInfo(t), name) - } - return makeStdDoubleValueUnmarshaler(getUnmarshalInfo(t), name) - case reflect.Float32: - if pointer { - if slice { - return makeStdFloatValuePtrSliceUnmarshaler(getUnmarshalInfo(t), name) - } - return makeStdFloatValuePtrUnmarshaler(getUnmarshalInfo(t), name) - } - if slice { - return makeStdFloatValueSliceUnmarshaler(getUnmarshalInfo(t), name) - } - return makeStdFloatValueUnmarshaler(getUnmarshalInfo(t), name) - case reflect.Int64: - if pointer { - if slice { - return makeStdInt64ValuePtrSliceUnmarshaler(getUnmarshalInfo(t), name) - } - return makeStdInt64ValuePtrUnmarshaler(getUnmarshalInfo(t), name) - } - if slice { - return makeStdInt64ValueSliceUnmarshaler(getUnmarshalInfo(t), name) - } - return 
makeStdInt64ValueUnmarshaler(getUnmarshalInfo(t), name) - case reflect.Uint64: - if pointer { - if slice { - return makeStdUInt64ValuePtrSliceUnmarshaler(getUnmarshalInfo(t), name) - } - return makeStdUInt64ValuePtrUnmarshaler(getUnmarshalInfo(t), name) - } - if slice { - return makeStdUInt64ValueSliceUnmarshaler(getUnmarshalInfo(t), name) - } - return makeStdUInt64ValueUnmarshaler(getUnmarshalInfo(t), name) - case reflect.Int32: - if pointer { - if slice { - return makeStdInt32ValuePtrSliceUnmarshaler(getUnmarshalInfo(t), name) - } - return makeStdInt32ValuePtrUnmarshaler(getUnmarshalInfo(t), name) - } - if slice { - return makeStdInt32ValueSliceUnmarshaler(getUnmarshalInfo(t), name) - } - return makeStdInt32ValueUnmarshaler(getUnmarshalInfo(t), name) - case reflect.Uint32: - if pointer { - if slice { - return makeStdUInt32ValuePtrSliceUnmarshaler(getUnmarshalInfo(t), name) - } - return makeStdUInt32ValuePtrUnmarshaler(getUnmarshalInfo(t), name) - } - if slice { - return makeStdUInt32ValueSliceUnmarshaler(getUnmarshalInfo(t), name) - } - return makeStdUInt32ValueUnmarshaler(getUnmarshalInfo(t), name) - case reflect.Bool: - if pointer { - if slice { - return makeStdBoolValuePtrSliceUnmarshaler(getUnmarshalInfo(t), name) - } - return makeStdBoolValuePtrUnmarshaler(getUnmarshalInfo(t), name) - } - if slice { - return makeStdBoolValueSliceUnmarshaler(getUnmarshalInfo(t), name) - } - return makeStdBoolValueUnmarshaler(getUnmarshalInfo(t), name) - case reflect.String: - if pointer { - if slice { - return makeStdStringValuePtrSliceUnmarshaler(getUnmarshalInfo(t), name) - } - return makeStdStringValuePtrUnmarshaler(getUnmarshalInfo(t), name) - } - if slice { - return makeStdStringValueSliceUnmarshaler(getUnmarshalInfo(t), name) - } - return makeStdStringValueUnmarshaler(getUnmarshalInfo(t), name) - case uint8SliceType: - if pointer { - if slice { - return makeStdBytesValuePtrSliceUnmarshaler(getUnmarshalInfo(t), name) - } - return makeStdBytesValuePtrUnmarshaler(getUnmarshalInfo(t), name) - } - if slice { - return makeStdBytesValueSliceUnmarshaler(getUnmarshalInfo(t), name) - } - return makeStdBytesValueUnmarshaler(getUnmarshalInfo(t), name) - default: - panic(fmt.Sprintf("unknown wktpointer type %#v", t)) - } - } - - // We'll never have both pointer and slice for basic types. 
- if pointer && slice && t.Kind() != reflect.Struct { - panic("both pointer and slice for basic type in " + t.Name()) - } - - switch t.Kind() { - case reflect.Bool: - if pointer { - return unmarshalBoolPtr - } - if slice { - return unmarshalBoolSlice - } - return unmarshalBoolValue - case reflect.Int32: - switch encoding { - case "fixed32": - if pointer { - return unmarshalFixedS32Ptr - } - if slice { - return unmarshalFixedS32Slice - } - return unmarshalFixedS32Value - case "varint": - // this could be int32 or enum - if pointer { - return unmarshalInt32Ptr - } - if slice { - return unmarshalInt32Slice - } - return unmarshalInt32Value - case "zigzag32": - if pointer { - return unmarshalSint32Ptr - } - if slice { - return unmarshalSint32Slice - } - return unmarshalSint32Value - } - case reflect.Int64: - switch encoding { - case "fixed64": - if pointer { - return unmarshalFixedS64Ptr - } - if slice { - return unmarshalFixedS64Slice - } - return unmarshalFixedS64Value - case "varint": - if pointer { - return unmarshalInt64Ptr - } - if slice { - return unmarshalInt64Slice - } - return unmarshalInt64Value - case "zigzag64": - if pointer { - return unmarshalSint64Ptr - } - if slice { - return unmarshalSint64Slice - } - return unmarshalSint64Value - } - case reflect.Uint32: - switch encoding { - case "fixed32": - if pointer { - return unmarshalFixed32Ptr - } - if slice { - return unmarshalFixed32Slice - } - return unmarshalFixed32Value - case "varint": - if pointer { - return unmarshalUint32Ptr - } - if slice { - return unmarshalUint32Slice - } - return unmarshalUint32Value - } - case reflect.Uint64: - switch encoding { - case "fixed64": - if pointer { - return unmarshalFixed64Ptr - } - if slice { - return unmarshalFixed64Slice - } - return unmarshalFixed64Value - case "varint": - if pointer { - return unmarshalUint64Ptr - } - if slice { - return unmarshalUint64Slice - } - return unmarshalUint64Value - } - case reflect.Float32: - if pointer { - return unmarshalFloat32Ptr - } - if slice { - return unmarshalFloat32Slice - } - return unmarshalFloat32Value - case reflect.Float64: - if pointer { - return unmarshalFloat64Ptr - } - if slice { - return unmarshalFloat64Slice - } - return unmarshalFloat64Value - case reflect.Map: - panic("map type in typeUnmarshaler in " + t.Name()) - case reflect.Slice: - if pointer { - panic("bad pointer in slice case in " + t.Name()) - } - if slice { - return unmarshalBytesSlice - } - return unmarshalBytesValue - case reflect.String: - if validateUTF8 { - if pointer { - return unmarshalUTF8StringPtr - } - if slice { - return unmarshalUTF8StringSlice - } - return unmarshalUTF8StringValue - } - if pointer { - return unmarshalStringPtr - } - if slice { - return unmarshalStringSlice - } - return unmarshalStringValue - case reflect.Struct: - // message or group field - if !pointer { - switch encoding { - case "bytes": - if slice { - return makeUnmarshalMessageSlice(getUnmarshalInfo(t), name) - } - return makeUnmarshalMessage(getUnmarshalInfo(t), name) - } - } - switch encoding { - case "bytes": - if slice { - return makeUnmarshalMessageSlicePtr(getUnmarshalInfo(t), name) - } - return makeUnmarshalMessagePtr(getUnmarshalInfo(t), name) - case "group": - if slice { - return makeUnmarshalGroupSlicePtr(getUnmarshalInfo(t), name) - } - return makeUnmarshalGroupPtr(getUnmarshalInfo(t), name) - } - } - panic(fmt.Sprintf("unmarshaler not found type:%s encoding:%s", t, encoding)) -} - -// Below are all the unmarshalers for individual fields of various types. 
- -func unmarshalInt64Value(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int64(x) - *f.toInt64() = v - return b, nil -} - -func unmarshalInt64Ptr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int64(x) - *f.toInt64Ptr() = &v - return b, nil -} - -func unmarshalInt64Slice(b []byte, f pointer, w int) ([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - x, n = decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int64(x) - s := f.toInt64Slice() - *s = append(*s, v) - } - return res, nil - } - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int64(x) - s := f.toInt64Slice() - *s = append(*s, v) - return b, nil -} - -func unmarshalSint64Value(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int64(x>>1) ^ int64(x)<<63>>63 - *f.toInt64() = v - return b, nil -} - -func unmarshalSint64Ptr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int64(x>>1) ^ int64(x)<<63>>63 - *f.toInt64Ptr() = &v - return b, nil -} - -func unmarshalSint64Slice(b []byte, f pointer, w int) ([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - x, n = decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int64(x>>1) ^ int64(x)<<63>>63 - s := f.toInt64Slice() - *s = append(*s, v) - } - return res, nil - } - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int64(x>>1) ^ int64(x)<<63>>63 - s := f.toInt64Slice() - *s = append(*s, v) - return b, nil -} - -func unmarshalUint64Value(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := uint64(x) - *f.toUint64() = v - return b, nil -} - -func unmarshalUint64Ptr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := uint64(x) - *f.toUint64Ptr() = &v - return b, nil -} - -func unmarshalUint64Slice(b []byte, f pointer, w int) ([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - x, n = decodeVarint(b) - if n == 0 
{ - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := uint64(x) - s := f.toUint64Slice() - *s = append(*s, v) - } - return res, nil - } - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := uint64(x) - s := f.toUint64Slice() - *s = append(*s, v) - return b, nil -} - -func unmarshalInt32Value(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int32(x) - *f.toInt32() = v - return b, nil -} - -func unmarshalInt32Ptr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int32(x) - f.setInt32Ptr(v) - return b, nil -} - -func unmarshalInt32Slice(b []byte, f pointer, w int) ([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - x, n = decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int32(x) - f.appendInt32Slice(v) - } - return res, nil - } - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int32(x) - f.appendInt32Slice(v) - return b, nil -} - -func unmarshalSint32Value(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int32(x>>1) ^ int32(x)<<31>>31 - *f.toInt32() = v - return b, nil -} - -func unmarshalSint32Ptr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int32(x>>1) ^ int32(x)<<31>>31 - f.setInt32Ptr(v) - return b, nil -} - -func unmarshalSint32Slice(b []byte, f pointer, w int) ([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - x, n = decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int32(x>>1) ^ int32(x)<<31>>31 - f.appendInt32Slice(v) - } - return res, nil - } - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int32(x>>1) ^ int32(x)<<31>>31 - f.appendInt32Slice(v) - return b, nil -} - -func unmarshalUint32Value(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := uint32(x) - *f.toUint32() = v - return b, nil -} - -func unmarshalUint32Ptr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := uint32(x) - *f.toUint32Ptr() = &v - return b, nil -} - -func unmarshalUint32Slice(b []byte, f pointer, w int) 
([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - x, n = decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := uint32(x) - s := f.toUint32Slice() - *s = append(*s, v) - } - return res, nil - } - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := uint32(x) - s := f.toUint32Slice() - *s = append(*s, v) - return b, nil -} - -func unmarshalFixed64Value(b []byte, f pointer, w int) ([]byte, error) { - if w != WireFixed64 { - return b, errInternalBadWireType - } - if len(b) < 8 { - return nil, io.ErrUnexpectedEOF - } - v := uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56 - *f.toUint64() = v - return b[8:], nil -} - -func unmarshalFixed64Ptr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireFixed64 { - return b, errInternalBadWireType - } - if len(b) < 8 { - return nil, io.ErrUnexpectedEOF - } - v := uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56 - *f.toUint64Ptr() = &v - return b[8:], nil -} - -func unmarshalFixed64Slice(b []byte, f pointer, w int) ([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - if len(b) < 8 { - return nil, io.ErrUnexpectedEOF - } - v := uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56 - s := f.toUint64Slice() - *s = append(*s, v) - b = b[8:] - } - return res, nil - } - if w != WireFixed64 { - return b, errInternalBadWireType - } - if len(b) < 8 { - return nil, io.ErrUnexpectedEOF - } - v := uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56 - s := f.toUint64Slice() - *s = append(*s, v) - return b[8:], nil -} - -func unmarshalFixedS64Value(b []byte, f pointer, w int) ([]byte, error) { - if w != WireFixed64 { - return b, errInternalBadWireType - } - if len(b) < 8 { - return nil, io.ErrUnexpectedEOF - } - v := int64(b[0]) | int64(b[1])<<8 | int64(b[2])<<16 | int64(b[3])<<24 | int64(b[4])<<32 | int64(b[5])<<40 | int64(b[6])<<48 | int64(b[7])<<56 - *f.toInt64() = v - return b[8:], nil -} - -func unmarshalFixedS64Ptr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireFixed64 { - return b, errInternalBadWireType - } - if len(b) < 8 { - return nil, io.ErrUnexpectedEOF - } - v := int64(b[0]) | int64(b[1])<<8 | int64(b[2])<<16 | int64(b[3])<<24 | int64(b[4])<<32 | int64(b[5])<<40 | int64(b[6])<<48 | int64(b[7])<<56 - *f.toInt64Ptr() = &v - return b[8:], nil -} - -func unmarshalFixedS64Slice(b []byte, f pointer, w int) ([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - if len(b) < 8 { - return nil, io.ErrUnexpectedEOF - } - v := int64(b[0]) | int64(b[1])<<8 | 
int64(b[2])<<16 | int64(b[3])<<24 | int64(b[4])<<32 | int64(b[5])<<40 | int64(b[6])<<48 | int64(b[7])<<56 - s := f.toInt64Slice() - *s = append(*s, v) - b = b[8:] - } - return res, nil - } - if w != WireFixed64 { - return b, errInternalBadWireType - } - if len(b) < 8 { - return nil, io.ErrUnexpectedEOF - } - v := int64(b[0]) | int64(b[1])<<8 | int64(b[2])<<16 | int64(b[3])<<24 | int64(b[4])<<32 | int64(b[5])<<40 | int64(b[6])<<48 | int64(b[7])<<56 - s := f.toInt64Slice() - *s = append(*s, v) - return b[8:], nil -} - -func unmarshalFixed32Value(b []byte, f pointer, w int) ([]byte, error) { - if w != WireFixed32 { - return b, errInternalBadWireType - } - if len(b) < 4 { - return nil, io.ErrUnexpectedEOF - } - v := uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24 - *f.toUint32() = v - return b[4:], nil -} - -func unmarshalFixed32Ptr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireFixed32 { - return b, errInternalBadWireType - } - if len(b) < 4 { - return nil, io.ErrUnexpectedEOF - } - v := uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24 - *f.toUint32Ptr() = &v - return b[4:], nil -} - -func unmarshalFixed32Slice(b []byte, f pointer, w int) ([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - if len(b) < 4 { - return nil, io.ErrUnexpectedEOF - } - v := uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24 - s := f.toUint32Slice() - *s = append(*s, v) - b = b[4:] - } - return res, nil - } - if w != WireFixed32 { - return b, errInternalBadWireType - } - if len(b) < 4 { - return nil, io.ErrUnexpectedEOF - } - v := uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24 - s := f.toUint32Slice() - *s = append(*s, v) - return b[4:], nil -} - -func unmarshalFixedS32Value(b []byte, f pointer, w int) ([]byte, error) { - if w != WireFixed32 { - return b, errInternalBadWireType - } - if len(b) < 4 { - return nil, io.ErrUnexpectedEOF - } - v := int32(b[0]) | int32(b[1])<<8 | int32(b[2])<<16 | int32(b[3])<<24 - *f.toInt32() = v - return b[4:], nil -} - -func unmarshalFixedS32Ptr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireFixed32 { - return b, errInternalBadWireType - } - if len(b) < 4 { - return nil, io.ErrUnexpectedEOF - } - v := int32(b[0]) | int32(b[1])<<8 | int32(b[2])<<16 | int32(b[3])<<24 - f.setInt32Ptr(v) - return b[4:], nil -} - -func unmarshalFixedS32Slice(b []byte, f pointer, w int) ([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - if len(b) < 4 { - return nil, io.ErrUnexpectedEOF - } - v := int32(b[0]) | int32(b[1])<<8 | int32(b[2])<<16 | int32(b[3])<<24 - f.appendInt32Slice(v) - b = b[4:] - } - return res, nil - } - if w != WireFixed32 { - return b, errInternalBadWireType - } - if len(b) < 4 { - return nil, io.ErrUnexpectedEOF - } - v := int32(b[0]) | int32(b[1])<<8 | int32(b[2])<<16 | int32(b[3])<<24 - f.appendInt32Slice(v) - return b[4:], nil -} - -func unmarshalBoolValue(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - // Note: any length varint is allowed, even though any sane - // encoder will use one byte. 
- // See https://github.com/golang/protobuf/issues/76 - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - // TODO: check if x>1? Tests seem to indicate no. - v := x != 0 - *f.toBool() = v - return b[n:], nil -} - -func unmarshalBoolPtr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - v := x != 0 - *f.toBoolPtr() = &v - return b[n:], nil -} - -func unmarshalBoolSlice(b []byte, f pointer, w int) ([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - x, n = decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - v := x != 0 - s := f.toBoolSlice() - *s = append(*s, v) - b = b[n:] - } - return res, nil - } - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - v := x != 0 - s := f.toBoolSlice() - *s = append(*s, v) - return b[n:], nil -} - -func unmarshalFloat64Value(b []byte, f pointer, w int) ([]byte, error) { - if w != WireFixed64 { - return b, errInternalBadWireType - } - if len(b) < 8 { - return nil, io.ErrUnexpectedEOF - } - v := math.Float64frombits(uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56) - *f.toFloat64() = v - return b[8:], nil -} - -func unmarshalFloat64Ptr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireFixed64 { - return b, errInternalBadWireType - } - if len(b) < 8 { - return nil, io.ErrUnexpectedEOF - } - v := math.Float64frombits(uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56) - *f.toFloat64Ptr() = &v - return b[8:], nil -} - -func unmarshalFloat64Slice(b []byte, f pointer, w int) ([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - if len(b) < 8 { - return nil, io.ErrUnexpectedEOF - } - v := math.Float64frombits(uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56) - s := f.toFloat64Slice() - *s = append(*s, v) - b = b[8:] - } - return res, nil - } - if w != WireFixed64 { - return b, errInternalBadWireType - } - if len(b) < 8 { - return nil, io.ErrUnexpectedEOF - } - v := math.Float64frombits(uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56) - s := f.toFloat64Slice() - *s = append(*s, v) - return b[8:], nil -} - -func unmarshalFloat32Value(b []byte, f pointer, w int) ([]byte, error) { - if w != WireFixed32 { - return b, errInternalBadWireType - } - if len(b) < 4 { - return nil, io.ErrUnexpectedEOF - } - v := math.Float32frombits(uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24) - *f.toFloat32() = v - return b[4:], nil -} - -func unmarshalFloat32Ptr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireFixed32 { - return b, errInternalBadWireType - } - if len(b) < 4 { - return nil, io.ErrUnexpectedEOF - } - 
v := math.Float32frombits(uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24) - *f.toFloat32Ptr() = &v - return b[4:], nil -} - -func unmarshalFloat32Slice(b []byte, f pointer, w int) ([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - if len(b) < 4 { - return nil, io.ErrUnexpectedEOF - } - v := math.Float32frombits(uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24) - s := f.toFloat32Slice() - *s = append(*s, v) - b = b[4:] - } - return res, nil - } - if w != WireFixed32 { - return b, errInternalBadWireType - } - if len(b) < 4 { - return nil, io.ErrUnexpectedEOF - } - v := math.Float32frombits(uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24) - s := f.toFloat32Slice() - *s = append(*s, v) - return b[4:], nil -} - -func unmarshalStringValue(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - v := string(b[:x]) - *f.toString() = v - return b[x:], nil -} - -func unmarshalStringPtr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - v := string(b[:x]) - *f.toStringPtr() = &v - return b[x:], nil -} - -func unmarshalStringSlice(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - v := string(b[:x]) - s := f.toStringSlice() - *s = append(*s, v) - return b[x:], nil -} - -func unmarshalUTF8StringValue(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - v := string(b[:x]) - *f.toString() = v - if !utf8.ValidString(v) { - return b[x:], errInvalidUTF8 - } - return b[x:], nil -} - -func unmarshalUTF8StringPtr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - v := string(b[:x]) - *f.toStringPtr() = &v - if !utf8.ValidString(v) { - return b[x:], errInvalidUTF8 - } - return b[x:], nil -} - -func unmarshalUTF8StringSlice(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - v := string(b[:x]) - s := f.toStringSlice() - *s = append(*s, v) - if !utf8.ValidString(v) { - return b[x:], errInvalidUTF8 - } - return b[x:], nil -} - -var emptyBuf [0]byte - -func unmarshalBytesValue(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 
0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - // The use of append here is a trick which avoids the zeroing - // that would be required if we used a make/copy pair. - // We append to emptyBuf instead of nil because we want - // a non-nil result even when the length is 0. - v := append(emptyBuf[:], b[:x]...) - *f.toBytes() = v - return b[x:], nil -} - -func unmarshalBytesSlice(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - v := append(emptyBuf[:], b[:x]...) - s := f.toBytesSlice() - *s = append(*s, v) - return b[x:], nil -} - -func makeUnmarshalMessagePtr(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - // First read the message field to see if something is there. - // The semantics of multiple submessages are weird. Instead of - // the last one winning (as it is for all other fields), multiple - // submessages are merged. - v := f.getPointer() - if v.isNil() { - v = valToPointer(reflect.New(sub.typ)) - f.setPointer(v) - } - err := sub.unmarshal(v, b[:x]) - if err != nil { - if r, ok := err.(*RequiredNotSetError); ok { - r.field = name + "." + r.field - } else { - return nil, err - } - } - return b[x:], err - } -} - -func makeUnmarshalMessageSlicePtr(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - v := valToPointer(reflect.New(sub.typ)) - err := sub.unmarshal(v, b[:x]) - if err != nil { - if r, ok := err.(*RequiredNotSetError); ok { - r.field = name + "." + r.field - } else { - return nil, err - } - } - f.appendPointer(v) - return b[x:], err - } -} - -func makeUnmarshalGroupPtr(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireStartGroup { - return b, errInternalBadWireType - } - x, y := findEndGroup(b) - if x < 0 { - return nil, io.ErrUnexpectedEOF - } - v := f.getPointer() - if v.isNil() { - v = valToPointer(reflect.New(sub.typ)) - f.setPointer(v) - } - err := sub.unmarshal(v, b[:x]) - if err != nil { - if r, ok := err.(*RequiredNotSetError); ok { - r.field = name + "." + r.field - } else { - return nil, err - } - } - return b[y:], err - } -} - -func makeUnmarshalGroupSlicePtr(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireStartGroup { - return b, errInternalBadWireType - } - x, y := findEndGroup(b) - if x < 0 { - return nil, io.ErrUnexpectedEOF - } - v := valToPointer(reflect.New(sub.typ)) - err := sub.unmarshal(v, b[:x]) - if err != nil { - if r, ok := err.(*RequiredNotSetError); ok { - r.field = name + "." 
+ r.field - } else { - return nil, err - } - } - f.appendPointer(v) - return b[y:], err - } -} - -func makeUnmarshalMap(f *reflect.StructField) unmarshaler { - t := f.Type - kt := t.Key() - vt := t.Elem() - tagArray := strings.Split(f.Tag.Get("protobuf"), ",") - valTags := strings.Split(f.Tag.Get("protobuf_val"), ",") - for _, t := range tagArray { - if strings.HasPrefix(t, "customtype=") { - valTags = append(valTags, t) - } - if t == "stdtime" { - valTags = append(valTags, t) - } - if t == "stdduration" { - valTags = append(valTags, t) - } - if t == "wktptr" { - valTags = append(valTags, t) - } - } - unmarshalKey := typeUnmarshaler(kt, f.Tag.Get("protobuf_key")) - unmarshalVal := typeUnmarshaler(vt, strings.Join(valTags, ",")) - return func(b []byte, f pointer, w int) ([]byte, error) { - // The map entry is a submessage. Figure out how big it is. - if w != WireBytes { - return nil, fmt.Errorf("proto: bad wiretype for map field: got %d want %d", w, WireBytes) - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - r := b[x:] // unused data to return - b = b[:x] // data for map entry - - // Note: we could use #keys * #values ~= 200 functions - // to do map decoding without reflection. Probably not worth it. - // Maps will be somewhat slow. Oh well. - - // Read key and value from data. - var nerr nonFatal - k := reflect.New(kt) - v := reflect.New(vt) - for len(b) > 0 { - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - wire := int(x) & 7 - b = b[n:] - - var err error - switch x >> 3 { - case 1: - b, err = unmarshalKey(b, valToPointer(k), wire) - case 2: - b, err = unmarshalVal(b, valToPointer(v), wire) - default: - err = errInternalBadWireType // skip unknown tag - } - - if nerr.Merge(err) { - continue - } - if err != errInternalBadWireType { - return nil, err - } - - // Skip past unknown fields. - b, err = skipField(b, wire) - if err != nil { - return nil, err - } - } - - // Get map, allocate if needed. - m := f.asPointerTo(t).Elem() // an addressable map[K]T - if m.IsNil() { - m.Set(reflect.MakeMap(t)) - } - - // Insert into map. - m.SetMapIndex(k.Elem(), v.Elem()) - - return r, nerr.E - } -} - -// makeUnmarshalOneof makes an unmarshaler for oneof fields. -// for: -// message Msg { -// oneof F { -// int64 X = 1; -// float64 Y = 2; -// } -// } -// typ is the type of the concrete entry for a oneof case (e.g. Msg_X). -// ityp is the interface type of the oneof field (e.g. isMsg_F). -// unmarshal is the unmarshaler for the base type of the oneof case (e.g. int64). -// Note that this function will be called once for each case in the oneof. -func makeUnmarshalOneof(typ, ityp reflect.Type, unmarshal unmarshaler) unmarshaler { - sf := typ.Field(0) - field0 := toField(&sf) - return func(b []byte, f pointer, w int) ([]byte, error) { - // Allocate holder for value. - v := reflect.New(typ) - - // Unmarshal data into holder. - // We unmarshal into the first field of the holder object. - var err error - var nerr nonFatal - b, err = unmarshal(b, valToPointer(v).offset(field0), w) - if !nerr.Merge(err) { - return nil, err - } - - // Write pointer to holder into target field. - f.asPointerTo(ityp).Elem().Set(v) - - return b, nerr.E - } -} - -// Error used by decode internally. -var errInternalBadWireType = errors.New("proto: internal error: bad wiretype") - -// skipField skips past a field of type wire and returns the remaining bytes. 
-func skipField(b []byte, wire int) ([]byte, error) { - switch wire { - case WireVarint: - _, k := decodeVarint(b) - if k == 0 { - return b, io.ErrUnexpectedEOF - } - b = b[k:] - case WireFixed32: - if len(b) < 4 { - return b, io.ErrUnexpectedEOF - } - b = b[4:] - case WireFixed64: - if len(b) < 8 { - return b, io.ErrUnexpectedEOF - } - b = b[8:] - case WireBytes: - m, k := decodeVarint(b) - if k == 0 || uint64(len(b)-k) < m { - return b, io.ErrUnexpectedEOF - } - b = b[uint64(k)+m:] - case WireStartGroup: - _, i := findEndGroup(b) - if i == -1 { - return b, io.ErrUnexpectedEOF - } - b = b[i:] - default: - return b, fmt.Errorf("proto: can't skip unknown wire type %d", wire) - } - return b, nil -} - -// findEndGroup finds the index of the next EndGroup tag. -// Groups may be nested, so the "next" EndGroup tag is the first -// unpaired EndGroup. -// findEndGroup returns the indexes of the start and end of the EndGroup tag. -// Returns (-1,-1) if it can't find one. -func findEndGroup(b []byte) (int, int) { - depth := 1 - i := 0 - for { - x, n := decodeVarint(b[i:]) - if n == 0 { - return -1, -1 - } - j := i - i += n - switch x & 7 { - case WireVarint: - _, k := decodeVarint(b[i:]) - if k == 0 { - return -1, -1 - } - i += k - case WireFixed32: - if len(b)-4 < i { - return -1, -1 - } - i += 4 - case WireFixed64: - if len(b)-8 < i { - return -1, -1 - } - i += 8 - case WireBytes: - m, k := decodeVarint(b[i:]) - if k == 0 { - return -1, -1 - } - i += k - if uint64(len(b)-i) < m { - return -1, -1 - } - i += int(m) - case WireStartGroup: - depth++ - case WireEndGroup: - depth-- - if depth == 0 { - return j, i - } - default: - return -1, -1 - } - } -} - -// encodeVarint appends a varint-encoded integer to b and returns the result. -func encodeVarint(b []byte, x uint64) []byte { - for x >= 1<<7 { - b = append(b, byte(x&0x7f|0x80)) - x >>= 7 - } - return append(b, byte(x)) -} - -// decodeVarint reads a varint-encoded integer from b. -// Returns the decoded integer and the number of bytes read. -// If there is an error, it returns 0,0. 
-func decodeVarint(b []byte) (uint64, int) { - var x, y uint64 - if len(b) == 0 { - goto bad - } - x = uint64(b[0]) - if x < 0x80 { - return x, 1 - } - x -= 0x80 - - if len(b) <= 1 { - goto bad - } - y = uint64(b[1]) - x += y << 7 - if y < 0x80 { - return x, 2 - } - x -= 0x80 << 7 - - if len(b) <= 2 { - goto bad - } - y = uint64(b[2]) - x += y << 14 - if y < 0x80 { - return x, 3 - } - x -= 0x80 << 14 - - if len(b) <= 3 { - goto bad - } - y = uint64(b[3]) - x += y << 21 - if y < 0x80 { - return x, 4 - } - x -= 0x80 << 21 - - if len(b) <= 4 { - goto bad - } - y = uint64(b[4]) - x += y << 28 - if y < 0x80 { - return x, 5 - } - x -= 0x80 << 28 - - if len(b) <= 5 { - goto bad - } - y = uint64(b[5]) - x += y << 35 - if y < 0x80 { - return x, 6 - } - x -= 0x80 << 35 - - if len(b) <= 6 { - goto bad - } - y = uint64(b[6]) - x += y << 42 - if y < 0x80 { - return x, 7 - } - x -= 0x80 << 42 - - if len(b) <= 7 { - goto bad - } - y = uint64(b[7]) - x += y << 49 - if y < 0x80 { - return x, 8 - } - x -= 0x80 << 49 - - if len(b) <= 8 { - goto bad - } - y = uint64(b[8]) - x += y << 56 - if y < 0x80 { - return x, 9 - } - x -= 0x80 << 56 - - if len(b) <= 9 { - goto bad - } - y = uint64(b[9]) - x += y << 63 - if y < 2 { - return x, 10 - } - -bad: - return 0, 0 -} diff --git a/vendor/github.com/gogo/protobuf/proto/table_unmarshal_gogo.go b/vendor/github.com/gogo/protobuf/proto/table_unmarshal_gogo.go deleted file mode 100644 index 00d6c7a..0000000 --- a/vendor/github.com/gogo/protobuf/proto/table_unmarshal_gogo.go +++ /dev/null @@ -1,385 +0,0 @@ -// Protocol Buffers for Go with Gadgets -// -// Copyright (c) 2018, The GoGo Authors. All rights reserved. -// http://github.com/gogo/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -import ( - "io" - "reflect" -) - -func makeUnmarshalMessage(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - // First read the message field to see if something is there. - // The semantics of multiple submessages are weird. 
Instead of - // the last one winning (as it is for all other fields), multiple - // submessages are merged. - v := f // gogo: changed from v := f.getPointer() - if v.isNil() { - v = valToPointer(reflect.New(sub.typ)) - f.setPointer(v) - } - err := sub.unmarshal(v, b[:x]) - if err != nil { - if r, ok := err.(*RequiredNotSetError); ok { - r.field = name + "." + r.field - } else { - return nil, err - } - } - return b[x:], err - } -} - -func makeUnmarshalMessageSlice(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - v := valToPointer(reflect.New(sub.typ)) - err := sub.unmarshal(v, b[:x]) - if err != nil { - if r, ok := err.(*RequiredNotSetError); ok { - r.field = name + "." + r.field - } else { - return nil, err - } - } - f.appendRef(v, sub.typ) // gogo: changed from f.appendPointer(v) - return b[x:], err - } -} - -func makeUnmarshalCustomPtr(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - - s := f.asPointerTo(reflect.PtrTo(sub.typ)).Elem() - s.Set(reflect.New(sub.typ)) - m := s.Interface().(custom) - if err := m.Unmarshal(b[:x]); err != nil { - return nil, err - } - return b[x:], nil - } -} - -func makeUnmarshalCustomSlice(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := reflect.New(sub.typ) - c := m.Interface().(custom) - if err := c.Unmarshal(b[:x]); err != nil { - return nil, err - } - v := valToPointer(m) - f.appendRef(v, sub.typ) - return b[x:], nil - } -} - -func makeUnmarshalCustom(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - - m := f.asPointerTo(sub.typ).Interface().(custom) - if err := m.Unmarshal(b[:x]); err != nil { - return nil, err - } - return b[x:], nil - } -} - -func makeUnmarshalTime(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &timestamp{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - t, err := timestampFromProto(m) - if err != nil { - return nil, err - } - s := f.asPointerTo(sub.typ).Elem() - s.Set(reflect.ValueOf(t)) - return b[x:], nil - } -} - -func makeUnmarshalTimePtr(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 {
- return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &timestamp{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - t, err := timestampFromProto(m) - if err != nil { - return nil, err - } - s := f.asPointerTo(reflect.PtrTo(sub.typ)).Elem() - s.Set(reflect.ValueOf(&t)) - return b[x:], nil - } -} - -func makeUnmarshalTimePtrSlice(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &timestamp{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - t, err := timestampFromProto(m) - if err != nil { - return nil, err - } - slice := f.getSlice(reflect.PtrTo(sub.typ)) - newSlice := reflect.Append(slice, reflect.ValueOf(&t)) - slice.Set(newSlice) - return b[x:], nil - } -} - -func makeUnmarshalTimeSlice(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &timestamp{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - t, err := timestampFromProto(m) - if err != nil { - return nil, err - } - slice := f.getSlice(sub.typ) - newSlice := reflect.Append(slice, reflect.ValueOf(t)) - slice.Set(newSlice) - return b[x:], nil - } -} - -func makeUnmarshalDurationPtr(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &duration{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - d, err := durationFromProto(m) - if err != nil { - return nil, err - } - s := f.asPointerTo(reflect.PtrTo(sub.typ)).Elem() - s.Set(reflect.ValueOf(&d)) - return b[x:], nil - } -} - -func makeUnmarshalDuration(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &duration{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - d, err := durationFromProto(m) - if err != nil { - return nil, err - } - s := f.asPointerTo(sub.typ).Elem() - s.Set(reflect.ValueOf(d)) - return b[x:], nil - } -} - -func makeUnmarshalDurationPtrSlice(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &duration{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - d, err := durationFromProto(m) - if err != nil { - return nil, err - } - slice := f.getSlice(reflect.PtrTo(sub.typ)) - newSlice := reflect.Append(slice, reflect.ValueOf(&d)) -
slice.Set(newSlice) - return b[x:], nil - } -} - -func makeUnmarshalDurationSlice(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &duration{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - d, err := durationFromProto(m) - if err != nil { - return nil, err - } - slice := f.getSlice(sub.typ) - newSlice := reflect.Append(slice, reflect.ValueOf(d)) - slice.Set(newSlice) - return b[x:], nil - } -} diff --git a/vendor/github.com/gogo/protobuf/proto/text.go b/vendor/github.com/gogo/protobuf/proto/text.go deleted file mode 100644 index 0407ba8..0000000 --- a/vendor/github.com/gogo/protobuf/proto/text.go +++ /dev/null @@ -1,928 +0,0 @@ -// Protocol Buffers for Go with Gadgets -// -// Copyright (c) 2013, The GoGo Authors. All rights reserved. -// http://github.com/gogo/protobuf -// -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -// Functions for writing the text protocol buffer format. - -import ( - "bufio" - "bytes" - "encoding" - "errors" - "fmt" - "io" - "log" - "math" - "reflect" - "sort" - "strings" - "sync" - "time" -) - -var ( - newline = []byte("\n") - spaces = []byte(" ") - endBraceNewline = []byte("}\n") - backslashN = []byte{'\\', 'n'} - backslashR = []byte{'\\', 'r'} - backslashT = []byte{'\\', 't'} - backslashDQ = []byte{'\\', '"'} - backslashBS = []byte{'\\', '\\'} - posInf = []byte("inf") - negInf = []byte("-inf") - nan = []byte("nan") -) - -type writer interface { - io.Writer - WriteByte(byte) error -} - -// textWriter is an io.Writer that tracks its indentation level. 
-type textWriter struct { - ind int - complete bool // if the current position is a complete line - compact bool // whether to write out as a one-liner - w writer -} - -func (w *textWriter) WriteString(s string) (n int, err error) { - if !strings.Contains(s, "\n") { - if !w.compact && w.complete { - w.writeIndent() - } - w.complete = false - return io.WriteString(w.w, s) - } - // WriteString is typically called without newlines, so this - // codepath and its copy are rare. We copy to avoid - // duplicating all of Write's logic here. - return w.Write([]byte(s)) -} - -func (w *textWriter) Write(p []byte) (n int, err error) { - newlines := bytes.Count(p, newline) - if newlines == 0 { - if !w.compact && w.complete { - w.writeIndent() - } - n, err = w.w.Write(p) - w.complete = false - return n, err - } - - frags := bytes.SplitN(p, newline, newlines+1) - if w.compact { - for i, frag := range frags { - if i > 0 { - if err := w.w.WriteByte(' '); err != nil { - return n, err - } - n++ - } - nn, err := w.w.Write(frag) - n += nn - if err != nil { - return n, err - } - } - return n, nil - } - - for i, frag := range frags { - if w.complete { - w.writeIndent() - } - nn, err := w.w.Write(frag) - n += nn - if err != nil { - return n, err - } - if i+1 < len(frags) { - if err := w.w.WriteByte('\n'); err != nil { - return n, err - } - n++ - } - } - w.complete = len(frags[len(frags)-1]) == 0 - return n, nil -} - -func (w *textWriter) WriteByte(c byte) error { - if w.compact && c == '\n' { - c = ' ' - } - if !w.compact && w.complete { - w.writeIndent() - } - err := w.w.WriteByte(c) - w.complete = c == '\n' - return err -} - -func (w *textWriter) indent() { w.ind++ } - -func (w *textWriter) unindent() { - if w.ind == 0 { - log.Print("proto: textWriter unindented too far") - return - } - w.ind-- -} - -func writeName(w *textWriter, props *Properties) error { - if _, err := w.WriteString(props.OrigName); err != nil { - return err - } - if props.Wire != "group" { - return w.WriteByte(':') - } - return nil -} - -func requiresQuotes(u string) bool { - // When type URL contains any characters except [0-9A-Za-z./\-]*, it must be quoted. - for _, ch := range u { - switch { - case ch == '.' || ch == '/' || ch == '_': - continue - case '0' <= ch && ch <= '9': - continue - case 'A' <= ch && ch <= 'Z': - continue - case 'a' <= ch && ch <= 'z': - continue - default: - return true - } - } - return false -} - -// isAny reports whether sv is a google.protobuf.Any message -func isAny(sv reflect.Value) bool { - type wkt interface { - XXX_WellKnownType() string - } - t, ok := sv.Addr().Interface().(wkt) - return ok && t.XXX_WellKnownType() == "Any" -} - -// writeProto3Any writes an expanded google.protobuf.Any message. -// -// It returns (false, nil) if sv value can't be unmarshaled (e.g. because -// required messages are not linked in). -// -// It returns (true, error) when sv was written in expanded format or an error -// was encountered. 
-func (tm *TextMarshaler) writeProto3Any(w *textWriter, sv reflect.Value) (bool, error) { - turl := sv.FieldByName("TypeUrl") - val := sv.FieldByName("Value") - if !turl.IsValid() || !val.IsValid() { - return true, errors.New("proto: invalid google.protobuf.Any message") - } - - b, ok := val.Interface().([]byte) - if !ok { - return true, errors.New("proto: invalid google.protobuf.Any message") - } - - parts := strings.Split(turl.String(), "/") - mt := MessageType(parts[len(parts)-1]) - if mt == nil { - return false, nil - } - m := reflect.New(mt.Elem()) - if err := Unmarshal(b, m.Interface().(Message)); err != nil { - return false, nil - } - w.Write([]byte("[")) - u := turl.String() - if requiresQuotes(u) { - writeString(w, u) - } else { - w.Write([]byte(u)) - } - if w.compact { - w.Write([]byte("]:<")) - } else { - w.Write([]byte("]: <\n")) - w.ind++ - } - if err := tm.writeStruct(w, m.Elem()); err != nil { - return true, err - } - if w.compact { - w.Write([]byte("> ")) - } else { - w.ind-- - w.Write([]byte(">\n")) - } - return true, nil -} - -func (tm *TextMarshaler) writeStruct(w *textWriter, sv reflect.Value) error { - if tm.ExpandAny && isAny(sv) { - if canExpand, err := tm.writeProto3Any(w, sv); canExpand { - return err - } - } - st := sv.Type() - sprops := GetProperties(st) - for i := 0; i < sv.NumField(); i++ { - fv := sv.Field(i) - props := sprops.Prop[i] - name := st.Field(i).Name - - if name == "XXX_NoUnkeyedLiteral" { - continue - } - - if strings.HasPrefix(name, "XXX_") { - // There are two XXX_ fields: - // XXX_unrecognized []byte - // XXX_extensions map[int32]proto.Extension - // The first is handled here; - // the second is handled at the bottom of this function. - if name == "XXX_unrecognized" && !fv.IsNil() { - if err := writeUnknownStruct(w, fv.Interface().([]byte)); err != nil { - return err - } - } - continue - } - if fv.Kind() == reflect.Ptr && fv.IsNil() { - // Field not filled in. This could be an optional field or - // a required field that wasn't filled in. Either way, there - // isn't anything we can show for it. - continue - } - if fv.Kind() == reflect.Slice && fv.IsNil() { - // Repeated field that is empty, or a bytes field that is unused. - continue - } - - if props.Repeated && fv.Kind() == reflect.Slice { - // Repeated field. - for j := 0; j < fv.Len(); j++ { - if err := writeName(w, props); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte(' '); err != nil { - return err - } - } - v := fv.Index(j) - if v.Kind() == reflect.Ptr && v.IsNil() { - // A nil message in a repeated field is not valid, - // but we can handle that more gracefully than panicking. - if _, err := w.Write([]byte("\n")); err != nil { - return err - } - continue - } - if len(props.Enum) > 0 { - if err := tm.writeEnum(w, v, props); err != nil { - return err - } - } else if err := tm.writeAny(w, v, props); err != nil { - return err - } - if err := w.WriteByte('\n'); err != nil { - return err - } - } - continue - } - if fv.Kind() == reflect.Map { - // Map fields are rendered as a repeated struct with key/value fields. 
- keys := fv.MapKeys() - sort.Sort(mapKeys(keys)) - for _, key := range keys { - val := fv.MapIndex(key) - if err := writeName(w, props); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte(' '); err != nil { - return err - } - } - // open struct - if err := w.WriteByte('<'); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte('\n'); err != nil { - return err - } - } - w.indent() - // key - if _, err := w.WriteString("key:"); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte(' '); err != nil { - return err - } - } - if err := tm.writeAny(w, key, props.MapKeyProp); err != nil { - return err - } - if err := w.WriteByte('\n'); err != nil { - return err - } - // nil values aren't legal, but we can avoid panicking because of them. - if val.Kind() != reflect.Ptr || !val.IsNil() { - // value - if _, err := w.WriteString("value:"); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte(' '); err != nil { - return err - } - } - if err := tm.writeAny(w, val, props.MapValProp); err != nil { - return err - } - if err := w.WriteByte('\n'); err != nil { - return err - } - } - // close struct - w.unindent() - if err := w.WriteByte('>'); err != nil { - return err - } - if err := w.WriteByte('\n'); err != nil { - return err - } - } - continue - } - if props.proto3 && fv.Kind() == reflect.Slice && fv.Len() == 0 { - // empty bytes field - continue - } - if props.proto3 && fv.Kind() != reflect.Ptr && fv.Kind() != reflect.Slice { - // proto3 non-repeated scalar field; skip if zero value - if isProto3Zero(fv) { - continue - } - } - - if fv.Kind() == reflect.Interface { - // Check if it is a oneof. - if st.Field(i).Tag.Get("protobuf_oneof") != "" { - // fv is nil, or holds a pointer to generated struct. - // That generated struct has exactly one field, - // which has a protobuf struct tag. - if fv.IsNil() { - continue - } - inner := fv.Elem().Elem() // interface -> *T -> T - tag := inner.Type().Field(0).Tag.Get("protobuf") - props = new(Properties) // Overwrite the outer props var, but not its pointee. - props.Parse(tag) - // Write the value in the oneof, not the oneof itself. - fv = inner.Field(0) - - // Special case to cope with malformed messages gracefully: - // If the value in the oneof is a nil pointer, don't panic - // in writeAny. - if fv.Kind() == reflect.Ptr && fv.IsNil() { - // Use errors.New so writeAny won't render quotes. - msg := errors.New("/* nil */") - fv = reflect.ValueOf(&msg).Elem() - } - } - } - - if err := writeName(w, props); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte(' '); err != nil { - return err - } - } - - if len(props.Enum) > 0 { - if err := tm.writeEnum(w, fv, props); err != nil { - return err - } - } else if err := tm.writeAny(w, fv, props); err != nil { - return err - } - - if err := w.WriteByte('\n'); err != nil { - return err - } - } - - // Extensions (the XXX_extensions field). - pv := sv - if pv.CanAddr() { - pv = sv.Addr() - } else { - pv = reflect.New(sv.Type()) - pv.Elem().Set(sv) - } - if _, err := extendable(pv.Interface()); err == nil { - if err := tm.writeExtensions(w, pv); err != nil { - return err - } - } - - return nil -} - -// writeAny writes an arbitrary field. 
-func (tm *TextMarshaler) writeAny(w *textWriter, v reflect.Value, props *Properties) error { - v = reflect.Indirect(v) - - if props != nil { - if len(props.CustomType) > 0 { - custom, ok := v.Interface().(Marshaler) - if ok { - data, err := custom.Marshal() - if err != nil { - return err - } - if err := writeString(w, string(data)); err != nil { - return err - } - return nil - } - } else if len(props.CastType) > 0 { - if _, ok := v.Interface().(interface { - String() string - }); ok { - switch v.Kind() { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, - reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: - _, err := fmt.Fprintf(w, "%d", v.Interface()) - return err - } - } - } else if props.StdTime { - t, ok := v.Interface().(time.Time) - if !ok { - return fmt.Errorf("stdtime is not time.Time, but %T", v.Interface()) - } - tproto, err := timestampProto(t) - if err != nil { - return err - } - propsCopy := *props // Make a copy so that this is goroutine-safe - propsCopy.StdTime = false - err = tm.writeAny(w, reflect.ValueOf(tproto), &propsCopy) - return err - } else if props.StdDuration { - d, ok := v.Interface().(time.Duration) - if !ok { - return fmt.Errorf("stdtime is not time.Duration, but %T", v.Interface()) - } - dproto := durationProto(d) - propsCopy := *props // Make a copy so that this is goroutine-safe - propsCopy.StdDuration = false - err := tm.writeAny(w, reflect.ValueOf(dproto), &propsCopy) - return err - } - } - - // Floats have special cases. - if v.Kind() == reflect.Float32 || v.Kind() == reflect.Float64 { - x := v.Float() - var b []byte - switch { - case math.IsInf(x, 1): - b = posInf - case math.IsInf(x, -1): - b = negInf - case math.IsNaN(x): - b = nan - } - if b != nil { - _, err := w.Write(b) - return err - } - // Other values are handled below. - } - - // We don't attempt to serialise every possible value type; only those - // that can occur in protocol buffers. - switch v.Kind() { - case reflect.Slice: - // Should only be a []byte; repeated fields are handled in writeStruct. - if err := writeString(w, string(v.Bytes())); err != nil { - return err - } - case reflect.String: - if err := writeString(w, v.String()); err != nil { - return err - } - case reflect.Struct: - // Required/optional group/message. - var bra, ket byte = '<', '>' - if props != nil && props.Wire == "group" { - bra, ket = '{', '}' - } - if err := w.WriteByte(bra); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte('\n'); err != nil { - return err - } - } - w.indent() - if v.CanAddr() { - // Calling v.Interface on a struct causes the reflect package to - // copy the entire struct. This is racy with the new Marshaler - // since we atomically update the XXX_sizecache. - // - // Thus, we retrieve a pointer to the struct if possible to avoid - // a race since v.Interface on the pointer doesn't copy the struct. - // - // If v is not addressable, then we are not worried about a race - // since it implies that the binary Marshaler cannot possibly be - // mutating this value. 
- v = v.Addr() - } - if etm, ok := v.Interface().(encoding.TextMarshaler); ok { - text, err := etm.MarshalText() - if err != nil { - return err - } - if _, err = w.Write(text); err != nil { - return err - } - } else { - if v.Kind() == reflect.Ptr { - v = v.Elem() - } - if err := tm.writeStruct(w, v); err != nil { - return err - } - } - w.unindent() - if err := w.WriteByte(ket); err != nil { - return err - } - default: - _, err := fmt.Fprint(w, v.Interface()) - return err - } - return nil -} - -// equivalent to C's isprint. -func isprint(c byte) bool { - return c >= 0x20 && c < 0x7f -} - -// writeString writes a string in the protocol buffer text format. -// It is similar to strconv.Quote except we don't use Go escape sequences, -// we treat the string as a byte sequence, and we use octal escapes. -// These differences are to maintain interoperability with the other -// languages' implementations of the text format. -func writeString(w *textWriter, s string) error { - // use WriteByte here to get any needed indent - if err := w.WriteByte('"'); err != nil { - return err - } - // Loop over the bytes, not the runes. - for i := 0; i < len(s); i++ { - var err error - // Divergence from C++: we don't escape apostrophes. - // There's no need to escape them, and the C++ parser - // copes with a naked apostrophe. - switch c := s[i]; c { - case '\n': - _, err = w.w.Write(backslashN) - case '\r': - _, err = w.w.Write(backslashR) - case '\t': - _, err = w.w.Write(backslashT) - case '"': - _, err = w.w.Write(backslashDQ) - case '\\': - _, err = w.w.Write(backslashBS) - default: - if isprint(c) { - err = w.w.WriteByte(c) - } else { - _, err = fmt.Fprintf(w.w, "\\%03o", c) - } - } - if err != nil { - return err - } - } - return w.WriteByte('"') -} - -func writeUnknownStruct(w *textWriter, data []byte) (err error) { - if !w.compact { - if _, err := fmt.Fprintf(w, "/* %d unknown bytes */\n", len(data)); err != nil { - return err - } - } - b := NewBuffer(data) - for b.index < len(b.buf) { - x, err := b.DecodeVarint() - if err != nil { - _, ferr := fmt.Fprintf(w, "/* %v */\n", err) - return ferr - } - wire, tag := x&7, x>>3 - if wire == WireEndGroup { - w.unindent() - if _, werr := w.Write(endBraceNewline); werr != nil { - return werr - } - continue - } - if _, ferr := fmt.Fprint(w, tag); ferr != nil { - return ferr - } - if wire != WireStartGroup { - if err = w.WriteByte(':'); err != nil { - return err - } - } - if !w.compact || wire == WireStartGroup { - if err = w.WriteByte(' '); err != nil { - return err - } - } - switch wire { - case WireBytes: - buf, e := b.DecodeRawBytes(false) - if e == nil { - _, err = fmt.Fprintf(w, "%q", buf) - } else { - _, err = fmt.Fprintf(w, "/* %v */", e) - } - case WireFixed32: - x, err = b.DecodeFixed32() - err = writeUnknownInt(w, x, err) - case WireFixed64: - x, err = b.DecodeFixed64() - err = writeUnknownInt(w, x, err) - case WireStartGroup: - err = w.WriteByte('{') - w.indent() - case WireVarint: - x, err = b.DecodeVarint() - err = writeUnknownInt(w, x, err) - default: - _, err = fmt.Fprintf(w, "/* unknown wire type %d */", wire) - } - if err != nil { - return err - } - if err := w.WriteByte('\n'); err != nil { - return err - } - } - return nil -} - -func writeUnknownInt(w *textWriter, x uint64, err error) error { - if err == nil { - _, err = fmt.Fprint(w, x) - } else { - _, err = fmt.Fprintf(w, "/* %v */", err) - } - return err -} - -type int32Slice []int32 - -func (s int32Slice) Len() int { return len(s) } -func (s int32Slice) Less(i, j int) bool { return s[i] < s[j] } 
-func (s int32Slice) Swap(i, j int) { s[i], s[j] = s[j], s[i] } - -// writeExtensions writes all the extensions in pv. -// pv is assumed to be a pointer to a protocol message struct that is extendable. -func (tm *TextMarshaler) writeExtensions(w *textWriter, pv reflect.Value) error { - emap := extensionMaps[pv.Type().Elem()] - e := pv.Interface().(Message) - - var m map[int32]Extension - var mu sync.Locker - if em, ok := e.(extensionsBytes); ok { - eb := em.GetExtensions() - var err error - m, err = BytesToExtensionsMap(*eb) - if err != nil { - return err - } - mu = notLocker{} - } else if _, ok := e.(extendableProto); ok { - ep, _ := extendable(e) - m, mu = ep.extensionsRead() - if m == nil { - return nil - } - } - - // Order the extensions by ID. - // This isn't strictly necessary, but it will give us - // canonical output, which will also make testing easier. - - mu.Lock() - ids := make([]int32, 0, len(m)) - for id := range m { - ids = append(ids, id) - } - sort.Sort(int32Slice(ids)) - mu.Unlock() - - for _, extNum := range ids { - ext := m[extNum] - var desc *ExtensionDesc - if emap != nil { - desc = emap[extNum] - } - if desc == nil { - // Unknown extension. - if err := writeUnknownStruct(w, ext.enc); err != nil { - return err - } - continue - } - - pb, err := GetExtension(e, desc) - if err != nil { - return fmt.Errorf("failed getting extension: %v", err) - } - - // Repeated extensions will appear as a slice. - if !desc.repeated() { - if err := tm.writeExtension(w, desc.Name, pb); err != nil { - return err - } - } else { - v := reflect.ValueOf(pb) - for i := 0; i < v.Len(); i++ { - if err := tm.writeExtension(w, desc.Name, v.Index(i).Interface()); err != nil { - return err - } - } - } - } - return nil -} - -func (tm *TextMarshaler) writeExtension(w *textWriter, name string, pb interface{}) error { - if _, err := fmt.Fprintf(w, "[%s]:", name); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte(' '); err != nil { - return err - } - } - if err := tm.writeAny(w, reflect.ValueOf(pb), nil); err != nil { - return err - } - if err := w.WriteByte('\n'); err != nil { - return err - } - return nil -} - -func (w *textWriter) writeIndent() { - if !w.complete { - return - } - remain := w.ind * 2 - for remain > 0 { - n := remain - if n > len(spaces) { - n = len(spaces) - } - w.w.Write(spaces[:n]) - remain -= n - } - w.complete = false -} - -// TextMarshaler is a configurable text format marshaler. -type TextMarshaler struct { - Compact bool // use compact text format (one line). - ExpandAny bool // expand google.protobuf.Any messages of known types -} - -// Marshal writes a given protocol buffer in text format. -// The only errors returned are from w. -func (tm *TextMarshaler) Marshal(w io.Writer, pb Message) error { - val := reflect.ValueOf(pb) - if pb == nil || val.IsNil() { - w.Write([]byte("")) - return nil - } - var bw *bufio.Writer - ww, ok := w.(writer) - if !ok { - bw = bufio.NewWriter(w) - ww = bw - } - aw := &textWriter{ - w: ww, - complete: true, - compact: tm.Compact, - } - - if etm, ok := pb.(encoding.TextMarshaler); ok { - text, err := etm.MarshalText() - if err != nil { - return err - } - if _, err = aw.Write(text); err != nil { - return err - } - if bw != nil { - return bw.Flush() - } - return nil - } - // Dereference the received pointer so we don't have outer < and >. 
- v := reflect.Indirect(val) - if err := tm.writeStruct(aw, v); err != nil { - return err - } - if bw != nil { - return bw.Flush() - } - return nil -} - -// Text is the same as Marshal, but returns the string directly. -func (tm *TextMarshaler) Text(pb Message) string { - var buf bytes.Buffer - tm.Marshal(&buf, pb) - return buf.String() -} - -var ( - defaultTextMarshaler = TextMarshaler{} - compactTextMarshaler = TextMarshaler{Compact: true} -) - -// TODO: consider removing some of the Marshal functions below. - -// MarshalText writes a given protocol buffer in text format. -// The only errors returned are from w. -func MarshalText(w io.Writer, pb Message) error { return defaultTextMarshaler.Marshal(w, pb) } - -// MarshalTextString is the same as MarshalText, but returns the string directly. -func MarshalTextString(pb Message) string { return defaultTextMarshaler.Text(pb) } - -// CompactText writes a given protocol buffer in compact text format (one line). -func CompactText(w io.Writer, pb Message) error { return compactTextMarshaler.Marshal(w, pb) } - -// CompactTextString is the same as CompactText, but returns the string directly. -func CompactTextString(pb Message) string { return compactTextMarshaler.Text(pb) } diff --git a/vendor/github.com/gogo/protobuf/proto/text_gogo.go b/vendor/github.com/gogo/protobuf/proto/text_gogo.go deleted file mode 100644 index 1d6c6aa..0000000 --- a/vendor/github.com/gogo/protobuf/proto/text_gogo.go +++ /dev/null @@ -1,57 +0,0 @@ -// Protocol Buffers for Go with Gadgets -// -// Copyright (c) 2013, The GoGo Authors. All rights reserved. -// http://github.com/gogo/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -package proto - -import ( - "fmt" - "reflect" -) - -func (tm *TextMarshaler) writeEnum(w *textWriter, v reflect.Value, props *Properties) error { - m, ok := enumStringMaps[props.Enum] - if !ok { - if err := tm.writeAny(w, v, props); err != nil { - return err - } - } - key := int32(0) - if v.Kind() == reflect.Ptr { - key = int32(v.Elem().Int()) - } else { - key = int32(v.Int()) - } - s, ok := m[key] - if !ok { - if err := tm.writeAny(w, v, props); err != nil { - return err - } - } - _, err := fmt.Fprint(w, s) - return err -} diff --git a/vendor/github.com/gogo/protobuf/proto/text_parser.go b/vendor/github.com/gogo/protobuf/proto/text_parser.go deleted file mode 100644 index 1ce0be2..0000000 --- a/vendor/github.com/gogo/protobuf/proto/text_parser.go +++ /dev/null @@ -1,1018 +0,0 @@ -// Protocol Buffers for Go with Gadgets -// -// Copyright (c) 2013, The GoGo Authors. All rights reserved. -// http://github.com/gogo/protobuf -// -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -// Functions for parsing the Text protocol buffer format. -// TODO: message sets. 
- -import ( - "encoding" - "errors" - "fmt" - "reflect" - "strconv" - "strings" - "time" - "unicode/utf8" -) - -// Error string emitted when deserializing Any and fields are already set -const anyRepeatedlyUnpacked = "Any message unpacked multiple times, or %q already set" - -type ParseError struct { - Message string - Line int // 1-based line number - Offset int // 0-based byte offset from start of input -} - -func (p *ParseError) Error() string { - if p.Line == 1 { - // show offset only for first line - return fmt.Sprintf("line 1.%d: %v", p.Offset, p.Message) - } - return fmt.Sprintf("line %d: %v", p.Line, p.Message) -} - -type token struct { - value string - err *ParseError - line int // line number - offset int // byte number from start of input, not start of line - unquoted string // the unquoted version of value, if it was a quoted string -} - -func (t *token) String() string { - if t.err == nil { - return fmt.Sprintf("%q (line=%d, offset=%d)", t.value, t.line, t.offset) - } - return fmt.Sprintf("parse error: %v", t.err) -} - -type textParser struct { - s string // remaining input - done bool // whether the parsing is finished (success or error) - backed bool // whether back() was called - offset, line int - cur token -} - -func newTextParser(s string) *textParser { - p := new(textParser) - p.s = s - p.line = 1 - p.cur.line = 1 - return p -} - -func (p *textParser) errorf(format string, a ...interface{}) *ParseError { - pe := &ParseError{fmt.Sprintf(format, a...), p.cur.line, p.cur.offset} - p.cur.err = pe - p.done = true - return pe -} - -// Numbers and identifiers are matched by [-+._A-Za-z0-9] -func isIdentOrNumberChar(c byte) bool { - switch { - case 'A' <= c && c <= 'Z', 'a' <= c && c <= 'z': - return true - case '0' <= c && c <= '9': - return true - } - switch c { - case '-', '+', '.', '_': - return true - } - return false -} - -func isWhitespace(c byte) bool { - switch c { - case ' ', '\t', '\n', '\r': - return true - } - return false -} - -func isQuote(c byte) bool { - switch c { - case '"', '\'': - return true - } - return false -} - -func (p *textParser) skipWhitespace() { - i := 0 - for i < len(p.s) && (isWhitespace(p.s[i]) || p.s[i] == '#') { - if p.s[i] == '#' { - // comment; skip to end of line or input - for i < len(p.s) && p.s[i] != '\n' { - i++ - } - if i == len(p.s) { - break - } - } - if p.s[i] == '\n' { - p.line++ - } - i++ - } - p.offset += i - p.s = p.s[i:len(p.s)] - if len(p.s) == 0 { - p.done = true - } -} - -func (p *textParser) advance() { - // Skip whitespace - p.skipWhitespace() - if p.done { - return - } - - // Start of non-whitespace - p.cur.err = nil - p.cur.offset, p.cur.line = p.offset, p.line - p.cur.unquoted = "" - switch p.s[0] { - case '<', '>', '{', '}', ':', '[', ']', ';', ',', '/': - // Single symbol - p.cur.value, p.s = p.s[0:1], p.s[1:len(p.s)] - case '"', '\'': - // Quoted string - i := 1 - for i < len(p.s) && p.s[i] != p.s[0] && p.s[i] != '\n' { - if p.s[i] == '\\' && i+1 < len(p.s) { - // skip escaped char - i++ - } - i++ - } - if i >= len(p.s) || p.s[i] != p.s[0] { - p.errorf("unmatched quote") - return - } - unq, err := unquoteC(p.s[1:i], rune(p.s[0])) - if err != nil { - p.errorf("invalid quoted string %s: %v", p.s[0:i+1], err) - return - } - p.cur.value, p.s = p.s[0:i+1], p.s[i+1:len(p.s)] - p.cur.unquoted = unq - default: - i := 0 - for i < len(p.s) && isIdentOrNumberChar(p.s[i]) { - i++ - } - if i == 0 { - p.errorf("unexpected byte %#x", p.s[0]) - return - } - p.cur.value, p.s = p.s[0:i], p.s[i:len(p.s)] - } - p.offset += 
len(p.cur.value) -} - -var ( - errBadUTF8 = errors.New("proto: bad UTF-8") -) - -func unquoteC(s string, quote rune) (string, error) { - // This is based on C++'s tokenizer.cc. - // Despite its name, this is *not* parsing C syntax. - // For instance, "\0" is an invalid quoted string. - - // Avoid allocation in trivial cases. - simple := true - for _, r := range s { - if r == '\\' || r == quote { - simple = false - break - } - } - if simple { - return s, nil - } - - buf := make([]byte, 0, 3*len(s)/2) - for len(s) > 0 { - r, n := utf8.DecodeRuneInString(s) - if r == utf8.RuneError && n == 1 { - return "", errBadUTF8 - } - s = s[n:] - if r != '\\' { - if r < utf8.RuneSelf { - buf = append(buf, byte(r)) - } else { - buf = append(buf, string(r)...) - } - continue - } - - ch, tail, err := unescape(s) - if err != nil { - return "", err - } - buf = append(buf, ch...) - s = tail - } - return string(buf), nil -} - -func unescape(s string) (ch string, tail string, err error) { - r, n := utf8.DecodeRuneInString(s) - if r == utf8.RuneError && n == 1 { - return "", "", errBadUTF8 - } - s = s[n:] - switch r { - case 'a': - return "\a", s, nil - case 'b': - return "\b", s, nil - case 'f': - return "\f", s, nil - case 'n': - return "\n", s, nil - case 'r': - return "\r", s, nil - case 't': - return "\t", s, nil - case 'v': - return "\v", s, nil - case '?': - return "?", s, nil // trigraph workaround - case '\'', '"', '\\': - return string(r), s, nil - case '0', '1', '2', '3', '4', '5', '6', '7': - if len(s) < 2 { - return "", "", fmt.Errorf(`\%c requires 2 following digits`, r) - } - ss := string(r) + s[:2] - s = s[2:] - i, err := strconv.ParseUint(ss, 8, 8) - if err != nil { - return "", "", fmt.Errorf(`\%s contains non-octal digits`, ss) - } - return string([]byte{byte(i)}), s, nil - case 'x', 'X', 'u', 'U': - var n int - switch r { - case 'x', 'X': - n = 2 - case 'u': - n = 4 - case 'U': - n = 8 - } - if len(s) < n { - return "", "", fmt.Errorf(`\%c requires %d following digits`, r, n) - } - ss := s[:n] - s = s[n:] - i, err := strconv.ParseUint(ss, 16, 64) - if err != nil { - return "", "", fmt.Errorf(`\%c%s contains non-hexadecimal digits`, r, ss) - } - if r == 'x' || r == 'X' { - return string([]byte{byte(i)}), s, nil - } - if i > utf8.MaxRune { - return "", "", fmt.Errorf(`\%c%s is not a valid Unicode code point`, r, ss) - } - return string(i), s, nil - } - return "", "", fmt.Errorf(`unknown escape \%c`, r) -} - -// Back off the parser by one token. Can only be done between calls to next(). -// It makes the next advance() a no-op. -func (p *textParser) back() { p.backed = true } - -// Advances the parser and returns the new current token. -func (p *textParser) next() *token { - if p.backed || p.done { - p.backed = false - return &p.cur - } - p.advance() - if p.done { - p.cur.value = "" - } else if len(p.cur.value) > 0 && isQuote(p.cur.value[0]) { - // Look for multiple quoted strings separated by whitespace, - // and concatenate them. 
- cat := p.cur - for { - p.skipWhitespace() - if p.done || !isQuote(p.s[0]) { - break - } - p.advance() - if p.cur.err != nil { - return &p.cur - } - cat.value += " " + p.cur.value - cat.unquoted += p.cur.unquoted - } - p.done = false // parser may have seen EOF, but we want to return cat - p.cur = cat - } - return &p.cur -} - -func (p *textParser) consumeToken(s string) error { - tok := p.next() - if tok.err != nil { - return tok.err - } - if tok.value != s { - p.back() - return p.errorf("expected %q, found %q", s, tok.value) - } - return nil -} - -// Return a RequiredNotSetError indicating which required field was not set. -func (p *textParser) missingRequiredFieldError(sv reflect.Value) *RequiredNotSetError { - st := sv.Type() - sprops := GetProperties(st) - for i := 0; i < st.NumField(); i++ { - if !isNil(sv.Field(i)) { - continue - } - - props := sprops.Prop[i] - if props.Required { - return &RequiredNotSetError{fmt.Sprintf("%v.%v", st, props.OrigName)} - } - } - return &RequiredNotSetError{fmt.Sprintf("%v.", st)} // should not happen -} - -// Returns the index in the struct for the named field, as well as the parsed tag properties. -func structFieldByName(sprops *StructProperties, name string) (int, *Properties, bool) { - i, ok := sprops.decoderOrigNames[name] - if ok { - return i, sprops.Prop[i], true - } - return -1, nil, false -} - -// Consume a ':' from the input stream (if the next token is a colon), -// returning an error if a colon is needed but not present. -func (p *textParser) checkForColon(props *Properties, typ reflect.Type) *ParseError { - tok := p.next() - if tok.err != nil { - return tok.err - } - if tok.value != ":" { - // Colon is optional when the field is a group or message. - needColon := true - switch props.Wire { - case "group": - needColon = false - case "bytes": - // A "bytes" field is either a message, a string, or a repeated field; - // those three become *T, *string and []T respectively, so we can check for - // this field being a pointer to a non-string. - if typ.Kind() == reflect.Ptr { - // *T or *string - if typ.Elem().Kind() == reflect.String { - break - } - } else if typ.Kind() == reflect.Slice { - // []T or []*T - if typ.Elem().Kind() != reflect.Ptr { - break - } - } else if typ.Kind() == reflect.String { - // The proto3 exception is for a string field, - // which requires a colon. - break - } - needColon = false - } - if needColon { - return p.errorf("expected ':', found %q", tok.value) - } - p.back() - } - return nil -} - -func (p *textParser) readStruct(sv reflect.Value, terminator string) error { - st := sv.Type() - sprops := GetProperties(st) - reqCount := sprops.reqCount - var reqFieldErr error - fieldSet := make(map[string]bool) - // A struct is a sequence of "name: value", terminated by one of - // '>' or '}', or the end of the input. A name may also be - // "[extension]" or "[type/url]". - // - // The whole struct can also be an expanded Any message, like: - // [type/url] < ... struct contents ... > - for { - tok := p.next() - if tok.err != nil { - return tok.err - } - if tok.value == terminator { - break - } - if tok.value == "[" { - // Looks like an extension or an Any. - // - // TODO: Check whether we need to handle - // namespace rooted names (e.g. ".something.Foo"). - extName, err := p.consumeExtName() - if err != nil { - return err - } - - if s := strings.LastIndex(extName, "/"); s >= 0 { - // If it contains a slash, it's an Any type URL. 
- messageName := extName[s+1:] - mt := MessageType(messageName) - if mt == nil { - return p.errorf("unrecognized message %q in google.protobuf.Any", messageName) - } - tok = p.next() - if tok.err != nil { - return tok.err - } - // consume an optional colon - if tok.value == ":" { - tok = p.next() - if tok.err != nil { - return tok.err - } - } - var terminator string - switch tok.value { - case "<": - terminator = ">" - case "{": - terminator = "}" - default: - return p.errorf("expected '{' or '<', found %q", tok.value) - } - v := reflect.New(mt.Elem()) - if pe := p.readStruct(v.Elem(), terminator); pe != nil { - return pe - } - b, err := Marshal(v.Interface().(Message)) - if err != nil { - return p.errorf("failed to marshal message of type %q: %v", messageName, err) - } - if fieldSet["type_url"] { - return p.errorf(anyRepeatedlyUnpacked, "type_url") - } - if fieldSet["value"] { - return p.errorf(anyRepeatedlyUnpacked, "value") - } - sv.FieldByName("TypeUrl").SetString(extName) - sv.FieldByName("Value").SetBytes(b) - fieldSet["type_url"] = true - fieldSet["value"] = true - continue - } - - var desc *ExtensionDesc - // This could be faster, but it's functional. - // TODO: Do something smarter than a linear scan. - for _, d := range RegisteredExtensions(reflect.New(st).Interface().(Message)) { - if d.Name == extName { - desc = d - break - } - } - if desc == nil { - return p.errorf("unrecognized extension %q", extName) - } - - props := &Properties{} - props.Parse(desc.Tag) - - typ := reflect.TypeOf(desc.ExtensionType) - if err := p.checkForColon(props, typ); err != nil { - return err - } - - rep := desc.repeated() - - // Read the extension structure, and set it in - // the value we're constructing. - var ext reflect.Value - if !rep { - ext = reflect.New(typ).Elem() - } else { - ext = reflect.New(typ.Elem()).Elem() - } - if err := p.readAny(ext, props); err != nil { - if _, ok := err.(*RequiredNotSetError); !ok { - return err - } - reqFieldErr = err - } - ep := sv.Addr().Interface().(Message) - if !rep { - SetExtension(ep, desc, ext.Interface()) - } else { - old, err := GetExtension(ep, desc) - var sl reflect.Value - if err == nil { - sl = reflect.ValueOf(old) // existing slice - } else { - sl = reflect.MakeSlice(typ, 0, 1) - } - sl = reflect.Append(sl, ext) - SetExtension(ep, desc, sl.Interface()) - } - if err := p.consumeOptionalSeparator(); err != nil { - return err - } - continue - } - - // This is a normal, non-extension field. - name := tok.value - var dst reflect.Value - fi, props, ok := structFieldByName(sprops, name) - if ok { - dst = sv.Field(fi) - } else if oop, ok := sprops.OneofTypes[name]; ok { - // It is a oneof. - props = oop.Prop - nv := reflect.New(oop.Type.Elem()) - dst = nv.Elem().Field(0) - field := sv.Field(oop.Field) - if !field.IsNil() { - return p.errorf("field '%s' would overwrite already parsed oneof '%s'", name, sv.Type().Field(oop.Field).Name) - } - field.Set(nv) - } - if !dst.IsValid() { - return p.errorf("unknown field name %q in %v", name, st) - } - - if dst.Kind() == reflect.Map { - // Consume any colon. - if err := p.checkForColon(props, dst.Type()); err != nil { - return err - } - - // Construct the map if it doesn't already exist. 
- if dst.IsNil() { - dst.Set(reflect.MakeMap(dst.Type())) - } - key := reflect.New(dst.Type().Key()).Elem() - val := reflect.New(dst.Type().Elem()).Elem() - - // The map entry should be this sequence of tokens: - // < key : KEY value : VALUE > - // However, implementations may omit key or value, and technically - // we should support them in any order. See b/28924776 for a time - // this went wrong. - - tok := p.next() - var terminator string - switch tok.value { - case "<": - terminator = ">" - case "{": - terminator = "}" - default: - return p.errorf("expected '{' or '<', found %q", tok.value) - } - for { - tok := p.next() - if tok.err != nil { - return tok.err - } - if tok.value == terminator { - break - } - switch tok.value { - case "key": - if err := p.consumeToken(":"); err != nil { - return err - } - if err := p.readAny(key, props.MapKeyProp); err != nil { - return err - } - if err := p.consumeOptionalSeparator(); err != nil { - return err - } - case "value": - if err := p.checkForColon(props.MapValProp, dst.Type().Elem()); err != nil { - return err - } - if err := p.readAny(val, props.MapValProp); err != nil { - return err - } - if err := p.consumeOptionalSeparator(); err != nil { - return err - } - default: - p.back() - return p.errorf(`expected "key", "value", or %q, found %q`, terminator, tok.value) - } - } - - dst.SetMapIndex(key, val) - continue - } - - // Check that it's not already set if it's not a repeated field. - if !props.Repeated && fieldSet[name] { - return p.errorf("non-repeated field %q was repeated", name) - } - - if err := p.checkForColon(props, dst.Type()); err != nil { - return err - } - - // Parse into the field. - fieldSet[name] = true - if err := p.readAny(dst, props); err != nil { - if _, ok := err.(*RequiredNotSetError); !ok { - return err - } - reqFieldErr = err - } - if props.Required { - reqCount-- - } - - if err := p.consumeOptionalSeparator(); err != nil { - return err - } - - } - - if reqCount > 0 { - return p.missingRequiredFieldError(sv) - } - return reqFieldErr -} - -// consumeExtName consumes extension name or expanded Any type URL and the -// following ']'. It returns the name or URL consumed. -func (p *textParser) consumeExtName() (string, error) { - tok := p.next() - if tok.err != nil { - return "", tok.err - } - - // If extension name or type url is quoted, it's a single token. - if len(tok.value) > 2 && isQuote(tok.value[0]) && tok.value[len(tok.value)-1] == tok.value[0] { - name, err := unquoteC(tok.value[1:len(tok.value)-1], rune(tok.value[0])) - if err != nil { - return "", err - } - return name, p.consumeToken("]") - } - - // Consume everything up to "]" - var parts []string - for tok.value != "]" { - parts = append(parts, tok.value) - tok = p.next() - if tok.err != nil { - return "", p.errorf("unrecognized type_url or extension name: %s", tok.err) - } - if p.done && tok.value != "]" { - return "", p.errorf("unclosed type_url or extension name") - } - } - return strings.Join(parts, ""), nil -} - -// consumeOptionalSeparator consumes an optional semicolon or comma. -// It is used in readStruct to provide backward compatibility. 
-func (p *textParser) consumeOptionalSeparator() error { - tok := p.next() - if tok.err != nil { - return tok.err - } - if tok.value != ";" && tok.value != "," { - p.back() - } - return nil -} - -func (p *textParser) readAny(v reflect.Value, props *Properties) error { - tok := p.next() - if tok.err != nil { - return tok.err - } - if tok.value == "" { - return p.errorf("unexpected EOF") - } - if len(props.CustomType) > 0 { - if props.Repeated { - t := reflect.TypeOf(v.Interface()) - if t.Kind() == reflect.Slice { - tc := reflect.TypeOf(new(Marshaler)) - ok := t.Elem().Implements(tc.Elem()) - if ok { - fv := v - flen := fv.Len() - if flen == fv.Cap() { - nav := reflect.MakeSlice(v.Type(), flen, 2*flen+1) - reflect.Copy(nav, fv) - fv.Set(nav) - } - fv.SetLen(flen + 1) - - // Read one. - p.back() - return p.readAny(fv.Index(flen), props) - } - } - } - if reflect.TypeOf(v.Interface()).Kind() == reflect.Ptr { - custom := reflect.New(props.ctype.Elem()).Interface().(Unmarshaler) - err := custom.Unmarshal([]byte(tok.unquoted)) - if err != nil { - return p.errorf("%v %v: %v", err, v.Type(), tok.value) - } - v.Set(reflect.ValueOf(custom)) - } else { - custom := reflect.New(reflect.TypeOf(v.Interface())).Interface().(Unmarshaler) - err := custom.Unmarshal([]byte(tok.unquoted)) - if err != nil { - return p.errorf("%v %v: %v", err, v.Type(), tok.value) - } - v.Set(reflect.Indirect(reflect.ValueOf(custom))) - } - return nil - } - if props.StdTime { - fv := v - p.back() - props.StdTime = false - tproto := &timestamp{} - err := p.readAny(reflect.ValueOf(tproto).Elem(), props) - props.StdTime = true - if err != nil { - return err - } - tim, err := timestampFromProto(tproto) - if err != nil { - return err - } - if props.Repeated { - t := reflect.TypeOf(v.Interface()) - if t.Kind() == reflect.Slice { - if t.Elem().Kind() == reflect.Ptr { - ts := fv.Interface().([]*time.Time) - ts = append(ts, &tim) - fv.Set(reflect.ValueOf(ts)) - return nil - } else { - ts := fv.Interface().([]time.Time) - ts = append(ts, tim) - fv.Set(reflect.ValueOf(ts)) - return nil - } - } - } - if reflect.TypeOf(v.Interface()).Kind() == reflect.Ptr { - v.Set(reflect.ValueOf(&tim)) - } else { - v.Set(reflect.Indirect(reflect.ValueOf(&tim))) - } - return nil - } - if props.StdDuration { - fv := v - p.back() - props.StdDuration = false - dproto := &duration{} - err := p.readAny(reflect.ValueOf(dproto).Elem(), props) - props.StdDuration = true - if err != nil { - return err - } - dur, err := durationFromProto(dproto) - if err != nil { - return err - } - if props.Repeated { - t := reflect.TypeOf(v.Interface()) - if t.Kind() == reflect.Slice { - if t.Elem().Kind() == reflect.Ptr { - ds := fv.Interface().([]*time.Duration) - ds = append(ds, &dur) - fv.Set(reflect.ValueOf(ds)) - return nil - } else { - ds := fv.Interface().([]time.Duration) - ds = append(ds, dur) - fv.Set(reflect.ValueOf(ds)) - return nil - } - } - } - if reflect.TypeOf(v.Interface()).Kind() == reflect.Ptr { - v.Set(reflect.ValueOf(&dur)) - } else { - v.Set(reflect.Indirect(reflect.ValueOf(&dur))) - } - return nil - } - switch fv := v; fv.Kind() { - case reflect.Slice: - at := v.Type() - if at.Elem().Kind() == reflect.Uint8 { - // Special case for []byte - if tok.value[0] != '"' && tok.value[0] != '\'' { - // Deliberately written out here, as the error after - // this switch statement would write "invalid []byte: ...", - // which is not as user-friendly.
- return p.errorf("invalid string: %v", tok.value) - } - bytes := []byte(tok.unquoted) - fv.Set(reflect.ValueOf(bytes)) - return nil - } - // Repeated field. - if tok.value == "[" { - // Repeated field with list notation, like [1,2,3]. - for { - fv.Set(reflect.Append(fv, reflect.New(at.Elem()).Elem())) - err := p.readAny(fv.Index(fv.Len()-1), props) - if err != nil { - return err - } - ntok := p.next() - if ntok.err != nil { - return ntok.err - } - if ntok.value == "]" { - break - } - if ntok.value != "," { - return p.errorf("Expected ']' or ',' found %q", ntok.value) - } - } - return nil - } - // One value of the repeated field. - p.back() - fv.Set(reflect.Append(fv, reflect.New(at.Elem()).Elem())) - return p.readAny(fv.Index(fv.Len()-1), props) - case reflect.Bool: - // true/1/t/True or false/f/0/False. - switch tok.value { - case "true", "1", "t", "True": - fv.SetBool(true) - return nil - case "false", "0", "f", "False": - fv.SetBool(false) - return nil - } - case reflect.Float32, reflect.Float64: - v := tok.value - // Ignore 'f' for compatibility with output generated by C++, but don't - // remove 'f' when the value is "-inf" or "inf". - if strings.HasSuffix(v, "f") && tok.value != "-inf" && tok.value != "inf" { - v = v[:len(v)-1] - } - if f, err := strconv.ParseFloat(v, fv.Type().Bits()); err == nil { - fv.SetFloat(f) - return nil - } - case reflect.Int8: - if x, err := strconv.ParseInt(tok.value, 0, 8); err == nil { - fv.SetInt(x) - return nil - } - case reflect.Int16: - if x, err := strconv.ParseInt(tok.value, 0, 16); err == nil { - fv.SetInt(x) - return nil - } - case reflect.Int32: - if x, err := strconv.ParseInt(tok.value, 0, 32); err == nil { - fv.SetInt(x) - return nil - } - - if len(props.Enum) == 0 { - break - } - m, ok := enumValueMaps[props.Enum] - if !ok { - break - } - x, ok := m[tok.value] - if !ok { - break - } - fv.SetInt(int64(x)) - return nil - case reflect.Int64: - if x, err := strconv.ParseInt(tok.value, 0, 64); err == nil { - fv.SetInt(x) - return nil - } - - case reflect.Ptr: - // A basic field (indirected through pointer), or a repeated message/group - p.back() - fv.Set(reflect.New(fv.Type().Elem())) - return p.readAny(fv.Elem(), props) - case reflect.String: - if tok.value[0] == '"' || tok.value[0] == '\'' { - fv.SetString(tok.unquoted) - return nil - } - case reflect.Struct: - var terminator string - switch tok.value { - case "{": - terminator = "}" - case "<": - terminator = ">" - default: - return p.errorf("expected '{' or '<', found %q", tok.value) - } - // TODO: Handle nested messages which implement encoding.TextUnmarshaler. - return p.readStruct(fv, terminator) - case reflect.Uint8: - if x, err := strconv.ParseUint(tok.value, 0, 8); err == nil { - fv.SetUint(x) - return nil - } - case reflect.Uint16: - if x, err := strconv.ParseUint(tok.value, 0, 16); err == nil { - fv.SetUint(x) - return nil - } - case reflect.Uint32: - if x, err := strconv.ParseUint(tok.value, 0, 32); err == nil { - fv.SetUint(uint64(x)) - return nil - } - case reflect.Uint64: - if x, err := strconv.ParseUint(tok.value, 0, 64); err == nil { - fv.SetUint(x) - return nil - } - } - return p.errorf("invalid %v: %v", v.Type(), tok.value) -} - -// UnmarshalText reads a protocol buffer in Text format. UnmarshalText resets pb -// before starting to unmarshal, so any existing data in pb is always removed. -// If a required field is not set and no other error occurs, -// UnmarshalText returns *RequiredNotSetError. 
-func UnmarshalText(s string, pb Message) error { - if um, ok := pb.(encoding.TextUnmarshaler); ok { - return um.UnmarshalText([]byte(s)) - } - pb.Reset() - v := reflect.ValueOf(pb) - return newTextParser(s).readStruct(v.Elem(), "") -} diff --git a/vendor/github.com/gogo/protobuf/proto/timestamp.go b/vendor/github.com/gogo/protobuf/proto/timestamp.go deleted file mode 100644 index 9324f65..0000000 --- a/vendor/github.com/gogo/protobuf/proto/timestamp.go +++ /dev/null @@ -1,113 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2016 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -// This file implements operations on google.protobuf.Timestamp. - -import ( - "errors" - "fmt" - "time" -) - -const ( - // Seconds field of the earliest valid Timestamp. - // This is time.Date(1, 1, 1, 0, 0, 0, 0, time.UTC).Unix(). - minValidSeconds = -62135596800 - // Seconds field just after the latest valid Timestamp. - // This is time.Date(10000, 1, 1, 0, 0, 0, 0, time.UTC).Unix(). - maxValidSeconds = 253402300800 -) - -// validateTimestamp determines whether a Timestamp is valid. -// A valid timestamp represents a time in the range -// [0001-01-01, 10000-01-01) and has a Nanos field -// in the range [0, 1e9). -// -// If the Timestamp is valid, validateTimestamp returns nil. -// Otherwise, it returns an error that describes -// the problem. -// -// Every valid Timestamp can be represented by a time.Time, but the converse is not true. 
-func validateTimestamp(ts *timestamp) error { - if ts == nil { - return errors.New("timestamp: nil Timestamp") - } - if ts.Seconds < minValidSeconds { - return fmt.Errorf("timestamp: %#v before 0001-01-01", ts) - } - if ts.Seconds >= maxValidSeconds { - return fmt.Errorf("timestamp: %#v after 10000-01-01", ts) - } - if ts.Nanos < 0 || ts.Nanos >= 1e9 { - return fmt.Errorf("timestamp: %#v: nanos not in range [0, 1e9)", ts) - } - return nil -} - -// TimestampFromProto converts a google.protobuf.Timestamp proto to a time.Time. -// It returns an error if the argument is invalid. -// -// Unlike most Go functions, if Timestamp returns an error, the first return value -// is not the zero time.Time. Instead, it is the value obtained from the -// time.Unix function when passed the contents of the Timestamp, in the UTC -// locale. This may or may not be a meaningful time; many invalid Timestamps -// do map to valid time.Times. -// -// A nil Timestamp returns an error. The first return value in that case is -// undefined. -func timestampFromProto(ts *timestamp) (time.Time, error) { - // Don't return the zero value on error, because corresponds to a valid - // timestamp. Instead return whatever time.Unix gives us. - var t time.Time - if ts == nil { - t = time.Unix(0, 0).UTC() // treat nil like the empty Timestamp - } else { - t = time.Unix(ts.Seconds, int64(ts.Nanos)).UTC() - } - return t, validateTimestamp(ts) -} - -// TimestampProto converts the time.Time to a google.protobuf.Timestamp proto. -// It returns an error if the resulting Timestamp is invalid. -func timestampProto(t time.Time) (*timestamp, error) { - seconds := t.Unix() - nanos := int32(t.Sub(time.Unix(seconds, 0))) - ts := &timestamp{ - Seconds: seconds, - Nanos: nanos, - } - if err := validateTimestamp(ts); err != nil { - return nil, err - } - return ts, nil -} diff --git a/vendor/github.com/gogo/protobuf/proto/timestamp_gogo.go b/vendor/github.com/gogo/protobuf/proto/timestamp_gogo.go deleted file mode 100644 index 38439fa..0000000 --- a/vendor/github.com/gogo/protobuf/proto/timestamp_gogo.go +++ /dev/null @@ -1,49 +0,0 @@ -// Protocol Buffers for Go with Gadgets -// -// Copyright (c) 2016, The GoGo Authors. All rights reserved. -// http://github.com/gogo/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -import ( - "reflect" - "time" -) - -var timeType = reflect.TypeOf((*time.Time)(nil)).Elem() - -type timestamp struct { - Seconds int64 `protobuf:"varint,1,opt,name=seconds,proto3" json:"seconds,omitempty"` - Nanos int32 `protobuf:"varint,2,opt,name=nanos,proto3" json:"nanos,omitempty"` -} - -func (m *timestamp) Reset() { *m = timestamp{} } -func (*timestamp) ProtoMessage() {} -func (*timestamp) String() string { return "timestamp" } - -func init() { - RegisterType((*timestamp)(nil), "gogo.protobuf.proto.timestamp") -} diff --git a/vendor/github.com/gogo/protobuf/proto/wrappers.go b/vendor/github.com/gogo/protobuf/proto/wrappers.go deleted file mode 100644 index b175d1b..0000000 --- a/vendor/github.com/gogo/protobuf/proto/wrappers.go +++ /dev/null @@ -1,1888 +0,0 @@ -// Protocol Buffers for Go with Gadgets -// -// Copyright (c) 2018, The GoGo Authors. All rights reserved. -// http://github.com/gogo/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -import ( - "io" - "reflect" -) - -func makeStdDoubleValueMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - t := ptr.asPointerTo(u.typ).Interface().(*float64) - v := &float64Value{*t} - siz := Size(v) - return tagsize + SizeVarint(uint64(siz)) + siz - }, func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - t := ptr.asPointerTo(u.typ).Interface().(*float64) - v := &float64Value{*t} - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(buf))) - b = append(b, buf...) 
- return b, nil - } -} - -func makeStdDoubleValuePtrMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - if ptr.isNil() { - return 0 - } - t := ptr.asPointerTo(reflect.PtrTo(u.typ)).Elem().Interface().(*float64) - v := &float64Value{*t} - siz := Size(v) - return tagsize + SizeVarint(uint64(siz)) + siz - }, func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - if ptr.isNil() { - return b, nil - } - t := ptr.asPointerTo(reflect.PtrTo(u.typ)).Elem().Interface().(*float64) - v := &float64Value{*t} - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(buf))) - b = append(b, buf...) - return b, nil - } -} - -func makeStdDoubleValueSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getSlice(u.typ) - n := 0 - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(float64) - v := &float64Value{t} - siz := Size(v) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getSlice(u.typ) - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(float64) - v := &float64Value{t} - siz := Size(v) - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(siz)) - b = append(b, buf...) - } - - return b, nil - } -} - -func makeStdDoubleValuePtrSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getSlice(reflect.PtrTo(u.typ)) - n := 0 - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(*float64) - v := &float64Value{*t} - siz := Size(v) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getSlice(reflect.PtrTo(u.typ)) - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(*float64) - v := &float64Value{*t} - siz := Size(v) - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(siz)) - b = append(b, buf...) 
- } - - return b, nil - } -} - -func makeStdDoubleValueUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &float64Value{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - s := f.asPointerTo(sub.typ).Elem() - s.Set(reflect.ValueOf(m.Value)) - return b[x:], nil - } -} - -func makeStdDoubleValuePtrUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &float64Value{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - s := f.asPointerTo(reflect.PtrTo(sub.typ)).Elem() - s.Set(reflect.ValueOf(&m.Value)) - return b[x:], nil - } -} - -func makeStdDoubleValuePtrSliceUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &float64Value{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - slice := f.getSlice(reflect.PtrTo(sub.typ)) - newSlice := reflect.Append(slice, reflect.ValueOf(&m.Value)) - slice.Set(newSlice) - return b[x:], nil - } -} - -func makeStdDoubleValueSliceUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &float64Value{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - slice := f.getSlice(sub.typ) - newSlice := reflect.Append(slice, reflect.ValueOf(m.Value)) - slice.Set(newSlice) - return b[x:], nil - } -} - -func makeStdFloatValueMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - t := ptr.asPointerTo(u.typ).Interface().(*float32) - v := &float32Value{*t} - siz := Size(v) - return tagsize + SizeVarint(uint64(siz)) + siz - }, func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - t := ptr.asPointerTo(u.typ).Interface().(*float32) - v := &float32Value{*t} - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(buf))) - b = append(b, buf...) 
- return b, nil - } -} - -func makeStdFloatValuePtrMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - if ptr.isNil() { - return 0 - } - t := ptr.asPointerTo(reflect.PtrTo(u.typ)).Elem().Interface().(*float32) - v := &float32Value{*t} - siz := Size(v) - return tagsize + SizeVarint(uint64(siz)) + siz - }, func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - if ptr.isNil() { - return b, nil - } - t := ptr.asPointerTo(reflect.PtrTo(u.typ)).Elem().Interface().(*float32) - v := &float32Value{*t} - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(buf))) - b = append(b, buf...) - return b, nil - } -} - -func makeStdFloatValueSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getSlice(u.typ) - n := 0 - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(float32) - v := &float32Value{t} - siz := Size(v) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getSlice(u.typ) - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(float32) - v := &float32Value{t} - siz := Size(v) - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(siz)) - b = append(b, buf...) - } - - return b, nil - } -} - -func makeStdFloatValuePtrSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getSlice(reflect.PtrTo(u.typ)) - n := 0 - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(*float32) - v := &float32Value{*t} - siz := Size(v) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getSlice(reflect.PtrTo(u.typ)) - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(*float32) - v := &float32Value{*t} - siz := Size(v) - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(siz)) - b = append(b, buf...) 
- } - - return b, nil - } -} - -func makeStdFloatValueUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &float32Value{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - s := f.asPointerTo(sub.typ).Elem() - s.Set(reflect.ValueOf(m.Value)) - return b[x:], nil - } -} - -func makeStdFloatValuePtrUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &float32Value{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - s := f.asPointerTo(reflect.PtrTo(sub.typ)).Elem() - s.Set(reflect.ValueOf(&m.Value)) - return b[x:], nil - } -} - -func makeStdFloatValuePtrSliceUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &float32Value{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - slice := f.getSlice(reflect.PtrTo(sub.typ)) - newSlice := reflect.Append(slice, reflect.ValueOf(&m.Value)) - slice.Set(newSlice) - return b[x:], nil - } -} - -func makeStdFloatValueSliceUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &float32Value{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - slice := f.getSlice(sub.typ) - newSlice := reflect.Append(slice, reflect.ValueOf(m.Value)) - slice.Set(newSlice) - return b[x:], nil - } -} - -func makeStdInt64ValueMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - t := ptr.asPointerTo(u.typ).Interface().(*int64) - v := &int64Value{*t} - siz := Size(v) - return tagsize + SizeVarint(uint64(siz)) + siz - }, func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - t := ptr.asPointerTo(u.typ).Interface().(*int64) - v := &int64Value{*t} - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(buf))) - b = append(b, buf...) 
- return b, nil - } -} - -func makeStdInt64ValuePtrMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - if ptr.isNil() { - return 0 - } - t := ptr.asPointerTo(reflect.PtrTo(u.typ)).Elem().Interface().(*int64) - v := &int64Value{*t} - siz := Size(v) - return tagsize + SizeVarint(uint64(siz)) + siz - }, func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - if ptr.isNil() { - return b, nil - } - t := ptr.asPointerTo(reflect.PtrTo(u.typ)).Elem().Interface().(*int64) - v := &int64Value{*t} - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(buf))) - b = append(b, buf...) - return b, nil - } -} - -func makeStdInt64ValueSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getSlice(u.typ) - n := 0 - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(int64) - v := &int64Value{t} - siz := Size(v) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getSlice(u.typ) - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(int64) - v := &int64Value{t} - siz := Size(v) - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(siz)) - b = append(b, buf...) - } - - return b, nil - } -} - -func makeStdInt64ValuePtrSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getSlice(reflect.PtrTo(u.typ)) - n := 0 - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(*int64) - v := &int64Value{*t} - siz := Size(v) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getSlice(reflect.PtrTo(u.typ)) - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(*int64) - v := &int64Value{*t} - siz := Size(v) - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(siz)) - b = append(b, buf...) 
- } - - return b, nil - } -} - -func makeStdInt64ValueUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &int64Value{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - s := f.asPointerTo(sub.typ).Elem() - s.Set(reflect.ValueOf(m.Value)) - return b[x:], nil - } -} - -func makeStdInt64ValuePtrUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &int64Value{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - s := f.asPointerTo(reflect.PtrTo(sub.typ)).Elem() - s.Set(reflect.ValueOf(&m.Value)) - return b[x:], nil - } -} - -func makeStdInt64ValuePtrSliceUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &int64Value{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - slice := f.getSlice(reflect.PtrTo(sub.typ)) - newSlice := reflect.Append(slice, reflect.ValueOf(&m.Value)) - slice.Set(newSlice) - return b[x:], nil - } -} - -func makeStdInt64ValueSliceUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &int64Value{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - slice := f.getSlice(sub.typ) - newSlice := reflect.Append(slice, reflect.ValueOf(m.Value)) - slice.Set(newSlice) - return b[x:], nil - } -} - -func makeStdUInt64ValueMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - t := ptr.asPointerTo(u.typ).Interface().(*uint64) - v := &uint64Value{*t} - siz := Size(v) - return tagsize + SizeVarint(uint64(siz)) + siz - }, func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - t := ptr.asPointerTo(u.typ).Interface().(*uint64) - v := &uint64Value{*t} - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(buf))) - b = append(b, buf...) 
- return b, nil - } -} - -func makeStdUInt64ValuePtrMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - if ptr.isNil() { - return 0 - } - t := ptr.asPointerTo(reflect.PtrTo(u.typ)).Elem().Interface().(*uint64) - v := &uint64Value{*t} - siz := Size(v) - return tagsize + SizeVarint(uint64(siz)) + siz - }, func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - if ptr.isNil() { - return b, nil - } - t := ptr.asPointerTo(reflect.PtrTo(u.typ)).Elem().Interface().(*uint64) - v := &uint64Value{*t} - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(buf))) - b = append(b, buf...) - return b, nil - } -} - -func makeStdUInt64ValueSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getSlice(u.typ) - n := 0 - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(uint64) - v := &uint64Value{t} - siz := Size(v) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getSlice(u.typ) - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(uint64) - v := &uint64Value{t} - siz := Size(v) - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(siz)) - b = append(b, buf...) - } - - return b, nil - } -} - -func makeStdUInt64ValuePtrSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getSlice(reflect.PtrTo(u.typ)) - n := 0 - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(*uint64) - v := &uint64Value{*t} - siz := Size(v) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getSlice(reflect.PtrTo(u.typ)) - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(*uint64) - v := &uint64Value{*t} - siz := Size(v) - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(siz)) - b = append(b, buf...) 
- } - - return b, nil - } -} - -func makeStdUInt64ValueUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &uint64Value{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - s := f.asPointerTo(sub.typ).Elem() - s.Set(reflect.ValueOf(m.Value)) - return b[x:], nil - } -} - -func makeStdUInt64ValuePtrUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &uint64Value{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - s := f.asPointerTo(reflect.PtrTo(sub.typ)).Elem() - s.Set(reflect.ValueOf(&m.Value)) - return b[x:], nil - } -} - -func makeStdUInt64ValuePtrSliceUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &uint64Value{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - slice := f.getSlice(reflect.PtrTo(sub.typ)) - newSlice := reflect.Append(slice, reflect.ValueOf(&m.Value)) - slice.Set(newSlice) - return b[x:], nil - } -} - -func makeStdUInt64ValueSliceUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &uint64Value{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - slice := f.getSlice(sub.typ) - newSlice := reflect.Append(slice, reflect.ValueOf(m.Value)) - slice.Set(newSlice) - return b[x:], nil - } -} - -func makeStdInt32ValueMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - t := ptr.asPointerTo(u.typ).Interface().(*int32) - v := &int32Value{*t} - siz := Size(v) - return tagsize + SizeVarint(uint64(siz)) + siz - }, func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - t := ptr.asPointerTo(u.typ).Interface().(*int32) - v := &int32Value{*t} - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(buf))) - b = append(b, buf...) 
- return b, nil - } -} - -func makeStdInt32ValuePtrMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - if ptr.isNil() { - return 0 - } - t := ptr.asPointerTo(reflect.PtrTo(u.typ)).Elem().Interface().(*int32) - v := &int32Value{*t} - siz := Size(v) - return tagsize + SizeVarint(uint64(siz)) + siz - }, func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - if ptr.isNil() { - return b, nil - } - t := ptr.asPointerTo(reflect.PtrTo(u.typ)).Elem().Interface().(*int32) - v := &int32Value{*t} - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(buf))) - b = append(b, buf...) - return b, nil - } -} - -func makeStdInt32ValueSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getSlice(u.typ) - n := 0 - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(int32) - v := &int32Value{t} - siz := Size(v) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getSlice(u.typ) - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(int32) - v := &int32Value{t} - siz := Size(v) - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(siz)) - b = append(b, buf...) - } - - return b, nil - } -} - -func makeStdInt32ValuePtrSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getSlice(reflect.PtrTo(u.typ)) - n := 0 - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(*int32) - v := &int32Value{*t} - siz := Size(v) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getSlice(reflect.PtrTo(u.typ)) - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(*int32) - v := &int32Value{*t} - siz := Size(v) - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(siz)) - b = append(b, buf...) 
- } - - return b, nil - } -} - -func makeStdInt32ValueUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &int32Value{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - s := f.asPointerTo(sub.typ).Elem() - s.Set(reflect.ValueOf(m.Value)) - return b[x:], nil - } -} - -func makeStdInt32ValuePtrUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &int32Value{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - s := f.asPointerTo(reflect.PtrTo(sub.typ)).Elem() - s.Set(reflect.ValueOf(&m.Value)) - return b[x:], nil - } -} - -func makeStdInt32ValuePtrSliceUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &int32Value{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - slice := f.getSlice(reflect.PtrTo(sub.typ)) - newSlice := reflect.Append(slice, reflect.ValueOf(&m.Value)) - slice.Set(newSlice) - return b[x:], nil - } -} - -func makeStdInt32ValueSliceUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &int32Value{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - slice := f.getSlice(sub.typ) - newSlice := reflect.Append(slice, reflect.ValueOf(m.Value)) - slice.Set(newSlice) - return b[x:], nil - } -} - -func makeStdUInt32ValueMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - t := ptr.asPointerTo(u.typ).Interface().(*uint32) - v := &uint32Value{*t} - siz := Size(v) - return tagsize + SizeVarint(uint64(siz)) + siz - }, func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - t := ptr.asPointerTo(u.typ).Interface().(*uint32) - v := &uint32Value{*t} - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(buf))) - b = append(b, buf...) 
- return b, nil - } -} - -func makeStdUInt32ValuePtrMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - if ptr.isNil() { - return 0 - } - t := ptr.asPointerTo(reflect.PtrTo(u.typ)).Elem().Interface().(*uint32) - v := &uint32Value{*t} - siz := Size(v) - return tagsize + SizeVarint(uint64(siz)) + siz - }, func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - if ptr.isNil() { - return b, nil - } - t := ptr.asPointerTo(reflect.PtrTo(u.typ)).Elem().Interface().(*uint32) - v := &uint32Value{*t} - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(buf))) - b = append(b, buf...) - return b, nil - } -} - -func makeStdUInt32ValueSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getSlice(u.typ) - n := 0 - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(uint32) - v := &uint32Value{t} - siz := Size(v) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getSlice(u.typ) - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(uint32) - v := &uint32Value{t} - siz := Size(v) - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(siz)) - b = append(b, buf...) - } - - return b, nil - } -} - -func makeStdUInt32ValuePtrSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getSlice(reflect.PtrTo(u.typ)) - n := 0 - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(*uint32) - v := &uint32Value{*t} - siz := Size(v) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getSlice(reflect.PtrTo(u.typ)) - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(*uint32) - v := &uint32Value{*t} - siz := Size(v) - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(siz)) - b = append(b, buf...) 
- } - - return b, nil - } -} - -func makeStdUInt32ValueUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &uint32Value{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - s := f.asPointerTo(sub.typ).Elem() - s.Set(reflect.ValueOf(m.Value)) - return b[x:], nil - } -} - -func makeStdUInt32ValuePtrUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &uint32Value{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - s := f.asPointerTo(reflect.PtrTo(sub.typ)).Elem() - s.Set(reflect.ValueOf(&m.Value)) - return b[x:], nil - } -} - -func makeStdUInt32ValuePtrSliceUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &uint32Value{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - slice := f.getSlice(reflect.PtrTo(sub.typ)) - newSlice := reflect.Append(slice, reflect.ValueOf(&m.Value)) - slice.Set(newSlice) - return b[x:], nil - } -} - -func makeStdUInt32ValueSliceUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &uint32Value{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - slice := f.getSlice(sub.typ) - newSlice := reflect.Append(slice, reflect.ValueOf(m.Value)) - slice.Set(newSlice) - return b[x:], nil - } -} - -func makeStdBoolValueMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - t := ptr.asPointerTo(u.typ).Interface().(*bool) - v := &boolValue{*t} - siz := Size(v) - return tagsize + SizeVarint(uint64(siz)) + siz - }, func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - t := ptr.asPointerTo(u.typ).Interface().(*bool) - v := &boolValue{*t} - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(buf))) - b = append(b, buf...) 
- return b, nil - } -} - -func makeStdBoolValuePtrMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - if ptr.isNil() { - return 0 - } - t := ptr.asPointerTo(reflect.PtrTo(u.typ)).Elem().Interface().(*bool) - v := &boolValue{*t} - siz := Size(v) - return tagsize + SizeVarint(uint64(siz)) + siz - }, func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - if ptr.isNil() { - return b, nil - } - t := ptr.asPointerTo(reflect.PtrTo(u.typ)).Elem().Interface().(*bool) - v := &boolValue{*t} - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(buf))) - b = append(b, buf...) - return b, nil - } -} - -func makeStdBoolValueSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getSlice(u.typ) - n := 0 - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(bool) - v := &boolValue{t} - siz := Size(v) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getSlice(u.typ) - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(bool) - v := &boolValue{t} - siz := Size(v) - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(siz)) - b = append(b, buf...) - } - - return b, nil - } -} - -func makeStdBoolValuePtrSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getSlice(reflect.PtrTo(u.typ)) - n := 0 - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(*bool) - v := &boolValue{*t} - siz := Size(v) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getSlice(reflect.PtrTo(u.typ)) - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(*bool) - v := &boolValue{*t} - siz := Size(v) - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(siz)) - b = append(b, buf...) 
- } - - return b, nil - } -} - -func makeStdBoolValueUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &boolValue{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - s := f.asPointerTo(sub.typ).Elem() - s.Set(reflect.ValueOf(m.Value)) - return b[x:], nil - } -} - -func makeStdBoolValuePtrUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &boolValue{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - s := f.asPointerTo(reflect.PtrTo(sub.typ)).Elem() - s.Set(reflect.ValueOf(&m.Value)) - return b[x:], nil - } -} - -func makeStdBoolValuePtrSliceUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &boolValue{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - slice := f.getSlice(reflect.PtrTo(sub.typ)) - newSlice := reflect.Append(slice, reflect.ValueOf(&m.Value)) - slice.Set(newSlice) - return b[x:], nil - } -} - -func makeStdBoolValueSliceUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &boolValue{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - slice := f.getSlice(sub.typ) - newSlice := reflect.Append(slice, reflect.ValueOf(m.Value)) - slice.Set(newSlice) - return b[x:], nil - } -} - -func makeStdStringValueMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - t := ptr.asPointerTo(u.typ).Interface().(*string) - v := &stringValue{*t} - siz := Size(v) - return tagsize + SizeVarint(uint64(siz)) + siz - }, func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - t := ptr.asPointerTo(u.typ).Interface().(*string) - v := &stringValue{*t} - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(buf))) - b = append(b, buf...) 
- return b, nil - } -} - -func makeStdStringValuePtrMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - if ptr.isNil() { - return 0 - } - t := ptr.asPointerTo(reflect.PtrTo(u.typ)).Elem().Interface().(*string) - v := &stringValue{*t} - siz := Size(v) - return tagsize + SizeVarint(uint64(siz)) + siz - }, func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - if ptr.isNil() { - return b, nil - } - t := ptr.asPointerTo(reflect.PtrTo(u.typ)).Elem().Interface().(*string) - v := &stringValue{*t} - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(buf))) - b = append(b, buf...) - return b, nil - } -} - -func makeStdStringValueSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getSlice(u.typ) - n := 0 - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(string) - v := &stringValue{t} - siz := Size(v) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getSlice(u.typ) - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(string) - v := &stringValue{t} - siz := Size(v) - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(siz)) - b = append(b, buf...) - } - - return b, nil - } -} - -func makeStdStringValuePtrSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getSlice(reflect.PtrTo(u.typ)) - n := 0 - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(*string) - v := &stringValue{*t} - siz := Size(v) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getSlice(reflect.PtrTo(u.typ)) - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(*string) - v := &stringValue{*t} - siz := Size(v) - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(siz)) - b = append(b, buf...) 
- } - - return b, nil - } -} - -func makeStdStringValueUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &stringValue{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - s := f.asPointerTo(sub.typ).Elem() - s.Set(reflect.ValueOf(m.Value)) - return b[x:], nil - } -} - -func makeStdStringValuePtrUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &stringValue{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - s := f.asPointerTo(reflect.PtrTo(sub.typ)).Elem() - s.Set(reflect.ValueOf(&m.Value)) - return b[x:], nil - } -} - -func makeStdStringValuePtrSliceUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &stringValue{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - slice := f.getSlice(reflect.PtrTo(sub.typ)) - newSlice := reflect.Append(slice, reflect.ValueOf(&m.Value)) - slice.Set(newSlice) - return b[x:], nil - } -} - -func makeStdStringValueSliceUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &stringValue{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - slice := f.getSlice(sub.typ) - newSlice := reflect.Append(slice, reflect.ValueOf(m.Value)) - slice.Set(newSlice) - return b[x:], nil - } -} - -func makeStdBytesValueMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - t := ptr.asPointerTo(u.typ).Interface().(*[]byte) - v := &bytesValue{*t} - siz := Size(v) - return tagsize + SizeVarint(uint64(siz)) + siz - }, func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - t := ptr.asPointerTo(u.typ).Interface().(*[]byte) - v := &bytesValue{*t} - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(buf))) - b = append(b, buf...) 
- return b, nil - } -} - -func makeStdBytesValuePtrMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - if ptr.isNil() { - return 0 - } - t := ptr.asPointerTo(reflect.PtrTo(u.typ)).Elem().Interface().(*[]byte) - v := &bytesValue{*t} - siz := Size(v) - return tagsize + SizeVarint(uint64(siz)) + siz - }, func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - if ptr.isNil() { - return b, nil - } - t := ptr.asPointerTo(reflect.PtrTo(u.typ)).Elem().Interface().(*[]byte) - v := &bytesValue{*t} - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(buf))) - b = append(b, buf...) - return b, nil - } -} - -func makeStdBytesValueSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getSlice(u.typ) - n := 0 - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().([]byte) - v := &bytesValue{t} - siz := Size(v) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getSlice(u.typ) - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().([]byte) - v := &bytesValue{t} - siz := Size(v) - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(siz)) - b = append(b, buf...) - } - - return b, nil - } -} - -func makeStdBytesValuePtrSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getSlice(reflect.PtrTo(u.typ)) - n := 0 - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(*[]byte) - v := &bytesValue{*t} - siz := Size(v) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getSlice(reflect.PtrTo(u.typ)) - for i := 0; i < s.Len(); i++ { - elem := s.Index(i) - t := elem.Interface().(*[]byte) - v := &bytesValue{*t} - siz := Size(v) - buf, err := Marshal(v) - if err != nil { - return nil, err - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(siz)) - b = append(b, buf...) 
- } - - return b, nil - } -} - -func makeStdBytesValueUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &bytesValue{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - s := f.asPointerTo(sub.typ).Elem() - s.Set(reflect.ValueOf(m.Value)) - return b[x:], nil - } -} - -func makeStdBytesValuePtrUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &bytesValue{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - s := f.asPointerTo(reflect.PtrTo(sub.typ)).Elem() - s.Set(reflect.ValueOf(&m.Value)) - return b[x:], nil - } -} - -func makeStdBytesValuePtrSliceUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &bytesValue{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - slice := f.getSlice(reflect.PtrTo(sub.typ)) - newSlice := reflect.Append(slice, reflect.ValueOf(&m.Value)) - slice.Set(newSlice) - return b[x:], nil - } -} - -func makeStdBytesValueSliceUnmarshaler(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return nil, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - m := &bytesValue{} - if err := Unmarshal(b[:x], m); err != nil { - return nil, err - } - slice := f.getSlice(sub.typ) - newSlice := reflect.Append(slice, reflect.ValueOf(m.Value)) - slice.Set(newSlice) - return b[x:], nil - } -} diff --git a/vendor/github.com/gogo/protobuf/proto/wrappers_gogo.go b/vendor/github.com/gogo/protobuf/proto/wrappers_gogo.go deleted file mode 100644 index c1cf7bf..0000000 --- a/vendor/github.com/gogo/protobuf/proto/wrappers_gogo.go +++ /dev/null @@ -1,113 +0,0 @@ -// Protocol Buffers for Go with Gadgets -// -// Copyright (c) 2018, The GoGo Authors. All rights reserved. -// http://github.com/gogo/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -type float64Value struct { - Value float64 `protobuf:"fixed64,1,opt,name=value,proto3" json:"value,omitempty"` -} - -func (m *float64Value) Reset() { *m = float64Value{} } -func (*float64Value) ProtoMessage() {} -func (*float64Value) String() string { return "float64" } - -type float32Value struct { - Value float32 `protobuf:"fixed32,1,opt,name=value,proto3" json:"value,omitempty"` -} - -func (m *float32Value) Reset() { *m = float32Value{} } -func (*float32Value) ProtoMessage() {} -func (*float32Value) String() string { return "float32" } - -type int64Value struct { - Value int64 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"` -} - -func (m *int64Value) Reset() { *m = int64Value{} } -func (*int64Value) ProtoMessage() {} -func (*int64Value) String() string { return "int64" } - -type uint64Value struct { - Value uint64 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"` -} - -func (m *uint64Value) Reset() { *m = uint64Value{} } -func (*uint64Value) ProtoMessage() {} -func (*uint64Value) String() string { return "uint64" } - -type int32Value struct { - Value int32 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"` -} - -func (m *int32Value) Reset() { *m = int32Value{} } -func (*int32Value) ProtoMessage() {} -func (*int32Value) String() string { return "int32" } - -type uint32Value struct { - Value uint32 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"` -} - -func (m *uint32Value) Reset() { *m = uint32Value{} } -func (*uint32Value) ProtoMessage() {} -func (*uint32Value) String() string { return "uint32" } - -type boolValue struct { - Value bool `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"` -} - -func (m *boolValue) Reset() { *m = boolValue{} } -func (*boolValue) ProtoMessage() {} -func (*boolValue) String() string { return "bool" } - -type stringValue struct { - Value string `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` -} - -func (m *stringValue) Reset() { *m = stringValue{} } -func (*stringValue) ProtoMessage() {} -func (*stringValue) String() string { return "string" } - -type bytesValue struct { - Value []byte `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` -} - -func (m *bytesValue) Reset() { *m = bytesValue{} } -func (*bytesValue) ProtoMessage() {} -func (*bytesValue) String() string { return "[]byte" } - -func init() { - RegisterType((*float64Value)(nil), "gogo.protobuf.proto.DoubleValue") - RegisterType((*float32Value)(nil), "gogo.protobuf.proto.FloatValue") - RegisterType((*int64Value)(nil), "gogo.protobuf.proto.Int64Value") - RegisterType((*uint64Value)(nil), "gogo.protobuf.proto.UInt64Value") - RegisterType((*int32Value)(nil), "gogo.protobuf.proto.Int32Value") - RegisterType((*uint32Value)(nil), "gogo.protobuf.proto.UInt32Value") - RegisterType((*boolValue)(nil), "gogo.protobuf.proto.BoolValue") - RegisterType((*stringValue)(nil), "gogo.protobuf.proto.StringValue") - RegisterType((*bytesValue)(nil), 
"gogo.protobuf.proto.BytesValue") -} diff --git a/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go b/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go index ae5b7da..51c75a7 100644 --- a/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go +++ b/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go @@ -81,7 +81,7 @@ func (c *Cache) fileName(id [HashSize]byte, key string) string { var errMissing = errors.New("cache entry not found") func IsErrMissing(err error) bool { - return err == errMissing + return errors.Cause(err) == errMissing } const ( @@ -199,26 +199,6 @@ func (c *Cache) get(id ActionID) (Entry, error) { return Entry{buf, size, time.Unix(0, tm)}, nil } -// GetFile looks up the action ID in the cache and returns -// the name of the corresponding data file. -func (c *Cache) GetFile(id ActionID) (file string, entry Entry, err error) { - entry, err = c.Get(id) - if err != nil { - return "", Entry{}, err - } - - file, err = c.OutputFile(entry.OutputID) - if err != nil { - return "", Entry{}, err - } - - info, err := os.Stat(file) - if err != nil || info.Size() != entry.Size { - return "", Entry{}, errMissing - } - return file, entry, nil -} - // GetBytes looks up the action ID in the cache and returns // the corresponding output bytes. // GetBytes should only be used for data that can be expected to fit in memory. @@ -282,6 +262,9 @@ const ( func (c *Cache) used(file string) error { info, err := os.Stat(file) if err != nil { + if os.IsNotExist(err) { + return errMissing + } return errors.Wrapf(err, "failed to stat file %s", file) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/completion.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/completion.go index 7d919d1..e2be6f2 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/commands/completion.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/completion.go @@ -28,6 +28,20 @@ func (e *Executor) initCompletion() { RunE: e.executeZshCompletion, } completionCmd.AddCommand(zshCmd) + + fishCmd := &cobra.Command{ + Use: "fish", + Short: "Output fish completion script", + RunE: e.executeFishCompletion, + } + completionCmd.AddCommand(fishCmd) + + powerShell := &cobra.Command{ + Use: "powershell", + Short: "Output powershell completion script", + RunE: e.executePowerShellCompletion, + } + completionCmd.AddCommand(powerShell) } func (e *Executor) executeBashCompletion(cmd *cobra.Command, args []string) error { @@ -51,3 +65,21 @@ func (e *Executor) executeZshCompletion(cmd *cobra.Command, args []string) error return nil } + +func (e *Executor) executeFishCompletion(cmd *cobra.Command, args []string) error { + err := cmd.Root().GenFishCompletion(os.Stdout, true) + if err != nil { + return errors.Wrap(err, "generate fish completion") + } + + return nil +} + +func (e *Executor) executePowerShellCompletion(cmd *cobra.Command, args []string) error { + err := cmd.Root().GenPowerShellCompletion(os.Stdout) + if err != nil { + return errors.Wrap(err, "generate powershell completion") + } + + return nil +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/executor.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/executor.go index 9581aca..0becc99 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/commands/executor.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/executor.go @@ -218,11 +218,15 @@ func (e *Executor) acquireFileLock() bool { lockFile := filepath.Join(os.TempDir(), "golangci-lint.lock") 
e.debugf("Locking on file %s...", lockFile) f := flock.New(lockFile) - const totalTimeout = 5 * time.Second const retryDelay = time.Second - ctx, finish := context.WithTimeout(context.Background(), totalTimeout) - defer finish() + ctx := context.Background() + if !e.cfg.Run.AllowSerialRunners { + const totalTimeout = 5 * time.Second + var cancel context.CancelFunc + ctx, cancel = context.WithTimeout(ctx, totalTimeout) + defer cancel() + } if ok, _ := f.TryLockContext(ctx, retryDelay); !ok { return false } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go index e4887d5..357e274 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go @@ -47,9 +47,6 @@ func getDefaultDirectoryExcludeHelp() string { return strings.Join(parts, "\n") } -const welcomeMessage = "Run this tool in cloud on every github pull " + - "request in https://golangci.com for free (public repos)" - func wh(text string) string { return color.GreenString(text) } @@ -80,7 +77,9 @@ func initFlagSet(fs *pflag.FlagSet, cfg *config.Config, m *lintersdb.Manager, is fs.BoolVar(&oc.PrintIssuedLine, "print-issued-lines", true, wh("Print lines of code with issue")) fs.BoolVar(&oc.PrintLinterName, "print-linter-name", true, wh("Print linter name in issue line")) fs.BoolVar(&oc.UniqByLine, "uniq-by-line", true, wh("Make issues output unique by line")) + fs.BoolVar(&oc.SortResults, "sort-results", false, wh("Sort linter results")) fs.BoolVar(&oc.PrintWelcomeMessage, "print-welcome", false, wh("Print welcome message")) + fs.StringVar(&oc.PathPrefix, "path-prefix", "", wh("Path prefix to add to output")) hideFlag("print-welcome") // no longer used // Run config @@ -109,6 +108,9 @@ func initFlagSet(fs *pflag.FlagSet, cfg *config.Config, m *lintersdb.Manager, is const allowParallelDesc = "Allow multiple parallel golangci-lint instances running. " + "If false (default) - golangci-lint acquires file lock on start." fs.BoolVar(&rc.AllowParallelRunners, "allow-parallel-runners", false, wh(allowParallelDesc)) + const allowSerialDesc = "Allow multiple golangci-lint instances running, but serialize them around a lock. " + + "If false (default) - golangci-lint exits with an error if it fails to acquire file lock on start." 
+ fs.BoolVar(&rc.AllowSerialRunners, "allow-serial-runners", false, wh(allowSerialDesc)) // Linters settings config lsc := &cfg.LintersSettings @@ -153,12 +155,27 @@ func initFlagSet(fs *pflag.FlagSet, cfg *config.Config, m *lintersdb.Manager, is 150, "Dupl: Minimal threshold to detect copy-paste") hideFlag("dupl.threshold") + fs.BoolVar(&lsc.Goconst.MatchWithConstants, "goconst.match-constant", + true, "Goconst: look for existing constants matching the values") + hideFlag("goconst.match-constant") fs.IntVar(&lsc.Goconst.MinStringLen, "goconst.min-len", 3, "Goconst: minimum constant string length") hideFlag("goconst.min-len") fs.IntVar(&lsc.Goconst.MinOccurrencesCount, "goconst.min-occurrences", 3, "Goconst: minimum occurrences of constant string count to trigger issue") hideFlag("goconst.min-occurrences") + fs.BoolVar(&lsc.Goconst.ParseNumbers, "goconst.numbers", + false, "Goconst: search also for duplicated numbers") + hideFlag("goconst.numbers") + fs.IntVar(&lsc.Goconst.NumberMin, "goconst.min", + 3, "minimum value, only works with goconst.numbers") + hideFlag("goconst.min") + fs.IntVar(&lsc.Goconst.NumberMax, "goconst.max", + 3, "maximum value, only works with goconst.numbers") + hideFlag("goconst.max") + fs.BoolVar(&lsc.Goconst.IgnoreCalls, "goconst.ignore-calls", + true, "Goconst: ignore when constant is not used as function argument") + hideFlag("goconst.ignore-calls") // (@dixonwille) These flag is only used for testing purposes. fs.StringSliceVar(&lsc.Depguard.Packages, "depguard.packages", nil, @@ -232,6 +249,7 @@ func (e *Executor) getConfigForCommandLine() (*config.Config, error) { // Use another config variable here, not e.cfg, to not // affect main parsing by this parsing of only config option. initFlagSet(fs, &cfg, e.DBManager, false) + initVersionFlagSet(fs, &cfg) // Parse max options, even force version option: don't want // to get access to Executor here: it's error-prone to use @@ -253,7 +271,7 @@ func (e *Executor) getConfigForCommandLine() (*config.Config, error) { func (e *Executor) initRun() { e.runCmd = &cobra.Command{ Use: "run", - Short: welcomeMessage, + Short: "Run the linters", Run: e.executeRun, PreRun: func(_ *cobra.Command, _ []string) { if ok := e.acquireFileLock(); !ok { @@ -266,7 +284,8 @@ func (e *Executor) initRun() { } e.rootCmd.AddCommand(e.runCmd) - e.runCmd.SetOutput(logutils.StdOut) // use custom output to properly color it in Windows terminals + e.runCmd.SetOut(logutils.StdOut) // use custom output to properly color it in Windows terminals + e.runCmd.SetErr(logutils.StdErr) e.initRunConfiguration(e.runCmd) } @@ -446,8 +465,8 @@ func (e *Executor) executeRun(_ *cobra.Command, args []string) { // to be removed when deadline is finally decommissioned func (e *Executor) setTimeoutToDeadlineIfOnlyDeadlineIsSet() { - //lint:ignore SA1019 We want to promoted the deprecated config value when needed - deadlineValue := e.cfg.Run.Deadline // nolint:staticcheck + // nolint:staticcheck + deadlineValue := e.cfg.Run.Deadline if deadlineValue != 0 && e.cfg.Run.Timeout == defaultTimeout { e.cfg.Run.Timeout = deadlineValue } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/version.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/version.go index fdb5aa8..3918d6b 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/commands/version.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/version.go @@ -1,17 +1,59 @@ package commands import ( + "encoding/json" + "strings" + "github.com/spf13/cobra" + "github.com/spf13/pflag" + + 
"github.com/golangci/golangci-lint/pkg/config" ) +type jsonVersion struct { + Version string `json:"version"` + Commit string `json:"commit"` + Date string `json:"date"` +} + +func (e *Executor) initVersionConfiguration(cmd *cobra.Command) { + fs := cmd.Flags() + fs.SortFlags = false // sort them as they are defined here + initVersionFlagSet(fs, e.cfg) +} + +func initVersionFlagSet(fs *pflag.FlagSet, cfg *config.Config) { + // Version config + vc := &cfg.Version + fs.StringVar(&vc.Format, "format", "", wh("The version's format can be: 'short', 'json'")) +} + func (e *Executor) initVersion() { versionCmd := &cobra.Command{ Use: "version", Short: "Version", - Run: func(cmd *cobra.Command, _ []string) { - cmd.Printf("golangci-lint has version %s built from %s on %s\n", e.version, e.commit, e.date) + RunE: func(cmd *cobra.Command, _ []string) error { + switch strings.ToLower(e.cfg.Version.Format) { + case "short": + cmd.Println(e.version) + case "json": + ver := jsonVersion{ + Version: e.version, + Commit: e.commit, + Date: e.date, + } + data, err := json.Marshal(&ver) + if err != nil { + return err + } + cmd.Println(string(data)) + default: + cmd.Printf("golangci-lint has version %s built from %s on %s\n", e.version, e.commit, e.date) + } + return nil }, } e.rootCmd.AddCommand(versionCmd) + e.initVersionConfiguration(versionCmd) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/config.go b/vendor/github.com/golangci/golangci-lint/pkg/config/config.go index 89e3559..2e95c16 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/config/config.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/config/config.go @@ -99,22 +99,33 @@ var DefaultExcludePatterns = []ExcludePattern{ Linter: "gosec", Why: "False positive is triggered by 'src, err := ioutil.ReadFile(filename)'", }, + { + ID: "EXC0011", + Pattern: "(comment on exported (method|function|type|const)|" + + "should have( a package)? comment|comment should be of the form)", + Linter: "stylecheck", + Why: "Annoying issue about not having a comment. 
The rare codebase has such comments", + }, } func GetDefaultExcludePatternsStrings() []string { - return GetExcludePatternsStrings(nil) + ret := make([]string, len(DefaultExcludePatterns)) + for i, p := range DefaultExcludePatterns { + ret[i] = p.Pattern + } + return ret } -func GetExcludePatternsStrings(include []string) []string { +func GetExcludePatterns(include []string) []ExcludePattern { includeMap := make(map[string]bool, len(include)) for _, inc := range include { includeMap[inc] = true } - var ret []string + var ret []ExcludePattern for _, p := range DefaultExcludePatterns { if !includeMap[p.ID] { - ret = append(ret, p.Pattern) + ret = append(ret, p) } } @@ -152,9 +163,13 @@ type Run struct { UseDefaultSkipDirs bool `mapstructure:"skip-dirs-use-default"` AllowParallelRunners bool `mapstructure:"allow-parallel-runners"` + AllowSerialRunners bool `mapstructure:"allow-serial-runners"` } type LintersSettings struct { + Gci struct { + LocalPrefixes string `mapstructure:"local-prefixes"` + } Govet GovetSettings Golint struct { MinConfidence float64 `mapstructure:"min-confidence"` @@ -181,8 +196,13 @@ type LintersSettings struct { Threshold int } Goconst struct { - MinStringLen int `mapstructure:"min-len"` - MinOccurrencesCount int `mapstructure:"min-occurrences"` + MatchWithConstants bool `mapstructure:"match-constant"` + MinStringLen int `mapstructure:"min-len"` + MinOccurrencesCount int `mapstructure:"min-occurrences"` + ParseNumbers bool `mapstructure:"numbers"` + NumberMin int `mapstructure:"min"` + NumberMax int `mapstructure:"max"` + IgnoreCalls bool `mapstructure:"ignore-calls"` } Gomnd struct { Settings map[string]map[string]interface{} @@ -221,6 +241,11 @@ type LintersSettings struct { Recommendations []string `mapstructure:"recommendations"` Reason string `mapstructure:"reason"` } `mapstructure:"modules"` + Versions []map[string]struct { + Version string `mapstructure:"version"` + Reason string `mapstructure:"reason"` + } `mapstructure:"versions"` + LocalReplaceDirectives bool `mapstructure:"local_replace_directives"` } `mapstructure:"blocked"` } @@ -235,13 +260,28 @@ type LintersSettings struct { Dogsled DogsledSettings Gocognit GocognitSettings Godot GodotSettings + Goheader GoHeaderSettings Testpackage TestpackageSettings Nestif NestifSettings NoLintLint NoLintLintSettings + Exhaustive ExhaustiveSettings + Gofumpt GofumptSettings + ErrorLint ErrorLintSettings + Makezero MakezeroSettings + Thelper ThelperSettings + Forbidigo ForbidigoSettings + Ifshort IfshortSettings + Predeclared PredeclaredSettings Custom map[string]CustomLinterSettings } +type GoHeaderSettings struct { + Values map[string]map[string]string `mapstructure:"values"` + Template string `mapstructure:"template"` + TemplatePath string `mapstructure:"template-path"` +} + type GovetSettings struct { CheckShadowing bool `mapstructure:"check-shadowing"` Settings map[string]map[string]interface{} @@ -316,6 +356,10 @@ type WSLSettings struct { } type GodotSettings struct { + Scope string `mapstructure:"scope"` + Capital bool `mapstructure:"capital"` + + // Deprecated: use `Scope` instead CheckAll bool `mapstructure:"check-all"` } @@ -335,6 +379,50 @@ type NestifSettings struct { MinComplexity int `mapstructure:"min-complexity"` } +type ExhaustiveSettings struct { + CheckGenerated bool `mapstructure:"check-generated"` + DefaultSignifiesExhaustive bool `mapstructure:"default-signifies-exhaustive"` +} + +type GofumptSettings struct { + ExtraRules bool `mapstructure:"extra-rules"` +} + +type ErrorLintSettings struct { + 
Errorf bool `mapstructure:"errorf"` +} + +type MakezeroSettings struct { + Always bool +} + +type ThelperSettings struct { + Test struct { + First bool `mapstructure:"first"` + Name bool `mapstructure:"name"` + Begin bool `mapstructure:"begin"` + } `mapstructure:"test"` + Benchmark struct { + First bool `mapstructure:"first"` + Name bool `mapstructure:"name"` + Begin bool `mapstructure:"begin"` + } `mapstructure:"benchmark"` +} + +type IfshortSettings struct { + MaxDeclLines int `mapstructure:"max-decl-lines"` + MaxDeclChars int `mapstructure:"max-decl-chars"` +} + +type ForbidigoSettings struct { + Forbid []string `mapstructure:"forbid"` +} + +type PredeclaredSettings struct { + Ignore string `mapstructure:"ignore"` + Qualified bool `mapstructure:"q"` +} + var defaultLintersSettings = LintersSettings{ Lll: LllSettings{ LineLength: 120, @@ -385,6 +473,20 @@ var defaultLintersSettings = LintersSettings{ Nestif: NestifSettings{ MinComplexity: 5, }, + Exhaustive: ExhaustiveSettings{ + CheckGenerated: false, + DefaultSignifiesExhaustive: false, + }, + Gofumpt: GofumptSettings{ + ExtraRules: false, + }, + ErrorLint: ErrorLintSettings{ + Errorf: true, + }, + Predeclared: PredeclaredSettings{ + Ignore: "", + Qualified: false, + }, } type CustomLinterSettings struct { @@ -403,51 +505,71 @@ type Linters struct { Presets []string } -type ExcludeRule struct { +type BaseRule struct { Linters []string Path string Text string Source string } -func validateOptionalRegex(value string) error { - if value == "" { - return nil - } - _, err := regexp.Compile(value) - return err -} - -func (e ExcludeRule) Validate() error { - if err := validateOptionalRegex(e.Path); err != nil { +func (b BaseRule) Validate(minConditionsCount int) error { + if err := validateOptionalRegex(b.Path); err != nil { return fmt.Errorf("invalid path regex: %v", err) } - if err := validateOptionalRegex(e.Text); err != nil { + if err := validateOptionalRegex(b.Text); err != nil { return fmt.Errorf("invalid text regex: %v", err) } - if err := validateOptionalRegex(e.Source); err != nil { + if err := validateOptionalRegex(b.Source); err != nil { return fmt.Errorf("invalid source regex: %v", err) } nonBlank := 0 - if len(e.Linters) > 0 { + if len(b.Linters) > 0 { nonBlank++ } - if e.Path != "" { + if b.Path != "" { nonBlank++ } - if e.Text != "" { + if b.Text != "" { nonBlank++ } - if e.Source != "" { + if b.Source != "" { nonBlank++ } - const minConditionsCount = 2 if nonBlank < minConditionsCount { - return errors.New("at least 2 of (text, source, path, linters) should be set") + return fmt.Errorf("at least %d of (text, source, path, linters) should be set", minConditionsCount) } return nil } +const excludeRuleMinConditionsCount = 2 + +type ExcludeRule struct { + BaseRule `mapstructure:",squash"` +} + +func validateOptionalRegex(value string) error { + if value == "" { + return nil + } + _, err := regexp.Compile(value) + return err +} + +func (e ExcludeRule) Validate() error { + return e.BaseRule.Validate(excludeRuleMinConditionsCount) +} + +const severityRuleMinConditionsCount = 1 + +type SeverityRule struct { + BaseRule `mapstructure:",squash"` + Severity string +} + +func (s *SeverityRule) Validate() error { + return s.BaseRule.Validate(severityRuleMinConditionsCount) +} + type Issues struct { IncludeDefaultExcludes []string `mapstructure:"include"` ExcludeCaseSensitive bool `mapstructure:"exclude-case-sensitive"` @@ -465,21 +587,35 @@ type Issues struct { NeedFix bool `mapstructure:"fix"` } +type Severity struct { + Default string 
`mapstructure:"default-severity"` + CaseSensitive bool `mapstructure:"case-sensitive"` + Rules []SeverityRule `mapstructure:"rules"` +} + +type Version struct { + Format string `mapstructure:"format"` +} + type Config struct { Run Run Output struct { Format string Color string - PrintIssuedLine bool `mapstructure:"print-issued-lines"` - PrintLinterName bool `mapstructure:"print-linter-name"` - UniqByLine bool `mapstructure:"uniq-by-line"` - PrintWelcomeMessage bool `mapstructure:"print-welcome"` + PrintIssuedLine bool `mapstructure:"print-issued-lines"` + PrintLinterName bool `mapstructure:"print-linter-name"` + UniqByLine bool `mapstructure:"uniq-by-line"` + SortResults bool `mapstructure:"sort-results"` + PrintWelcomeMessage bool `mapstructure:"print-welcome"` + PathPrefix string `mapstructure:"path-prefix"` } LintersSettings LintersSettings `mapstructure:"linters-settings"` Linters Linters Issues Issues + Severity Severity + Version Version InternalTest bool // Option is used only for testing golangci-lint code, don't use it } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/config_gocritic.go b/vendor/github.com/golangci/golangci-lint/pkg/config/config_gocritic.go index bbfd7bb..ac0d644 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/config/config_gocritic.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/config/config_gocritic.go @@ -5,7 +5,7 @@ import ( "sort" "strings" - "github.com/go-lintpack/lintpack" + "github.com/go-critic/go-critic/framework/linter" "github.com/pkg/errors" _ "github.com/go-critic/go-critic/checkers" // this import register checkers @@ -18,9 +18,9 @@ const gocriticDebugKey = "gocritic" var ( gocriticDebugf = logutils.Debug(gocriticDebugKey) isGocriticDebug = logutils.HaveDebugTag(gocriticDebugKey) - allGocriticCheckers = lintpack.GetCheckersInfo() - allGocriticCheckerMap = func() map[string]*lintpack.CheckerInfo { - checkInfoMap := make(map[string]*lintpack.CheckerInfo) + allGocriticCheckers = linter.GetCheckersInfo() + allGocriticCheckerMap = func() map[string]*linter.CheckerInfo { + checkInfoMap := make(map[string]*linter.CheckerInfo) for _, checkInfo := range allGocriticCheckers { checkInfoMap[checkInfo.Name] = checkInfo } @@ -242,13 +242,7 @@ func (s *GocriticSettings) Validate(log logutils.Log) error { return errors.Wrap(err, "validate disabled checks") } - for checkName := range s.SettingsPerCheck { - if !s.IsCheckEnabled(checkName) { - log.Warnf("Gocritic settings were provided for not enabled check %q", checkName) - } - } - - if err := s.validateCheckerNames(); err != nil { + if err := s.validateCheckerNames(log); err != nil { return errors.Wrap(err, "validation failed") } @@ -272,6 +266,7 @@ func sprintStrings(ss []string) string { return fmt.Sprint(ss) } +// getAllCheckerNames returns a map containing all checker names supported by gocritic. 
func getAllCheckerNames() map[string]bool { allCheckerNames := map[string]bool{} for _, checker := range allGocriticCheckers { @@ -281,7 +276,7 @@ func getAllCheckerNames() map[string]bool { return allCheckerNames } -func isEnabledByDefaultGocriticCheck(info *lintpack.CheckerInfo) bool { +func isEnabledByDefaultGocriticCheck(info *linter.CheckerInfo) bool { return !info.HasTag("experimental") && !info.HasTag("opinionated") && !info.HasTag("performance") @@ -290,9 +285,6 @@ func isEnabledByDefaultGocriticCheck(info *lintpack.CheckerInfo) bool { func getDefaultEnabledGocriticCheckersNames() []string { var enabled []string for _, info := range allGocriticCheckers { - // get in sync with lintpack behavior in bindDefaultEnabledList - // in https://github.com/go-lintpack/lintpack/blob/master/linter/lintmain/internal/check/check.go#L317 - enable := isEnabledByDefaultGocriticCheck(info) if enable { enabled = append(enabled, info.Name) @@ -305,9 +297,6 @@ func getDefaultEnabledGocriticCheckersNames() []string { func getDefaultDisabledGocriticCheckersNames() []string { var disabled []string for _, info := range allGocriticCheckers { - // get in sync with lintpack behavior in bindDefaultEnabledList - // in https://github.com/go-lintpack/lintpack/blob/master/linter/lintmain/internal/check/check.go#L317 - enable := isEnabledByDefaultGocriticCheck(info) if !enable { disabled = append(disabled, info.Name) @@ -317,7 +306,7 @@ func getDefaultDisabledGocriticCheckersNames() []string { return disabled } -func (s *GocriticSettings) validateCheckerNames() error { +func (s *GocriticSettings) validateCheckerNames(log logutils.Log) error { allowedNames := getAllCheckerNames() for _, name := range s.EnabledChecks { @@ -334,6 +323,16 @@ func (s *GocriticSettings) validateCheckerNames() error { } } + for checkName := range s.SettingsPerCheck { + if _, ok := allowedNames[checkName]; !ok { + return fmt.Errorf("invalid setting, checker %s doesn't exist, all existing checkers: %s", + checkName, sprintAllowedCheckerNames(allowedNames)) + } + if !s.IsCheckEnabled(checkName) { + log.Warnf("Gocritic settings were provided for not enabled check %q", checkName) + } + } + return nil } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/reader.go b/vendor/github.com/golangci/golangci-lint/pkg/config/reader.go index 1e355e7..00722ba 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/config/reader.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/config/reader.go @@ -12,6 +12,7 @@ import ( "github.com/golangci/golangci-lint/pkg/fsutils" "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/sliceutil" ) type FileReader struct { @@ -113,6 +114,14 @@ func (r *FileReader) validateConfig() error { return fmt.Errorf("error in exclude rule #%d: %v", i, err) } } + if len(c.Severity.Rules) > 0 && c.Severity.Default == "" { + return errors.New("can't set severity rule option: no default severity defined") + } + for i, rule := range c.Severity.Rules { + if err := rule.Validate(); err != nil { + return fmt.Errorf("error in severity rule #%d: %v", i, err) + } + } if err := c.LintersSettings.Govet.Validate(); err != nil { return fmt.Errorf("error in govet config: %v", err) } @@ -162,6 +171,7 @@ func (r *FileReader) setupConfigFileSearch() { // find all dirs from it up to the root configSearchPaths := []string{"./"} + for { configSearchPaths = append(configSearchPaths, curDir) newCurDir := filepath.Dir(curDir) @@ -171,6 +181,13 @@ func (r *FileReader) setupConfigFileSearch() { curDir = 
newCurDir } + // find home directory for global config + if home, err := homedir.Dir(); err != nil { + r.log.Warnf("Can't get user's home directory: %s", err.Error()) + } else if !sliceutil.Contains(configSearchPaths, home) { + configSearchPaths = append(configSearchPaths, home) + } + r.log.Infof("Config search paths: %s", configSearchPaths) viper.SetConfigName(".golangci") for _, p := range configSearchPaths { diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errorlint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errorlint.go new file mode 100644 index 0000000..5b656d1 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errorlint.go @@ -0,0 +1,27 @@ +package golinters + +import ( + "github.com/polyfloyd/go-errorlint/errorlint" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewErrorLint(cfg *config.ErrorLintSettings) *goanalysis.Linter { + a := errorlint.NewAnalyzer() + cfgMap := map[string]map[string]interface{}{} + if cfg != nil { + cfgMap[a.Name] = map[string]interface{}{ + "errorf": cfg.Errorf, + } + } + return goanalysis.NewLinter( + "errorlint", + "go-errorlint is a source code linter for Go software "+ + "that can be used to find code that will cause problems"+ + "with the error wrapping scheme introduced in Go 1.13.", + []*analysis.Analyzer{a}, + cfgMap, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustive.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustive.go new file mode 100644 index 0000000..85534d4 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustive.go @@ -0,0 +1,26 @@ +package golinters + +import ( + "github.com/nishanths/exhaustive" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewExhaustive(settings *config.ExhaustiveSettings) *goanalysis.Linter { + a := exhaustive.Analyzer + + var cfg map[string]map[string]interface{} + if settings != nil { + cfg = map[string]map[string]interface{}{ + a.Name: { + exhaustive.CheckGeneratedFlag: settings.CheckGenerated, + exhaustive.DefaultSignifiesExhaustiveFlag: settings.DefaultSignifiesExhaustive, + }, + } + } + + return goanalysis.NewLinter(a.Name, a.Doc, []*analysis.Analyzer{a}, cfg). 
+ WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustivestruct.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustivestruct.go new file mode 100644 index 0000000..d847461 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustivestruct.go @@ -0,0 +1,17 @@ +package golinters + +import ( + "github.com/mbilski/exhaustivestruct/pkg/analyzer" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewExhaustiveStruct() *goanalysis.Linter { + return goanalysis.NewLinter( + "exhaustivestruct", + "Checks if all struct's fields are initialized", + []*analysis.Analyzer{analyzer.Analyzer}, + nil, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exportloopref.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exportloopref.go new file mode 100644 index 0000000..1131c57 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exportloopref.go @@ -0,0 +1,19 @@ +package golinters + +import ( + "github.com/kyoh86/exportloopref" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewExportLoopRef() *goanalysis.Linter { + a := exportloopref.Analyzer + + return goanalysis.NewLinter( + a.Name, + a.Doc, + []*analysis.Analyzer{a}, + nil, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/forbidigo.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/forbidigo.go new file mode 100644 index 0000000..41d036c --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/forbidigo.go @@ -0,0 +1,65 @@ +package golinters + +import ( + "sync" + + "github.com/ashanbrown/forbidigo/forbidigo" + "github.com/pkg/errors" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +func NewForbidigo() *goanalysis.Linter { + const linterName = "forbidigo" + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: linterName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + linterName, + "Forbids identifiers", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + s := &lintCtx.Settings().Forbidigo + + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + var res []goanalysis.Issue + forbid, err := forbidigo.NewLinter(s.Forbid) + if err != nil { + return nil, errors.Wrapf(err, "failed to create linter %q", linterName) + } + + for _, file := range pass.Files { + hints, err := forbid.Run(pass.Fset, file) + if err != nil { + return nil, errors.Wrapf(err, "forbidigo linter failed on file %q", file.Name.String()) + } + for _, hint := range hints { + res = append(res, goanalysis.NewIssue(&result.Issue{ + Pos: hint.Position(), + Text: hint.Details(), + FromLinter: linterName, + }, pass)) + } + } + + if len(res) == 0 { + return nil, nil + } + + mu.Lock() + resIssues = append(resIssues, res...) 
+ mu.Unlock() + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci.go new file mode 100644 index 0000000..6fa4354 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci.go @@ -0,0 +1,92 @@ +package golinters + +import ( + "bytes" + "fmt" + "sync" + + "github.com/daixiang0/gci/pkg/gci" + "github.com/pkg/errors" + "github.com/shazow/go-diff/difflib" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" +) + +const gciName = "gci" + +func NewGci() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + differ := difflib.New() + + analyzer := &analysis.Analyzer{ + Name: gciName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + gciName, + "Gci control golang package import order and make it always deterministic.", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + localFlag := lintCtx.Settings().Gci.LocalPrefixes + goimportsFlag := lintCtx.Settings().Goimports.LocalPrefixes + if localFlag == "" && goimportsFlag != "" { + localFlag = goimportsFlag + } + + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + var fileNames []string + for _, f := range pass.Files { + pos := pass.Fset.PositionFor(f.Pos(), false) + fileNames = append(fileNames, pos.Filename) + } + + var issues []goanalysis.Issue + + for _, f := range fileNames { + source, result, err := gci.Run(f, &gci.FlagSet{LocalFlag: localFlag}) + if err != nil { + return nil, err + } + if result == nil { + continue + } + + diff := bytes.Buffer{} + _, err = diff.WriteString(fmt.Sprintf("--- %[1]s\n+++ %[1]s\n", f)) + if err != nil { + return nil, fmt.Errorf("can't write diff header: %v", err) + } + + err = differ.Diff(&diff, bytes.NewReader(source), bytes.NewReader(result)) + if err != nil { + return nil, fmt.Errorf("can't get gci diff output: %v", err) + } + + is, err := extractIssuesFromPatch(diff.String(), lintCtx.Log, lintCtx, gciName) + if err != nil { + return nil, errors.Wrapf(err, "can't extract issues from gci diff output %q", diff.String()) + } + + for i := range is { + issues = append(issues, goanalysis.NewIssue(&is[i], pass)) + } + } + + if len(issues) == 0 { + return nil, nil + } + + mu.Lock() + resIssues = append(resIssues, issues...) + mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner.go index d072942..8122527 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner.go @@ -3,7 +3,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package checker defines the implementation of the checker commands. +// Package goanalysis defines the implementation of the checker commands. 
// The same code drives the multi-analysis driver, the single-analysis // driver that is conventionally provided for convenience along with // each analysis package, and the test driver. @@ -935,7 +935,8 @@ func sizeOfReflectValueTreeBytes(rv reflect.Value, visitedPtrs map[uintptr]struc return rv.Len() case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, - reflect.Uintptr, reflect.Bool, reflect.Float32, reflect.Float64, reflect.UnsafePointer: + reflect.Uintptr, reflect.Bool, reflect.Float32, reflect.Float64, + reflect.Complex64, reflect.Complex128, reflect.Func, reflect.UnsafePointer: return int(rv.Type().Size()) case reflect.Invalid: return 0 diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals.go index f216641..37fed44 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals.go @@ -1,100 +1,30 @@ package golinters import ( - "fmt" - "go/ast" - "go/token" - "strings" - "sync" - "golang.org/x/tools/go/analysis" + "4d63.com/gochecknoglobals/checknoglobals" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" ) -const gochecknoglobalsName = "gochecknoglobals" - -//nolint:dupl func NewGochecknoglobals() *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: gochecknoglobalsName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { - var res []goanalysis.Issue - for _, file := range pass.Files { - fileIssues := checkFileForGlobals(file, pass.Fset) - for i := range fileIssues { - res = append(res, goanalysis.NewIssue(&fileIssues[i], pass)) - } - } - if len(res) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, res...) - mu.Unlock() - - return nil, nil + gochecknoglobals := checknoglobals.Analyzer() + + // gochecknoglobals only lints test files if the `-t` flag is passed so we + // pass the `t` flag as true to the analyzer before running it. This can be + // turned of by using the regular golangci-lint flags such as `--tests` or + // `--skip-files`. 
+ linterConfig := map[string]map[string]interface{}{ + gochecknoglobals.Name: { + "t": true, }, } - return goanalysis.NewLinter( - gochecknoglobalsName, - "Checks that no globals are present in Go code", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func checkFileForGlobals(f *ast.File, fset *token.FileSet) []result.Issue { - var res []result.Issue - for _, decl := range f.Decls { - genDecl, ok := decl.(*ast.GenDecl) - if !ok { - continue - } - if genDecl.Tok != token.VAR { - continue - } - - for _, spec := range genDecl.Specs { - valueSpec := spec.(*ast.ValueSpec) - for _, vn := range valueSpec.Names { - if isWhitelisted(vn) { - continue - } - - res = append(res, result.Issue{ - Pos: fset.Position(vn.Pos()), - Text: fmt.Sprintf("%s is a global variable", formatCode(vn.Name, nil)), - FromLinter: gochecknoglobalsName, - }) - } - } - } - - return res -} -func isWhitelisted(i *ast.Ident) bool { - return i.Name == "_" || i.Name == "version" || looksLikeError(i) -} - -// looksLikeError returns true if the AST identifier starts -// with 'err' or 'Err', or false otherwise. -// -// TODO: https://github.com/leighmcculloch/gochecknoglobals/issues/5 -func looksLikeError(i *ast.Ident) bool { - prefix := "err" - if i.IsExported() { - prefix = "Err" - } - return strings.HasPrefix(i.Name, prefix) + return goanalysis.NewLinter( + gochecknoglobals.Name, + gochecknoglobals.Doc, + []*analysis.Analyzer{gochecknoglobals}, + linterConfig, + ).WithLoadMode(goanalysis.LoadModeSyntax) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits.go index 18465b1..f9715bd 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits.go @@ -15,7 +15,6 @@ import ( const gochecknoinitsName = "gochecknoinits" -//nolint:dupl func NewGochecknoinits() *goanalysis.Linter { var mu sync.Mutex var resIssues []goanalysis.Issue diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goconst.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goconst.go index 8e91fee..edda310 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goconst.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goconst.go @@ -4,7 +4,7 @@ import ( "fmt" "sync" - goconstAPI "github.com/golangci/goconst" + goconstAPI "github.com/jgautheron/goconst" "golang.org/x/tools/go/analysis" "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" @@ -47,11 +47,17 @@ func NewGoconst() *goanalysis.Linter { func checkConstants(pass *analysis.Pass, lintCtx *linter.Context) ([]goanalysis.Issue, error) { cfg := goconstAPI.Config{ - MatchWithConstants: true, + MatchWithConstants: lintCtx.Settings().Goconst.MatchWithConstants, MinStringLength: lintCtx.Settings().Goconst.MinStringLen, MinOccurrences: lintCtx.Settings().Goconst.MinOccurrencesCount, + ParseNumbers: lintCtx.Settings().Goconst.ParseNumbers, + NumberMin: lintCtx.Settings().Goconst.NumberMin, + NumberMax: lintCtx.Settings().Goconst.NumberMax, + ExcludeTypes: map[goconstAPI.Type]bool{}, + } + if lintCtx.Settings().Goconst.IgnoreCalls { + cfg.ExcludeTypes[goconstAPI.Call] = true } - goconstIssues, err := goconstAPI.Run(pass.Files, pass.Fset, &cfg) if err != nil { return nil, err @@ -63,7 +69,7 @@ func checkConstants(pass *analysis.Pass, 
lintCtx *linter.Context) ([]goanalysis. res := make([]goanalysis.Issue, 0, len(goconstIssues)) for _, i := range goconstIssues { - textBegin := fmt.Sprintf("string %s has %d occurrences", formatCode(i.Str, lintCtx.Cfg), i.OccurencesCount) + textBegin := fmt.Sprintf("string %s has %d occurrences", formatCode(i.Str, lintCtx.Cfg), i.OccurrencesCount) var textEnd string if i.MatchingConst == "" { textEnd = ", make it a constant" diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic.go index fb29252..7181c48 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic.go @@ -10,7 +10,7 @@ import ( "strings" "sync" - "github.com/go-lintpack/lintpack" + gocriticlinter "github.com/go-critic/go-critic/framework/linter" "golang.org/x/tools/go/analysis" "github.com/golangci/golangci-lint/pkg/config" @@ -38,15 +38,15 @@ func NewGocritic() *goanalysis.Linter { nil, ).WithContextSetter(func(lintCtx *linter.Context) { analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { - lintpackCtx := lintpack.NewContext(pass.Fset, sizes) - enabledCheckers, err := buildEnabledCheckers(lintCtx, lintpackCtx) + linterCtx := gocriticlinter.NewContext(pass.Fset, sizes) + enabledCheckers, err := buildEnabledCheckers(lintCtx, linterCtx) if err != nil { return nil, err } - lintpackCtx.SetPackageInfo(pass.TypesInfo, pass.Pkg) + linterCtx.SetPackageInfo(pass.TypesInfo, pass.Pkg) var res []goanalysis.Issue - pkgIssues := runGocriticOnPackage(lintpackCtx, enabledCheckers, pass.Files) + pkgIssues := runGocriticOnPackage(linterCtx, enabledCheckers, pass.Files) for i := range pkgIssues { res = append(res, goanalysis.NewIssue(&pkgIssues[i], pass)) } @@ -65,9 +65,9 @@ func NewGocritic() *goanalysis.Linter { }).WithLoadMode(goanalysis.LoadModeTypesInfo) } -func normalizeCheckerInfoParams(info *lintpack.CheckerInfo) lintpack.CheckerParams { +func normalizeCheckerInfoParams(info *gocriticlinter.CheckerInfo) gocriticlinter.CheckerParams { // lowercase info param keys here because golangci-lint's config parser lowercases all strings - ret := lintpack.CheckerParams{} + ret := gocriticlinter.CheckerParams{} for k, v := range info.Params { ret[strings.ToLower(k)] = v } @@ -75,7 +75,7 @@ func normalizeCheckerInfoParams(info *lintpack.CheckerInfo) lintpack.CheckerPara return ret } -func configureCheckerInfo(info *lintpack.CheckerInfo, allParams map[string]config.GocriticCheckSettings) error { +func configureCheckerInfo(info *gocriticlinter.CheckerInfo, allParams map[string]config.GocriticCheckSettings) error { params := allParams[strings.ToLower(info.Name)] if params == nil { // no config for this checker return nil @@ -108,12 +108,12 @@ func configureCheckerInfo(info *lintpack.CheckerInfo, allParams map[string]confi return nil } -func buildEnabledCheckers(lintCtx *linter.Context, lintpackCtx *lintpack.Context) ([]*lintpack.Checker, error) { +func buildEnabledCheckers(lintCtx *linter.Context, linterCtx *gocriticlinter.Context) ([]*gocriticlinter.Checker, error) { s := lintCtx.Settings().Gocritic allParams := s.GetLowercasedParams() - var enabledCheckers []*lintpack.Checker - for _, info := range lintpack.GetCheckersInfo() { + var enabledCheckers []*gocriticlinter.Checker + for _, info := range gocriticlinter.GetCheckersInfo() { if !s.IsCheckEnabled(info.Name) { continue } @@ -122,27 +122,27 @@ func buildEnabledCheckers(lintCtx *linter.Context, lintpackCtx 
*lintpack.Context return nil, err } - c := lintpack.NewChecker(lintpackCtx, info) + c := gocriticlinter.NewChecker(linterCtx, info) enabledCheckers = append(enabledCheckers, c) } return enabledCheckers, nil } -func runGocriticOnPackage(lintpackCtx *lintpack.Context, checkers []*lintpack.Checker, +func runGocriticOnPackage(linterCtx *gocriticlinter.Context, checkers []*gocriticlinter.Checker, files []*ast.File) []result.Issue { var res []result.Issue for _, f := range files { - filename := filepath.Base(lintpackCtx.FileSet.Position(f.Pos()).Filename) - lintpackCtx.SetFileInfo(filename, f) + filename := filepath.Base(linterCtx.FileSet.Position(f.Pos()).Filename) + linterCtx.SetFileInfo(filename, f) - issues := runGocriticOnFile(lintpackCtx, f, checkers) + issues := runGocriticOnFile(linterCtx, f, checkers) res = append(res, issues...) } return res } -func runGocriticOnFile(ctx *lintpack.Context, f *ast.File, checkers []*lintpack.Checker) []result.Issue { +func runGocriticOnFile(ctx *gocriticlinter.Context, f *ast.File, checkers []*gocriticlinter.Checker) []result.Issue { var res []result.Issue for _, c := range checkers { diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go index 7e5b233..e752634 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go @@ -28,12 +28,29 @@ func NewGodot() *goanalysis.Linter { nil, ).WithContextSetter(func(lintCtx *linter.Context) { cfg := lintCtx.Cfg.LintersSettings.Godot - settings := godot.Settings{CheckAll: cfg.CheckAll} + settings := godot.Settings{ + Scope: godot.Scope(cfg.Scope), + Period: true, + Capital: cfg.Capital, + } + + // Convert deprecated setting + if cfg.CheckAll { // nolint: staticcheck + settings.Scope = godot.TopLevelScope + } + + if settings.Scope == "" { + settings.Scope = godot.DeclScope + } analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { - var issues []godot.Message + var issues []godot.Issue for _, file := range pass.Files { - issues = append(issues, godot.Run(file, pass.Fset, settings)...) + iss, err := godot.Run(file, pass.Fset, settings) + if err != nil { + return nil, err + } + issues = append(issues, iss...) 
} if len(issues) == 0 { @@ -46,6 +63,9 @@ func NewGodot() *goanalysis.Linter { Pos: i.Pos, Text: i.Message, FromLinter: godotName, + Replacement: &result.Replacement{ + NewLines: []string{i.Replacement}, + }, } res[k] = goanalysis.NewIssue(&issue, pass) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt.go index a7bd88a..aa340dc 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt.go @@ -46,7 +46,7 @@ func NewGofmt() *goanalysis.Linter { continue } - is, err := extractIssuesFromPatch(string(diff), lintCtx.Log, lintCtx, false) + is, err := extractIssuesFromPatch(string(diff), lintCtx.Log, lintCtx, gofmtName) if err != nil { return nil, errors.Wrapf(err, "can't extract issues from gofmt diff output %q", string(diff)) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt_common.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt_common.go index fb1e3f6..3235622 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt_common.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt_common.go @@ -207,7 +207,40 @@ func (p *hunkChangesParser) parse(h *diffpkg.Hunk) []Change { return p.ret } -func extractIssuesFromPatch(patch string, log logutils.Log, lintCtx *linter.Context, isGoimports bool) ([]result.Issue, error) { +func getErrorTextForLinter(lintCtx *linter.Context, linterName string) string { + text := "File is not formatted" + switch linterName { + case gofumptName: + text = "File is not `gofumpt`-ed" + if lintCtx.Settings().Gofumpt.ExtraRules { + text += " with `-extra`" + } + case gofmtName: + text = "File is not `gofmt`-ed" + if lintCtx.Settings().Gofmt.Simplify { + text += " with `-s`" + } + case goimportsName: + text = "File is not `goimports`-ed" + if lintCtx.Settings().Goimports.LocalPrefixes != "" { + text += " with -local " + lintCtx.Settings().Goimports.LocalPrefixes + } + case gciName: + text = "File is not `gci`-ed" + localPrefixes := lintCtx.Settings().Gci.LocalPrefixes + goimportsFlag := lintCtx.Settings().Goimports.LocalPrefixes + if localPrefixes == "" && goimportsFlag != "" { + localPrefixes = goimportsFlag + } + + if localPrefixes != "" { + text += " with -local " + localPrefixes + } + } + return text +} + +func extractIssuesFromPatch(patch string, log logutils.Log, lintCtx *linter.Context, linterName string) ([]result.Issue, error) { diffs, err := diffpkg.ParseMultiFileDiff([]byte(patch)) if err != nil { return nil, errors.Wrap(err, "can't parse patch") @@ -225,35 +258,19 @@ func extractIssuesFromPatch(patch string, log logutils.Log, lintCtx *linter.Cont } for _, hunk := range d.Hunks { - var text string - if isGoimports { - text = "File is not `goimports`-ed" - if lintCtx.Settings().Goimports.LocalPrefixes != "" { - text += " with -local " + lintCtx.Settings().Goimports.LocalPrefixes - } - } else { - text = "File is not `gofmt`-ed" - if lintCtx.Settings().Gofmt.Simplify { - text += " with `-s`" - } - } p := hunkChangesParser{ log: log, } changes := p.parse(hunk) for _, change := range changes { change := change // fix scope - linterName := gofmtName - if isGoimports { - linterName = goimportsName - } i := result.Issue{ FromLinter: linterName, Pos: token.Position{ Filename: d.NewName, Line: change.LineRange.From, }, - Text: text, + Text: getErrorTextForLinter(lintCtx, linterName), Replacement: &change.Replacement, } if 
change.LineRange.From != change.LineRange.To { diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofumpt.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofumpt.go new file mode 100644 index 0000000..e91e54e --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofumpt.go @@ -0,0 +1,92 @@ +package golinters + +import ( + "bytes" + "fmt" + "io/ioutil" + "sync" + + "github.com/pkg/errors" + "github.com/shazow/go-diff/difflib" + "golang.org/x/tools/go/analysis" + "mvdan.cc/gofumpt/format" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" +) + +const gofumptName = "gofumpt" + +func NewGofumpt() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + differ := difflib.New() + + analyzer := &analysis.Analyzer{ + Name: gofumptName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + gofumptName, + "Gofumpt checks whether code was gofumpt-ed.", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + var fileNames []string + for _, f := range pass.Files { + pos := pass.Fset.PositionFor(f.Pos(), false) + fileNames = append(fileNames, pos.Filename) + } + + var issues []goanalysis.Issue + + for _, f := range fileNames { + input, err := ioutil.ReadFile(f) + if err != nil { + return nil, fmt.Errorf("unable to open file %s: %w", f, err) + } + output, err := format.Source(input, format.Options{ + ExtraRules: lintCtx.Settings().Gofumpt.ExtraRules, + }) + if err != nil { + return nil, fmt.Errorf("error while running gofumpt: %w", err) + } + if !bytes.Equal(input, output) { + out := bytes.Buffer{} + _, err = out.WriteString(fmt.Sprintf("--- %[1]s\n+++ %[1]s\n", f)) + if err != nil { + return nil, fmt.Errorf("error while running gofumpt: %w", err) + } + + err = differ.Diff(&out, bytes.NewReader(input), bytes.NewReader(output)) + if err != nil { + return nil, fmt.Errorf("error while running gofumpt: %w", err) + } + + diff := out.String() + is, err := extractIssuesFromPatch(diff, lintCtx.Log, lintCtx, gofumptName) + if err != nil { + return nil, errors.Wrapf(err, "can't extract issues from gofumpt diff output %q", diff) + } + + for i := range is { + issues = append(issues, goanalysis.NewIssue(&is[i], pass)) + } + } + } + + if len(issues) == 0 { + return nil, nil + } + + mu.Lock() + resIssues = append(resIssues, issues...) 
+ mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader.go new file mode 100644 index 0000000..2ff587b --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader.go @@ -0,0 +1,85 @@ +package golinters + +import ( + "go/token" + "sync" + + goheader "github.com/denis-tingajkin/go-header" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +const goHeaderName = "goheader" + +func NewGoHeader() *goanalysis.Linter { + var mu sync.Mutex + var issues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: goHeaderName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + goHeaderName, + "Checks is file header matches to pattern", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + cfg := lintCtx.Cfg.LintersSettings.Goheader + c := &goheader.Configuration{ + Values: cfg.Values, + Template: cfg.Template, + TemplatePath: cfg.TemplatePath, + } + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + if c.TemplatePath == "" && c.Template == "" { + // User did not pass template, so then do not run go-header linter + return nil, nil + } + template, err := c.GetTemplate() + if err != nil { + return nil, err + } + values, err := c.GetValues() + if err != nil { + return nil, err + } + a := goheader.New(goheader.WithTemplate(template), goheader.WithValues(values)) + var res []goanalysis.Issue + for _, file := range pass.Files { + path := pass.Fset.Position(file.Pos()).Filename + i := a.Analyze(&goheader.Target{ + File: file, + Path: path, + }) + if i == nil { + continue + } + issue := result.Issue{ + Pos: token.Position{ + Line: i.Location().Line + 1, + Column: i.Location().Position, + Filename: path, + }, + Text: i.Message(), + FromLinter: goHeaderName, + } + res = append(res, goanalysis.NewIssue(&issue, pass)) + } + if len(res) == 0 { + return nil, nil + } + + mu.Lock() + issues = append(issues, res...) 
+ mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return issues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports.go index 97767db..9ea4558 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports.go @@ -47,7 +47,7 @@ func NewGoimports() *goanalysis.Linter { continue } - is, err := extractIssuesFromPatch(string(diff), lintCtx.Log, lintCtx, true) + is, err := extractIssuesFromPatch(string(diff), lintCtx.Log, lintCtx, goimportsName) if err != nil { return nil, errors.Wrapf(err, "can't extract issues from gofmt diff output %q", string(diff)) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomnd.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomnd.go index 0a64f88..f7e71b7 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomnd.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomnd.go @@ -1,7 +1,7 @@ package golinters import ( - mnd "github.com/tommy-muehle/go-mnd" + mnd "github.com/tommy-muehle/go-mnd/v2" "golang.org/x/tools/go/analysis" "github.com/golangci/golangci-lint/pkg/config" diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard.go index 5b14063..af2e6d1 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard.go @@ -1,8 +1,6 @@ package golinters import ( - "log" - "os" "sync" "github.com/ryancurrah/gomodguard" @@ -30,7 +28,9 @@ func NewGomodguard() *goanalysis.Linter { return goanalysis.NewLinter( gomodguardName, - "Allow and block list linter for direct Go module dependencies.", + "Allow and block list linter for direct Go module dependencies. 
"+ + "This is different from depguard where there are different block "+ + "types for example version constraints and module recommendations.", []*analysis.Analyzer{analyzer}, nil, ).WithContextSetter(func(lintCtx *linter.Context) { @@ -38,13 +38,13 @@ func NewGomodguard() *goanalysis.Linter { var ( files = []string{} linterCfg = lintCtx.Cfg.LintersSettings.Gomodguard - processorCfg = gomodguard.Configuration{} + processorCfg = &gomodguard.Configuration{} ) processorCfg.Allowed.Modules = linterCfg.Allowed.Modules processorCfg.Allowed.Domains = linterCfg.Allowed.Domains for n := range linterCfg.Blocked.Modules { for k, v := range linterCfg.Blocked.Modules[n] { - m := gomodguard.BlockedModule{k: gomodguard.Recommendations{ + m := map[string]gomodguard.BlockedModule{k: { Recommendations: v.Recommendations, Reason: v.Reason, }} @@ -53,11 +53,24 @@ func NewGomodguard() *goanalysis.Linter { } } + for n := range linterCfg.Blocked.Versions { + for k, v := range linterCfg.Blocked.Versions[n] { + m := map[string]gomodguard.BlockedVersion{k: { + Version: v.Version, + Reason: v.Reason, + }} + processorCfg.Blocked.Versions = append(processorCfg.Blocked.Versions, m) + break + } + } + for _, file := range pass.Files { files = append(files, pass.Fset.PositionFor(file.Pos(), false).Filename) } - processor, err := gomodguard.NewProcessor(processorCfg, log.New(os.Stderr, "", 0)) + processorCfg.Blocked.LocalReplaceDirectives = linterCfg.Blocked.LocalReplaceDirectives + + processor, err := gomodguard.NewProcessor(processorCfg) if err != nil { lintCtx.Log.Warnf("running gomodguard failed: %s: if you are not using go modules "+ "it is suggested to disable this linter", err) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ifshort.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ifshort.go new file mode 100644 index 0000000..cb54a2f --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ifshort.go @@ -0,0 +1,17 @@ +package golinters + +import ( + "github.com/esimonov/ifshort/pkg/analyzer" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewIfshort() *goanalysis.Linter { + return goanalysis.NewLinter( + "ifshort", + "Checks that your code uses short syntax for if-statements whenever possible", + []*analysis.Analyzer{analyzer.Analyzer}, + nil, + ).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/makezero.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/makezero.go new file mode 100644 index 0000000..7b7ec09 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/makezero.go @@ -0,0 +1,60 @@ +package golinters + +import ( + "sync" + + "github.com/ashanbrown/makezero/makezero" + "github.com/pkg/errors" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +const makezeroName = "makezero" + +func NewMakezero() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: makezeroName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + makezeroName, + "Finds slice declarations with non-zero initial length", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + s := &lintCtx.Settings().Makezero + + analyzer.Run = func(pass *analysis.Pass) 
(interface{}, error) { + var res []goanalysis.Issue + linter := makezero.NewLinter(s.Always) + for _, file := range pass.Files { + hints, err := linter.Run(pass.Fset, pass.TypesInfo, file) + if err != nil { + return nil, errors.Wrapf(err, "makezero linter failed on file %q", file.Name.String()) + } + for _, hint := range hints { + res = append(res, goanalysis.NewIssue(&result.Issue{ + Pos: hint.Position(), + Text: hint.Details(), + FromLinter: makezeroName, + }, pass)) + } + } + if len(res) == 0 { + return nil, nil + } + mu.Lock() + resIssues = append(resIssues, res...) + mu.Unlock() + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax | goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nlreturn.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nlreturn.go new file mode 100644 index 0000000..3b661c6 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nlreturn.go @@ -0,0 +1,19 @@ +package golinters + +import ( + "github.com/ssgreg/nlreturn/v2/pkg/nlreturn" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewNLReturn() *goanalysis.Linter { + return goanalysis.NewLinter( + "nlreturn", + "nlreturn checks for a new line before return and branch statements to increase code clarity", + []*analysis.Analyzer{ + nlreturn.NewAnalyzer(), + }, + nil, + ).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/noctx.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/noctx.go new file mode 100644 index 0000000..b5c4a4b --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/noctx.go @@ -0,0 +1,21 @@ +package golinters + +import ( + "github.com/sonatard/noctx" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewNoctx() *goanalysis.Linter { + analyzers := []*analysis.Analyzer{ + noctx.Analyzer, + } + + return goanalysis.NewLinter( + "noctx", + "noctx finds sending http request without context.Context", + analyzers, + nil, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/nolintlint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/nolintlint.go index 6da31c6..d20ac30 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/nolintlint.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/nolintlint.go @@ -47,7 +47,7 @@ type NotSpecific struct { } func (i NotSpecific) Details() string { - return fmt.Sprintf("directive `%s` should mention specific linter such as `//%s:my-linter`", + return fmt.Sprintf("directive `%s` should mention specific linter such as `%s:my-linter`", i.fullDirective, i.directiveWithOptionalLeadingSpace) } @@ -58,7 +58,7 @@ type ParseError struct { } func (i ParseError) Details() string { - return fmt.Sprintf("directive `%s` should match `//%s[:] [// ]`", + return fmt.Sprintf("directive `%s` should match `%s[:] [// ]`", i.fullDirective, i.directiveWithOptionalLeadingSpace) } @@ -112,8 +112,7 @@ const ( NeedsAll = NeedsMachineOnly | NeedsSpecific | NeedsExplanation ) -// matches lines starting with the nolint directive -var directiveOnlyPattern = regexp.MustCompile(`^\s*(nolint)(:\s*[\w-]+\s*(?:,\s*[\w-]+\s*)*)?\b`) +var commentPattern = 
regexp.MustCompile(`^//\s*(nolint)(:\s*[\w-]+\s*(?:,\s*[\w-]+\s*)*)?\b`) // matches a complete nolint directive var fullDirectivePattern = regexp.MustCompile(`^//\s*nolint(:\s*[\w-]+\s*(?:,\s*[\w-]+\s*)*)?\s*(//.*)?\s*\n?$`) @@ -142,98 +141,102 @@ var trailingBlankExplanation = regexp.MustCompile(`\s*(//\s*)?$`) func (l Linter) Run(fset *token.FileSet, nodes ...ast.Node) ([]Issue, error) { var issues []Issue + for _, node := range nodes { if file, ok := node.(*ast.File); ok { for _, c := range file.Comments { - text := c.Text() - matches := directiveOnlyPattern.FindStringSubmatch(text) - if len(matches) == 0 { - continue - } - directive := matches[1] + for _, comment := range c.List { + if !commentPattern.MatchString(comment.Text) { + continue + } - // check for a space between the "//" and the directive - leadingSpaceMatches := leadingSpacePattern.FindStringSubmatch(c.List[0].Text) // c.Text() doesn't have all leading space - if len(leadingSpaceMatches) == 0 { - continue - } - leadingSpace := leadingSpaceMatches[1] + // check for a space between the "//" and the directive + leadingSpaceMatches := leadingSpacePattern.FindStringSubmatch(comment.Text) - directiveWithOptionalLeadingSpace := directive - if len(leadingSpace) > 0 { - directiveWithOptionalLeadingSpace = " " + directive - } + var leadingSpace string + if len(leadingSpaceMatches) > 0 { + leadingSpace = leadingSpaceMatches[1] + } - base := BaseIssue{ - fullDirective: c.List[0].Text, - directiveWithOptionalLeadingSpace: directiveWithOptionalLeadingSpace, - position: fset.Position(c.Pos()), - } + directiveWithOptionalLeadingSpace := comment.Text + if len(leadingSpace) > 0 { + split := strings.Split(strings.SplitN(comment.Text, ":", 2)[0], "//") + directiveWithOptionalLeadingSpace = "// " + strings.TrimSpace(split[1]) + } - // check for, report and eliminate leading spaces so we can check for other issues - if leadingSpace != "" && leadingSpace != " " { - issues = append(issues, ExtraLeadingSpace{ - BaseIssue: base, - }) - } + base := BaseIssue{ + fullDirective: comment.Text, + directiveWithOptionalLeadingSpace: directiveWithOptionalLeadingSpace, + position: fset.Position(comment.Pos()), + } - if (l.needs&NeedsMachineOnly) != 0 && strings.HasPrefix(directiveWithOptionalLeadingSpace, " ") { - issues = append(issues, NotMachine{BaseIssue: base}) - } + // check for, report and eliminate leading spaces so we can check for other issues + if len(leadingSpace) > 1 { + issues = append(issues, ExtraLeadingSpace{BaseIssue: base}) + } - fullMatches := fullDirectivePattern.FindStringSubmatch(c.List[0].Text) - if len(fullMatches) == 0 { - issues = append(issues, ParseError{BaseIssue: base}) - continue - } - lintersText, explanation := fullMatches[1], fullMatches[2] - var linters []string - if len(lintersText) > 0 { - lls := strings.Split(lintersText[1:], ",") - linters = make([]string, 0, len(lls)) - for _, ll := range lls { - ll = strings.TrimSpace(ll) - if ll != "" { - linters = append(linters, ll) - } + if (l.needs&NeedsMachineOnly) != 0 && len(leadingSpace) > 0 { + issues = append(issues, NotMachine{BaseIssue: base}) } - } - if (l.needs & NeedsSpecific) != 0 { - if len(linters) == 0 { - issues = append(issues, NotSpecific{BaseIssue: base}) + + fullMatches := fullDirectivePattern.FindStringSubmatch(comment.Text) + if len(fullMatches) == 0 { + issues = append(issues, ParseError{BaseIssue: base}) + continue } - } - // when detecting unused directives, we send all the directives through and filter them out in the nolint processor - if 
l.needs&NeedsUnused != 0 { - if len(linters) == 0 { - issues = append(issues, UnusedCandidate{BaseIssue: base}) - } else { - for _, linter := range linters { - issues = append(issues, UnusedCandidate{BaseIssue: base, ExpectedLinter: linter}) + lintersText, explanation := fullMatches[1], fullMatches[2] + var linters []string + if len(lintersText) > 0 { + lls := strings.Split(lintersText[1:], ",") + linters = make([]string, 0, len(lls)) + for _, ll := range lls { + ll = strings.TrimSpace(ll) + if ll != "" { + linters = append(linters, ll) + } } } - } - if (l.needs&NeedsExplanation) != 0 && (explanation == "" || strings.TrimSpace(explanation) == "//") { - needsExplanation := len(linters) == 0 // if no linters are mentioned, we must have explanation - // otherwise, check if we are excluding all of the mentioned linters - for _, ll := range linters { - if !l.excludeByLinter[ll] { // if a linter does require explanation - needsExplanation = true - break + if (l.needs & NeedsSpecific) != 0 { + if len(linters) == 0 { + issues = append(issues, NotSpecific{BaseIssue: base}) } } - if needsExplanation { - fullDirectiveWithoutExplanation := trailingBlankExplanation.ReplaceAllString(c.List[0].Text, "") - issues = append(issues, NoExplanation{ - BaseIssue: base, - fullDirectiveWithoutExplanation: fullDirectiveWithoutExplanation, - }) + + // when detecting unused directives, we send all the directives through and filter them out in the nolint processor + if (l.needs & NeedsUnused) != 0 { + if len(linters) == 0 { + issues = append(issues, UnusedCandidate{BaseIssue: base}) + } else { + for _, linter := range linters { + issues = append(issues, UnusedCandidate{BaseIssue: base, ExpectedLinter: linter}) + } + } + } + + if (l.needs&NeedsExplanation) != 0 && (explanation == "" || strings.TrimSpace(explanation) == "//") { + needsExplanation := len(linters) == 0 // if no linters are mentioned, we must have explanation + // otherwise, check if we are excluding all of the mentioned linters + for _, ll := range linters { + if !l.excludeByLinter[ll] { // if a linter does require explanation + needsExplanation = true + break + } + } + + if needsExplanation { + fullDirectiveWithoutExplanation := trailingBlankExplanation.ReplaceAllString(comment.Text, "") + issues = append(issues, NoExplanation{ + BaseIssue: base, + fullDirectiveWithoutExplanation: fullDirectiveWithoutExplanation, + }) + } } } } } } + return issues, nil } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/paralleltest.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/paralleltest.go new file mode 100644 index 0000000..14ddf36 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/paralleltest.go @@ -0,0 +1,21 @@ +package golinters + +import ( + "github.com/kunwardeep/paralleltest/pkg/paralleltest" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewParallelTest() *goanalysis.Linter { + analyzers := []*analysis.Analyzer{ + paralleltest.NewAnalyzer(), + } + + return goanalysis.NewLinter( + "paralleltest", + "paralleltest detects missing usage of t.Parallel() method in your Go test", + analyzers, + nil, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/predeclared.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/predeclared.go new file mode 100644 index 0000000..caccd48 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/predeclared.go @@ -0,0 +1,26 
@@ +package golinters + +import ( + "github.com/nishanths/predeclared/passes/predeclared" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewPredeclared(settings *config.PredeclaredSettings) *goanalysis.Linter { + a := predeclared.Analyzer + + var cfg map[string]map[string]interface{} + if settings != nil { + cfg = map[string]map[string]interface{}{ + a.Name: { + predeclared.IgnoreFlag: settings.Ignore, + predeclared.QualifiedFlag: settings.Qualified, + }, + } + } + + return goanalysis.NewLinter(a.Name, a.Doc, []*analysis.Analyzer{a}, cfg). + WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/scopelint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/scopelint.go index 258912e..ba3921e 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/scopelint.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/scopelint.go @@ -162,7 +162,6 @@ func (f *Node) Visit(node ast.Node) ast.Visitor { // The variadic arguments may start with link and category types, // and must end with a format string and any arguments. -// It returns the new Problem. //nolint:interfacer func (f *Node) errorf(n ast.Node, format string, args ...interface{}) { pos := f.fset.Position(n.Pos()) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/sqlclosecheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/sqlclosecheck.go new file mode 100644 index 0000000..48ca246 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/sqlclosecheck.go @@ -0,0 +1,21 @@ +package golinters + +import ( + "github.com/ryanrolds/sqlclosecheck/pkg/analyzer" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewSQLCloseCheck() *goanalysis.Linter { + analyzers := []*analysis.Analyzer{ + analyzer.NewAnalyzer(), + } + + return goanalysis.NewLinter( + "sqlclosecheck", + "Checks that sql.Rows and sql.Stmt are closed.", + analyzers, + nil, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper.go new file mode 100644 index 0000000..27e5ee3 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper.go @@ -0,0 +1,51 @@ +package golinters + +import ( + "strings" + + "github.com/kulti/thelper/pkg/analyzer" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewThelper(cfg *config.ThelperSettings) *goanalysis.Linter { + a := analyzer.NewAnalyzer() + + cfgMap := map[string]map[string]interface{}{} + if cfg != nil { + var opts []string + + if cfg.Test.Name { + opts = append(opts, "t_name") + } + if cfg.Test.Begin { + opts = append(opts, "t_begin") + } + if cfg.Test.First { + opts = append(opts, "t_first") + } + + if cfg.Benchmark.Name { + opts = append(opts, "b_name") + } + if cfg.Benchmark.Begin { + opts = append(opts, "b_begin") + } + if cfg.Benchmark.First { + opts = append(opts, "b_first") + } + + cfgMap[a.Name] = map[string]interface{}{ + "checks": strings.Join(opts, ","), + } + } + + return goanalysis.NewLinter( + "thelper", + "thelper detects golang test helpers without t.Helper() call and checks the consistency of test helpers", + []*analysis.Analyzer{a}, + cfgMap, + 
).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tparallel.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tparallel.go new file mode 100644 index 0000000..a4b96eb --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tparallel.go @@ -0,0 +1,21 @@ +package golinters + +import ( + "github.com/moricho/tparallel" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewTparallel() *goanalysis.Linter { + analyzers := []*analysis.Analyzer{ + tparallel.Analyzer, + } + + return goanalysis.NewLinter( + "tparallel", + "tparallel detects inappropriate usage of t.Parallel() method in your Go test codes", + analyzers, + nil, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck.go new file mode 100644 index 0000000..e1592e5 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck.go @@ -0,0 +1,19 @@ +package golinters + +import ( + "github.com/tomarrell/wrapcheck/wrapcheck" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +const wrapcheckName = "wrapcheck" + +func NewWrapcheck() *goanalysis.Linter { + return goanalysis.NewLinter( + wrapcheckName, + wrapcheck.Analyzer.Doc, + []*analysis.Analyzer{wrapcheck.Analyzer}, + nil, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go index 1ec78de..fb39961 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go @@ -3,8 +3,10 @@ package lintersdb import ( "fmt" "os" + "path/filepath" "plugin" + "github.com/spf13/viper" "golang.org/x/tools/go/analysis" "github.com/golangci/golangci-lint/pkg/config" @@ -56,8 +58,10 @@ func (m *Manager) WithCustomLinters() *Manager { } func (Manager) AllPresets() []string { - return []string{linter.PresetBugs, linter.PresetComplexity, linter.PresetFormatting, - linter.PresetPerformance, linter.PresetStyle, linter.PresetUnused} + return []string{ + linter.PresetBugs, linter.PresetComplexity, linter.PresetFormatting, + linter.PresetPerformance, linter.PresetStyle, linter.PresetUnused, + } } func (m Manager) allPresetsSet() map[string]bool { @@ -87,9 +91,17 @@ func enableLinterConfigs(lcs []*linter.Config, isEnabled func(lc *linter.Config) func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { var govetCfg *config.GovetSettings var testpackageCfg *config.TestpackageSettings + var exhaustiveCfg *config.ExhaustiveSettings + var errorlintCfg *config.ErrorLintSettings + var thelperCfg *config.ThelperSettings + var predeclaredCfg *config.PredeclaredSettings if m.cfg != nil { govetCfg = &m.cfg.LintersSettings.Govet testpackageCfg = &m.cfg.LintersSettings.Testpackage + exhaustiveCfg = &m.cfg.LintersSettings.Exhaustive + errorlintCfg = &m.cfg.LintersSettings.ErrorLint + thelperCfg = &m.cfg.LintersSettings.Thelper + predeclaredCfg = &m.cfg.LintersSettings.Predeclared } const megacheckName = "megacheck" lcs := []*linter.Config{ @@ -102,6 +114,10 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithLoadForGoAnalysis(). WithPresets(linter.PresetPerformance, linter.PresetBugs). 
WithURL("https://github.com/timakin/bodyclose"), + linter.NewConfig(golinters.NewNoctx()). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetPerformance, linter.PresetBugs). + WithURL("https://github.com/sonatard/noctx"), linter.NewConfig(golinters.NewErrcheck()). WithLoadForGoAnalysis(). WithPresets(linter.PresetBugs). @@ -189,10 +205,22 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithPresets(linter.PresetFormatting). WithAutoFix(). WithURL("https://golang.org/cmd/gofmt/"), + linter.NewConfig(golinters.NewGofumpt()). + WithPresets(linter.PresetFormatting). + WithAutoFix(). + WithURL("https://github.com/mvdan/gofumpt"), linter.NewConfig(golinters.NewGoimports()). WithPresets(linter.PresetFormatting). WithAutoFix(). WithURL("https://godoc.org/golang.org/x/tools/cmd/goimports"), + linter.NewConfig(golinters.NewGoHeader()). + WithPresets(linter.PresetStyle). + WithLoadForGoAnalysis(). + WithURL("https://github.com/denis-tingajkin/go-header"), + linter.NewConfig(golinters.NewGci()). + WithLoadForGoAnalysis(). + WithAutoFix(). + WithURL("https://github.com/daixiang0/gci"), linter.NewConfig(golinters.NewMaligned()). WithLoadForGoAnalysis(). WithPresets(linter.PresetPerformance). @@ -263,6 +291,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithURL("https://github.com/ryancurrah/gomodguard"), linter.NewConfig(golinters.NewGodot()). WithPresets(linter.PresetStyle). + WithAutoFix(). WithURL("https://github.com/tetafro/godot"), linter.NewConfig(golinters.NewTestpackage(testpackageCfg)). WithPresets(linter.PresetStyle). @@ -271,10 +300,61 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { linter.NewConfig(golinters.NewNestif()). WithPresets(linter.PresetComplexity). WithURL("https://github.com/nakabonne/nestif"), + linter.NewConfig(golinters.NewExportLoopRef()). + WithPresets(linter.PresetBugs). + WithURL("https://github.com/kyoh86/exportloopref"), + linter.NewConfig(golinters.NewExhaustive(exhaustiveCfg)). + WithPresets(linter.PresetBugs). + WithLoadForGoAnalysis(). + WithURL("https://github.com/nishanths/exhaustive"), + linter.NewConfig(golinters.NewSQLCloseCheck()). + WithPresets(linter.PresetBugs). + WithLoadForGoAnalysis(). + WithURL("https://github.com/ryanrolds/sqlclosecheck"), + linter.NewConfig(golinters.NewNLReturn()). + WithPresets(linter.PresetStyle). + WithLoadForGoAnalysis(). + WithURL("https://github.com/ssgreg/nlreturn"), + linter.NewConfig(golinters.NewWrapcheck()). + WithPresets(linter.PresetStyle). + WithLoadForGoAnalysis(). + WithURL("https://github.com/tomarrell/wrapcheck"), + linter.NewConfig(golinters.NewThelper(thelperCfg)). + WithPresets(linter.PresetStyle). + WithLoadForGoAnalysis(). + WithURL("https://github.com/kulti/thelper"), + linter.NewConfig(golinters.NewTparallel()). + WithPresets(linter.PresetStyle). + WithLoadForGoAnalysis(). + WithURL("https://github.com/moricho/tparallel"), + linter.NewConfig(golinters.NewExhaustiveStruct()). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/mbilski/exhaustivestruct"), + linter.NewConfig(golinters.NewErrorLint(errorlintCfg)). + WithPresets(linter.PresetBugs). + WithLoadForGoAnalysis(). + WithURL("https://github.com/polyfloyd/go-errorlint"), + linter.NewConfig(golinters.NewParallelTest()). + WithPresets(linter.PresetStyle). + WithLoadForGoAnalysis(). + WithURL("https://github.com/kunwardeep/paralleltest"), + linter.NewConfig(golinters.NewMakezero()). + WithPresets(linter.PresetStyle, linter.PresetBugs). 
+ WithURL("https://github.com/ashanbrown/makezero"), + linter.NewConfig(golinters.NewForbidigo()). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/ashanbrown/forbidigo"), + linter.NewConfig(golinters.NewIfshort()). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/esimonov/ifshort"), + linter.NewConfig(golinters.NewPredeclared(predeclaredCfg)). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/nishanths/predeclared"), + // nolintlint must be last because it looks at the results of all the previous linters for unused nolint directives linter.NewConfig(golinters.NewNoLintLint()). WithPresets(linter.PresetStyle). - WithURL("https://github.com/golangci-lint/pkg/golinters/nolintlint"), + WithURL("https://github.com/golangci/golangci-lint/blob/master/pkg/golinters/nolintlint/README.md"), } isLocalRun := os.Getenv("GOLANGCI_COM_RUN") == "" @@ -355,6 +435,16 @@ type AnalyzerPlugin interface { } func (m Manager) getAnalyzerPlugin(path string) (AnalyzerPlugin, error) { + if !filepath.IsAbs(path) { + // resolve non-absolute paths relative to config file's directory + configFilePath := viper.ConfigFileUsed() + absConfigFilePath, err := filepath.Abs(configFilePath) + if err != nil { + return nil, fmt.Errorf("could not get absolute representation of config file path %q: %v", configFilePath, err) + } + path = filepath.Join(filepath.Dir(absConfigFilePath), path) + } + plug, err := plugin.Open(path) if err != nil { return nil, err diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/validator.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/validator.go index d7e3699..ed731a9 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/validator.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/validator.go @@ -20,12 +20,20 @@ func NewValidator(m *Manager) *Validator { func (v Validator) validateLintersNames(cfg *config.Linters) error { allNames := append([]string{}, cfg.Enable...) allNames = append(allNames, cfg.Disable...) + + unknownNames := []string{} + for _, name := range allNames { if v.m.GetLinterConfigs(name) == nil { - return fmt.Errorf("no such linter %q", name) + unknownNames = append(unknownNames, name) } } + if len(unknownNames) > 0 { + return fmt.Errorf("unknown linters: '%v', run 'golangci-lint linters' to see the list of supported linters", + strings.Join(unknownNames, ",")) + } + return nil } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/load.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/load.go index e2928a5..69852af 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/load.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/load.go @@ -104,7 +104,7 @@ func (cl *ContextLoader) makeBuildFlags() ([]string, error) { mod := cl.cfg.Run.ModulesDownloadMode if mod != "" { // go help modules - allowedMods := []string{"release", "readonly", "vendor"} + allowedMods := []string{"mod", "readonly", "vendor"} var ok bool for _, am := range allowedMods { if am == mod { @@ -199,7 +199,14 @@ func (cl *ContextLoader) loadPackages(ctx context.Context, loadMode packages.Loa cl.debugf("Built loader args are %s", args) pkgs, err := packages.Load(conf, args...) if err != nil { - return nil, errors.Wrap(err, "failed to load program with go/packages") + return nil, errors.Wrap(err, "failed to load with go/packages") + } + + // Currently, go/packages doesn't guarantee that error will be returned + // if context was canceled. 
See + // https://github.com/golang/tools/commit/c5cec6710e927457c3c29d6c156415e8539a5111#r39261855 + if ctx.Err() != nil { + return nil, errors.Wrap(ctx.Err(), "timed out to load packages") } if loadMode&packages.NeedSyntax == 0 { @@ -280,7 +287,7 @@ func (cl *ContextLoader) Load(ctx context.Context, linters []*linter.Config) (*l loadMode := cl.findLoadMode(linters) pkgs, err := cl.loadPackages(ctx, loadMode) if err != nil { - return nil, err + return nil, errors.Wrap(err, "failed to load packages") } deduplicatedPkgs := cl.filterDuplicatePackages(pkgs) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go index 05dc51b..63e6ad7 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go @@ -31,24 +31,6 @@ type Runner struct { func NewRunner(cfg *config.Config, log logutils.Log, goenv *goutil.Env, es *lintersdb.EnabledSet, lineCache *fsutils.LineCache, dbManager *lintersdb.Manager, pkgs []*gopackages.Package) (*Runner, error) { - icfg := cfg.Issues - excludePatterns := icfg.ExcludePatterns - if icfg.UseDefaultExcludes { - excludePatterns = append(excludePatterns, config.GetExcludePatternsStrings(icfg.IncludeDefaultExcludes)...) - } - - var excludeTotalPattern string - if len(excludePatterns) != 0 { - excludeTotalPattern = fmt.Sprintf("(%s)", strings.Join(excludePatterns, "|")) - } - - var excludeProcessor processors.Processor - if cfg.Issues.ExcludeCaseSensitive { - excludeProcessor = processors.NewExcludeCaseSensitive(excludeTotalPattern) - } else { - excludeProcessor = processors.NewExclude(excludeTotalPattern) - } - skipFilesProcessor, err := processors.NewSkipFiles(cfg.Run.SkipFiles) if err != nil { return nil, err @@ -63,22 +45,6 @@ func NewRunner(cfg *config.Config, log logutils.Log, goenv *goutil.Env, es *lint return nil, err } - var excludeRules []processors.ExcludeRule - for _, r := range icfg.ExcludeRules { - excludeRules = append(excludeRules, processors.ExcludeRule{ - Text: r.Text, - Source: r.Source, - Path: r.Path, - Linters: r.Linters, - }) - } - var excludeRulesProcessor processors.Processor - if cfg.Issues.ExcludeCaseSensitive { - excludeRulesProcessor = processors.NewExcludeRulesCaseSensitive(excludeRules, lineCache, log.Child("exclude_rules")) - } else { - excludeRulesProcessor = processors.NewExcludeRules(excludeRules, lineCache, log.Child("exclude_rules")) - } - enabledLinters, err := es.GetEnabledLintersMap() if err != nil { return nil, errors.Wrap(err, "failed to get enabled linters") @@ -101,17 +67,20 @@ func NewRunner(cfg *config.Config, log logutils.Log, goenv *goutil.Env, es *lint // Must be before exclude because users see already marked output and configure excluding by it. 
processors.NewIdentifierMarker(), - excludeProcessor, - excludeRulesProcessor, + getExcludeProcessor(&cfg.Issues), + getExcludeRulesProcessor(&cfg.Issues, log, lineCache), processors.NewNolint(log.Child("nolint"), dbManager, enabledLinters), processors.NewUniqByLine(cfg), - processors.NewDiff(icfg.Diff, icfg.DiffFromRevision, icfg.DiffPatchFilePath), + processors.NewDiff(cfg.Issues.Diff, cfg.Issues.DiffFromRevision, cfg.Issues.DiffPatchFilePath), processors.NewMaxPerFileFromLinter(cfg), - processors.NewMaxSameIssues(icfg.MaxSameIssues, log.Child("max_same_issues"), cfg), - processors.NewMaxFromLinter(icfg.MaxIssuesPerLinter, log.Child("max_from_linter"), cfg), + processors.NewMaxSameIssues(cfg.Issues.MaxSameIssues, log.Child("max_same_issues"), cfg), + processors.NewMaxFromLinter(cfg.Issues.MaxIssuesPerLinter, log.Child("max_from_linter"), cfg), processors.NewSourceCode(lineCache, log.Child("source_code")), processors.NewPathShortener(), + getSeverityRulesProcessor(&cfg.Severity, log, lineCache), + processors.NewPathPrefixer(cfg.Output.PathPrefix), + processors.NewSortResults(cfg), }, Log: log, }, nil @@ -122,6 +91,8 @@ func (r *Runner) runLinterSafe(ctx context.Context, lintCtx *linter.Context, defer func() { if panicData := recover(); panicData != nil { if pe, ok := panicData.(*errorutil.PanicError); ok { + err = fmt.Errorf("%s: %w", lc.Name(), pe) + // Don't print stacktrace from goroutines twice lintCtx.Log.Warnf("Panic: %s: %s", pe, pe.Stack()) } else { @@ -254,3 +225,96 @@ func (r *Runner) processIssues(issues []result.Issue, sw *timeutils.Stopwatch, s return issues } + +func getExcludeProcessor(cfg *config.Issues) processors.Processor { + var excludeTotalPattern string + excludeGlobalPatterns := cfg.ExcludePatterns + if len(excludeGlobalPatterns) != 0 { + excludeTotalPattern = fmt.Sprintf("(%s)", strings.Join(excludeGlobalPatterns, "|")) + } + + var excludeProcessor processors.Processor + if cfg.ExcludeCaseSensitive { + excludeProcessor = processors.NewExcludeCaseSensitive(excludeTotalPattern) + } else { + excludeProcessor = processors.NewExclude(excludeTotalPattern) + } + + return excludeProcessor +} + +func getExcludeRulesProcessor(cfg *config.Issues, log logutils.Log, lineCache *fsutils.LineCache) processors.Processor { + var excludeRules []processors.ExcludeRule + for _, r := range cfg.ExcludeRules { + excludeRules = append(excludeRules, processors.ExcludeRule{ + BaseRule: processors.BaseRule{ + Text: r.Text, + Source: r.Source, + Path: r.Path, + Linters: r.Linters, + }, + }) + } + + if cfg.UseDefaultExcludes { + for _, r := range config.GetExcludePatterns(cfg.IncludeDefaultExcludes) { + excludeRules = append(excludeRules, processors.ExcludeRule{ + BaseRule: processors.BaseRule{ + Text: r.Pattern, + Linters: []string{r.Linter}, + }, + }) + } + } + + var excludeRulesProcessor processors.Processor + if cfg.ExcludeCaseSensitive { + excludeRulesProcessor = processors.NewExcludeRulesCaseSensitive( + excludeRules, + lineCache, + log.Child("exclude_rules"), + ) + } else { + excludeRulesProcessor = processors.NewExcludeRules( + excludeRules, + lineCache, + log.Child("exclude_rules"), + ) + } + + return excludeRulesProcessor +} + +func getSeverityRulesProcessor(cfg *config.Severity, log logutils.Log, lineCache *fsutils.LineCache) processors.Processor { + var severityRules []processors.SeverityRule + for _, r := range cfg.Rules { + severityRules = append(severityRules, processors.SeverityRule{ + Severity: r.Severity, + BaseRule: processors.BaseRule{ + Text: r.Text, + Source: r.Source, + 
Path: r.Path, + Linters: r.Linters, + }, + }) + } + + var severityRulesProcessor processors.Processor + if cfg.CaseSensitive { + severityRulesProcessor = processors.NewSeverityRulesCaseSensitive( + cfg.Default, + severityRules, + lineCache, + log.Child("severity_rules"), + ) + } else { + severityRulesProcessor = processors.NewSeverityRules( + cfg.Default, + severityRules, + lineCache, + log.Child("severity_rules"), + ) + } + + return severityRulesProcessor +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go index f36bc10..c5b948a 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go @@ -5,6 +5,8 @@ import ( "encoding/xml" "fmt" + "github.com/go-xmlfmt/xmlfmt" + "github.com/golangci/golangci-lint/pkg/logutils" "github.com/golangci/golangci-lint/pkg/result" ) @@ -28,7 +30,7 @@ type checkstyleError struct { Source string `xml:"source,attr"` } -const defaultSeverity = "error" +const defaultCheckstyleSeverity = "error" type Checkstyle struct{} @@ -54,12 +56,17 @@ func (Checkstyle) Print(ctx context.Context, issues []result.Issue) error { files[issue.FilePath()] = file } + severity := defaultCheckstyleSeverity + if issue.Severity != "" { + severity = issue.Severity + } + newError := &checkstyleError{ Column: issue.Column(), Line: issue.Line(), Message: issue.Text, Source: issue.FromLinter, - Severity: defaultSeverity, + Severity: severity, } file.Errors = append(file.Errors, newError) @@ -75,6 +82,6 @@ func (Checkstyle) Print(ctx context.Context, issues []result.Issue) error { return err } - fmt.Fprintf(logutils.StdOut, "%s%s\n", xml.Header, data) + fmt.Fprintf(logutils.StdOut, "%s%s\n", xml.Header, xmlfmt.FormatXML(string(data), "", " ")) return nil } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go index 5d45c4e..35a22ce 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go @@ -2,7 +2,6 @@ package printers import ( "context" - "crypto/md5" //nolint:gosec "encoding/json" "fmt" @@ -14,6 +13,7 @@ import ( // It is just enough to support GitLab CI Code Quality - https://docs.gitlab.com/ee/user/project/merge_requests/code_quality.html type CodeClimateIssue struct { Description string `json:"description"` + Severity string `json:"severity,omitempty"` Fingerprint string `json:"fingerprint"` Location struct { Path string `json:"path"` @@ -31,28 +31,23 @@ func NewCodeClimate() *CodeClimate { } func (p CodeClimate) Print(ctx context.Context, issues []result.Issue) error { - allIssues := []CodeClimateIssue{} - for ind := range issues { - i := &issues[ind] - var issue CodeClimateIssue - issue.Description = i.FromLinter + ": " + i.Text - issue.Location.Path = i.Pos.Filename - issue.Location.Lines.Begin = i.Pos.Line - - // Need a checksum of the issue, so we use MD5 of the filename, text, and first line of source if there is any - var firstLine string - if len(i.SourceLines) > 0 { - firstLine = i.SourceLines[0] + codeClimateIssues := []CodeClimateIssue{} + for i := range issues { + issue := &issues[i] + codeClimateIssue := CodeClimateIssue{} + codeClimateIssue.Description = issue.Description() + codeClimateIssue.Location.Path = issue.Pos.Filename + codeClimateIssue.Location.Lines.Begin = issue.Pos.Line + 
codeClimateIssue.Fingerprint = issue.Fingerprint() + + if issue.Severity != "" { + codeClimateIssue.Severity = issue.Severity } - hash := md5.New() //nolint:gosec - _, _ = hash.Write([]byte(i.Pos.Filename + i.Text + firstLine)) - issue.Fingerprint = fmt.Sprintf("%X", hash.Sum(nil)) - - allIssues = append(allIssues, issue) + codeClimateIssues = append(codeClimateIssues, codeClimateIssue) } - outputJSON, err := json.Marshal(allIssues) + outputJSON, err := json.Marshal(codeClimateIssues) if err != nil { return err } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/github.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/github.go index fa11a28..4ebc266 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/github.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/github.go @@ -11,7 +11,9 @@ import ( type github struct { } -// Github output format outputs issues according to Github actions format: +const defaultGithubSeverity = "error" + +// NewGithub output format outputs issues according to Github actions format: // https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-error-message func NewGithub() Printer { return &github{} @@ -19,7 +21,12 @@ func NewGithub() Printer { // print each line as: ::error file=app.js,line=10,col=15::Something went wrong func formatIssueAsGithub(issue *result.Issue) string { - ret := fmt.Sprintf("::error file=%s,line=%d", issue.FilePath(), issue.Line()) + severity := defaultGithubSeverity + if issue.Severity != "" { + severity = issue.Severity + } + + ret := fmt.Sprintf("::%s file=%s,line=%d", severity, issue.FilePath(), issue.Line()) if issue.Pos.Column != 0 { ret += fmt.Sprintf(",col=%d", issue.Pos.Column) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go index 2834992..1814528 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go @@ -3,6 +3,7 @@ package printers import ( "context" "fmt" + "strings" "github.com/fatih/color" @@ -52,7 +53,7 @@ func (p *Text) Print(ctx context.Context, issues []result.Issue) error { } func (p Text) printIssue(i *result.Issue) { - text := p.SprintfColored(color.FgRed, "%s", i.Text) + text := p.SprintfColored(color.FgRed, "%s", strings.TrimSpace(i.Text)) if p.printLinterName { text += fmt.Sprintf(" (%s)", i.FromLinter) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/issue.go b/vendor/github.com/golangci/golangci-lint/pkg/result/issue.go index 16d9a8a..eafdbc4 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/issue.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/issue.go @@ -1,6 +1,8 @@ package result import ( + "crypto/md5" //nolint:gosec + "fmt" "go/token" "golang.org/x/tools/go/packages" @@ -26,6 +28,8 @@ type Issue struct { FromLinter string Text string + Severity string + // Source lines of a code with the issue to show SourceLines []string @@ -76,3 +80,19 @@ func (i *Issue) GetLineRange() Range { return *i.LineRange } + +func (i *Issue) Description() string { + return fmt.Sprintf("%s: %s", i.FromLinter, i.Text) +} + +func (i *Issue) Fingerprint() string { + firstLine := "" + if len(i.SourceLines) > 0 { + firstLine = i.SourceLines[0] + } + + hash := md5.New() //nolint:gosec + _, _ = hash.Write([]byte(fmt.Sprintf("%s%s%s", i.Pos.Filename, i.Text, firstLine))) + + return fmt.Sprintf("%X", hash.Sum(nil)) +} diff --git 
a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go index 249ba9d..7894bbb 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go @@ -1,9 +1,9 @@ package processors import ( - "bufio" "fmt" - "os" + "go/parser" + "go/token" "path/filepath" "strings" @@ -43,8 +43,8 @@ func (p *AutogeneratedExclude) Process(issues []result.Issue) ([]result.Issue, e func isSpecialAutogeneratedFile(filePath string) bool { fileName := filepath.Base(filePath) - // fake files to which //line points to for goyacc generated files - return fileName == "yacctab" || fileName == "yaccpar" || fileName == "NONE" + // fake files or generation definitions to which //line points to for generated files + return filepath.Ext(fileName) != ".go" } func (p *AutogeneratedExclude) shouldPassIssue(i *result.Issue) (bool, error) { @@ -113,37 +113,15 @@ func (p *AutogeneratedExclude) getOrCreateFileSummary(i *result.Issue) (*ageFile } func getDoc(filePath string) (string, error) { - file, err := os.Open(filePath) + fset := token.NewFileSet() + syntax, err := parser.ParseFile(fset, filePath, nil, parser.PackageClauseOnly|parser.ParseComments) if err != nil { - return "", errors.Wrap(err, "failed to open file") + return "", errors.Wrap(err, "failed to parse file") } - defer file.Close() - - scanner := bufio.NewScanner(file) - - // Issue 954: Some lines can be very long, e.g. auto-generated - // embedded resources. Reported on file of 86.2KB. - const ( - maxSize = 10 * 1024 * 1024 // 10MB should be enough - initialSize = 4096 // same as startBufSize in bufio - ) - scanner.Buffer(make([]byte, initialSize), maxSize) var docLines []string - for scanner.Scan() { - line := strings.TrimSpace(scanner.Text()) - if strings.HasPrefix(line, "//") { - text := strings.TrimSpace(strings.TrimPrefix(line, "//")) - docLines = append(docLines, text) - } else if line == "" || strings.HasPrefix(line, "package") { - // go to next line - } else { - break - } - } - - if err := scanner.Err(); err != nil { - return "", errors.Wrap(err, "failed to scan file") + for _, c := range syntax.Comments { + docLines = append(docLines, strings.TrimSpace(c.Text())) } return strings.Join(docLines, "\n"), nil diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/base_rule.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/base_rule.go new file mode 100644 index 0000000..b6ce4f2 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/base_rule.go @@ -0,0 +1,69 @@ +package processors + +import ( + "regexp" + + "github.com/golangci/golangci-lint/pkg/fsutils" + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/result" +) + +type BaseRule struct { + Text string + Source string + Path string + Linters []string +} + +type baseRule struct { + text *regexp.Regexp + source *regexp.Regexp + path *regexp.Regexp + linters []string +} + +func (r *baseRule) isEmpty() bool { + return r.text == nil && r.source == nil && r.path == nil && len(r.linters) == 0 +} + +func (r *baseRule) match(issue *result.Issue, lineCache *fsutils.LineCache, log logutils.Log) bool { + if r.isEmpty() { + return false + } + if r.text != nil && !r.text.MatchString(issue.Text) { + return false + } + if r.path != nil && 
!r.path.MatchString(issue.FilePath()) { + return false + } + if len(r.linters) != 0 && !r.matchLinter(issue) { + return false + } + + // the most heavyweight checking last + if r.source != nil && !r.matchSource(issue, lineCache, log) { + return false + } + + return true +} + +func (r *baseRule) matchLinter(issue *result.Issue) bool { + for _, linter := range r.linters { + if linter == issue.FromLinter { + return true + } + } + + return false +} + +func (r *baseRule) matchSource(issue *result.Issue, lineCache *fsutils.LineCache, log logutils.Log) bool { // nolint:interfacer + sourceLine, errSourceLine := lineCache.GetLine(issue.FilePath(), issue.Line()) + if errSourceLine != nil { + log.Warnf("Failed to get line %s:%d from line cache: %s", issue.FilePath(), issue.Line(), errSourceLine) + return false // can't properly match + } + + return r.source.MatchString(sourceLine) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go index b926af5..d4d6569 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go @@ -9,21 +9,11 @@ import ( ) type excludeRule struct { - text *regexp.Regexp - source *regexp.Regexp - path *regexp.Regexp - linters []string -} - -func (r *excludeRule) isEmpty() bool { - return r.text == nil && r.path == nil && len(r.linters) == 0 + baseRule } type ExcludeRule struct { - Text string - Source string - Path string - Linters []string + BaseRule } type ExcludeRules struct { @@ -45,9 +35,8 @@ func NewExcludeRules(rules []ExcludeRule, lineCache *fsutils.LineCache, log logu func createRules(rules []ExcludeRule, prefix string) []excludeRule { parsedRules := make([]excludeRule, 0, len(rules)) for _, rule := range rules { - parsedRule := excludeRule{ - linters: rule.Linters, - } + parsedRule := excludeRule{} + parsedRule.linters = rule.Linters if rule.Text != "" { parsedRule.text = regexp.MustCompile(prefix + rule.Text) } @@ -69,7 +58,7 @@ func (p ExcludeRules) Process(issues []result.Issue) ([]result.Issue, error) { return filterIssues(issues, func(i *result.Issue) bool { for _, rule := range p.rules { rule := rule - if p.match(i, &rule) { + if rule.match(i, p.lineCache, p.log) { return false } } @@ -77,48 +66,6 @@ func (p ExcludeRules) Process(issues []result.Issue) ([]result.Issue, error) { }), nil } -func (p ExcludeRules) matchLinter(i *result.Issue, r *excludeRule) bool { - for _, linter := range r.linters { - if linter == i.FromLinter { - return true - } - } - - return false -} - -func (p ExcludeRules) matchSource(i *result.Issue, r *excludeRule) bool { //nolint:interfacer - sourceLine, err := p.lineCache.GetLine(i.FilePath(), i.Line()) - if err != nil { - p.log.Warnf("Failed to get line %s:%d from line cache: %s", i.FilePath(), i.Line(), err) - return false // can't properly match - } - - return r.source.MatchString(sourceLine) -} - -func (p ExcludeRules) match(i *result.Issue, r *excludeRule) bool { - if r.isEmpty() { - return false - } - if r.text != nil && !r.text.MatchString(i.Text) { - return false - } - if r.path != nil && !r.path.MatchString(i.FilePath()) { - return false - } - if len(r.linters) != 0 && !p.matchLinter(i, r) { - return false - } - - // the most heavyweight checking last - if r.source != nil && !p.matchSource(i, r) { - return false - } - - return true -} - func (ExcludeRules) Name() string { return "exclude-rules" 
} func (ExcludeRules) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/filename_unadjuster.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/filename_unadjuster.go index 5e692ce..9654024 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/filename_unadjuster.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/filename_unadjuster.go @@ -16,6 +16,11 @@ import ( type posMapper func(pos token.Position) token.Position +type adjustMap struct { + sync.Mutex + m map[string]posMapper +} + // FilenameUnadjuster is needed because a lot of linters use fset.Position(f.Pos()) // to get filename. And they return adjusted filename (e.g. *.qtpl) for an issue. We need // restore real .go filename to properly output it, parse it, etc. @@ -27,7 +32,7 @@ type FilenameUnadjuster struct { var _ Processor = &FilenameUnadjuster{} -func processUnadjusterPkg(m map[string]posMapper, pkg *packages.Package, log logutils.Log) { +func processUnadjusterPkg(m *adjustMap, pkg *packages.Package, log logutils.Log) { fset := token.NewFileSet() // it's more memory efficient to not store all in one fset for _, filename := range pkg.CompiledGoFiles { @@ -36,7 +41,7 @@ func processUnadjusterPkg(m map[string]posMapper, pkg *packages.Package, log log } } -func processUnadjusterFile(filename string, m map[string]posMapper, log logutils.Log, fset *token.FileSet) { +func processUnadjusterFile(filename string, m *adjustMap, log logutils.Log, fset *token.FileSet) { syntax, err := parser.ParseFile(fset, filename, nil, parser.ParseComments) if err != nil { // Error will be reported by typecheck @@ -57,7 +62,9 @@ func processUnadjusterFile(filename string, m map[string]posMapper, log logutils return // file.go -> /caches/cgo-xxx } - m[adjustedFilename] = func(adjustedPos token.Position) token.Position { + m.Lock() + defer m.Unlock() + m.m[adjustedFilename] = func(adjustedPos token.Position) token.Position { tokenFile := fset.File(syntax.Pos()) if tokenFile == nil { log.Warnf("Failed to get token file for %s", adjustedFilename) @@ -68,22 +75,23 @@ func processUnadjusterFile(filename string, m map[string]posMapper, log logutils } func NewFilenameUnadjuster(pkgs []*packages.Package, log logutils.Log) *FilenameUnadjuster { - m := map[string]posMapper{} + m := adjustMap{m: map[string]posMapper{}} + startedAt := time.Now() var wg sync.WaitGroup wg.Add(len(pkgs)) for _, pkg := range pkgs { go func(pkg *packages.Package) { // It's important to call func here to run GC - processUnadjusterPkg(m, pkg, log) + processUnadjusterPkg(&m, pkg, log) wg.Done() }(pkg) } wg.Wait() - log.Infof("Pre-built %d adjustments in %s", len(m), time.Since(startedAt)) + log.Infof("Pre-built %d adjustments in %s", len(m.m), time.Since(startedAt)) return &FilenameUnadjuster{ - m: m, + m: m.m, log: log, loggedUnadjustments: map[string]bool{}, } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go index cda1017..9b292ed 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go @@ -250,14 +250,14 @@ func (p *Nolint) extractInlineRangeFromComment(text string, g ast.Node, fset *to var linters []string text = strings.Split(text, "//")[0] // allow another comment after this comment linterItems := strings.Split(strings.TrimPrefix(text, "nolint:"), ",") - var 
gotUnknownLinters bool for _, linter := range linterItems { linterName := strings.ToLower(strings.TrimSpace(linter)) lcs := p.dbManager.GetLinterConfigs(linterName) if lcs == nil { p.unknownLintersSet[linterName] = true - gotUnknownLinters = true + linters = append(linters, linterName) + nolintDebugf("unknown linter %s on line %d", linterName, fset.Position(g.Pos()).Line) continue } @@ -266,10 +266,6 @@ func (p *Nolint) extractInlineRangeFromComment(text string, g ast.Node, fset *to } } - if gotUnknownLinters { - return buildRange(nil) // ignore all linters to not annoy user - } - nolintDebugf("%d: linters are %s", fset.Position(g.Pos()).Line, linters) return buildRange(linters) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prefixer.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prefixer.go new file mode 100644 index 0000000..5ce940b --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prefixer.go @@ -0,0 +1,37 @@ +package processors + +import ( + "path" + + "github.com/golangci/golangci-lint/pkg/result" +) + +// PathPrefixer adds a customizable prefix to every output path +type PathPrefixer struct { + prefix string +} + +var _ Processor = new(PathPrefixer) + +// NewPathPrefixer returns a new path prefixer for the provided string +func NewPathPrefixer(prefix string) *PathPrefixer { + return &PathPrefixer{prefix: prefix} +} + +// Name returns the name of this processor +func (*PathPrefixer) Name() string { + return "path_prefixer" +} + +// Process adds the prefix to each path +func (p *PathPrefixer) Process(issues []result.Issue) ([]result.Issue, error) { + if p.prefix != "" { + for i := range issues { + issues[i].Pos.Filename = path.Join(p.prefix, issues[i].Pos.Filename) + } + } + return issues, nil +} + +// Finish is implemented to satisfy the Processor interface +func (*PathPrefixer) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity_rules.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity_rules.go new file mode 100644 index 0000000..7c9a4c1 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity_rules.go @@ -0,0 +1,103 @@ +package processors + +import ( + "regexp" + + "github.com/golangci/golangci-lint/pkg/fsutils" + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/result" +) + +type severityRule struct { + baseRule + severity string +} + +type SeverityRule struct { + BaseRule + Severity string +} + +type SeverityRules struct { + defaultSeverity string + rules []severityRule + lineCache *fsutils.LineCache + log logutils.Log +} + +func NewSeverityRules(defaultSeverity string, rules []SeverityRule, lineCache *fsutils.LineCache, log logutils.Log) *SeverityRules { + r := &SeverityRules{ + lineCache: lineCache, + log: log, + defaultSeverity: defaultSeverity, + } + r.rules = createSeverityRules(rules, "(?i)") + + return r +} + +func createSeverityRules(rules []SeverityRule, prefix string) []severityRule { + parsedRules := make([]severityRule, 0, len(rules)) + for _, rule := range rules { + parsedRule := severityRule{} + parsedRule.linters = rule.Linters + parsedRule.severity = rule.Severity + if rule.Text != "" { + parsedRule.text = regexp.MustCompile(prefix + rule.Text) + } + if rule.Source != "" { + parsedRule.source = regexp.MustCompile(prefix + rule.Source) + } + if rule.Path != "" { + parsedRule.path = regexp.MustCompile(rule.Path) + } + 
parsedRules = append(parsedRules, parsedRule) + } + return parsedRules +} + +func (p SeverityRules) Process(issues []result.Issue) ([]result.Issue, error) { + if len(p.rules) == 0 && p.defaultSeverity == "" { + return issues, nil + } + return transformIssues(issues, func(i *result.Issue) *result.Issue { + for _, rule := range p.rules { + rule := rule + + ruleSeverity := p.defaultSeverity + if rule.severity != "" { + ruleSeverity = rule.severity + } + + if rule.match(i, p.lineCache, p.log) { + i.Severity = ruleSeverity + return i + } + } + i.Severity = p.defaultSeverity + return i + }), nil +} + +func (SeverityRules) Name() string { return "severity-rules" } +func (SeverityRules) Finish() {} + +var _ Processor = SeverityRules{} + +type SeverityRulesCaseSensitive struct { + *SeverityRules +} + +func NewSeverityRulesCaseSensitive(defaultSeverity string, rules []SeverityRule, + lineCache *fsutils.LineCache, log logutils.Log) *SeverityRulesCaseSensitive { + r := &SeverityRules{ + lineCache: lineCache, + log: log, + defaultSeverity: defaultSeverity, + } + r.rules = createSeverityRules(rules, "") + + return &SeverityRulesCaseSensitive{r} +} + +func (SeverityRulesCaseSensitive) Name() string { return "severity-rules-case-sensitive" } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/sort_results.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/sort_results.go new file mode 100644 index 0000000..e726c3a --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/sort_results.go @@ -0,0 +1,173 @@ +package processors + +import ( + "sort" + "strings" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/result" +) + +// Base propose of this functionality to sort results (issues) +// produced by various linters by analyzing code. We achieving this +// by sorting results.Issues using processor step, and chain based +// rules that can compare different properties of the Issues struct. + +var _ Processor = (*SortResults)(nil) + +type SortResults struct { + cmp comparator + cfg *config.Config +} + +func NewSortResults(cfg *config.Config) *SortResults { + // For sorting we are comparing (in next order): file names, line numbers, + // position, and finally - giving up. + return &SortResults{ + cmp: ByName{ + next: ByLine{ + next: ByColumn{}, + }, + }, + cfg: cfg, + } +} + +// Process is performing sorting of the result issues. +func (sr SortResults) Process(issues []result.Issue) ([]result.Issue, error) { + if !sr.cfg.Output.SortResults { + return issues, nil + } + + sort.Slice(issues, func(i, j int) bool { + return sr.cmp.Compare(&issues[i], &issues[j]) == Less + }) + + return issues, nil +} + +func (sr SortResults) Name() string { return "sort_results" } +func (sr SortResults) Finish() {} + +type compareResult int + +const ( + Less compareResult = iota - 1 + Equal + Greater + None +) + +func (c compareResult) isNeutral() bool { + // return true if compare result is incomparable or equal. 
+ return c == None || c == Equal +} + +//nolint:exhaustive +func (c compareResult) String() string { + switch c { + case Less: + return "Less" + case Equal: + return "Equal" + case Greater: + return "Greater" + } + + return "None" +} + +// comparator describe how to implement compare for two "issues" lexicographically +type comparator interface { + Compare(a, b *result.Issue) compareResult + Next() comparator +} + +var ( + _ comparator = (*ByName)(nil) + _ comparator = (*ByLine)(nil) + _ comparator = (*ByColumn)(nil) +) + +type ByName struct{ next comparator } + +//nolint:golint +func (cmp ByName) Next() comparator { return cmp.next } + +//nolint:golint +func (cmp ByName) Compare(a, b *result.Issue) compareResult { + var res compareResult + + if res = compareResult(strings.Compare(a.FilePath(), b.FilePath())); !res.isNeutral() { + return res + } + + if next := cmp.Next(); next != nil { + return next.Compare(a, b) + } + + return res +} + +type ByLine struct{ next comparator } + +//nolint:golint +func (cmp ByLine) Next() comparator { return cmp.next } + +//nolint:golint +func (cmp ByLine) Compare(a, b *result.Issue) compareResult { + var res compareResult + + if res = numericCompare(a.Line(), b.Line()); !res.isNeutral() { + return res + } + + if next := cmp.Next(); next != nil { + return next.Compare(a, b) + } + + return res +} + +type ByColumn struct{ next comparator } + +//nolint:golint +func (cmp ByColumn) Next() comparator { return cmp.next } + +//nolint:golint +func (cmp ByColumn) Compare(a, b *result.Issue) compareResult { + var res compareResult + + if res = numericCompare(a.Column(), b.Column()); !res.isNeutral() { + return res + } + + if next := cmp.Next(); next != nil { + return next.Compare(a, b) + } + + return res +} + +func numericCompare(a, b int) compareResult { + var ( + isValuesInvalid = a < 0 || b < 0 + isZeroValuesBoth = a == 0 && b == 0 + isEqual = a == b + isZeroValueInA = b > 0 && a == 0 + isZeroValueInB = a > 0 && b == 0 + ) + + switch { + case isZeroValuesBoth || isEqual: + return Equal + case isValuesInvalid || isZeroValueInA || isZeroValueInB: + return None + case a > b: + return Greater + case a < b: + return Less + } + + return Equal +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/sliceutil/sliceutil.go b/vendor/github.com/golangci/golangci-lint/pkg/sliceutil/sliceutil.go new file mode 100644 index 0000000..cb89e34 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/sliceutil/sliceutil.go @@ -0,0 +1,17 @@ +package sliceutil + +// IndexOf get the index of the given value in the given string slice, +// or -1 if not found. +func IndexOf(slice []string, value string) int { + for i, v := range slice { + if v == value { + return i + } + } + return -1 +} + +// Contains check if a string slice contains a value. +func Contains(slice []string, value string) bool { + return IndexOf(slice, value) != -1 +} diff --git a/vendor/github.com/google/go-cmp/cmp/compare.go b/vendor/github.com/google/go-cmp/cmp/compare.go index 580ae20..86d0903 100644 --- a/vendor/github.com/google/go-cmp/cmp/compare.go +++ b/vendor/github.com/google/go-cmp/cmp/compare.go @@ -1,6 +1,6 @@ // Copyright 2017, The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. +// license that can be found in the LICENSE file. // Package cmp determines equality of values. 
// @@ -95,13 +95,13 @@ func Equal(x, y interface{}, opts ...Option) bool { return s.result.Equal() } -// Diff returns a human-readable report of the differences between two values. -// It returns an empty string if and only if Equal returns true for the same -// input values and options. +// Diff returns a human-readable report of the differences between two values: +// y - x. It returns an empty string if and only if Equal returns true for the +// same input values and options. // // The output is displayed as a literal in pseudo-Go syntax. // At the start of each line, a "-" prefix indicates an element removed from x, -// a "+" prefix to indicates an element added to y, and the lack of a prefix +// a "+" prefix to indicates an element added from y, and the lack of a prefix // indicates an element common to both x and y. If possible, the output // uses fmt.Stringer.String or error.Error methods to produce more humanly // readable outputs. In such cases, the string is prefixed with either an diff --git a/vendor/github.com/google/go-cmp/cmp/export_panic.go b/vendor/github.com/google/go-cmp/cmp/export_panic.go index dfa5d21..5ff0b42 100644 --- a/vendor/github.com/google/go-cmp/cmp/export_panic.go +++ b/vendor/github.com/google/go-cmp/cmp/export_panic.go @@ -1,6 +1,6 @@ // Copyright 2017, The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. +// license that can be found in the LICENSE file. // +build purego diff --git a/vendor/github.com/google/go-cmp/cmp/export_unsafe.go b/vendor/github.com/google/go-cmp/cmp/export_unsafe.go index 351f1a3..21eb548 100644 --- a/vendor/github.com/google/go-cmp/cmp/export_unsafe.go +++ b/vendor/github.com/google/go-cmp/cmp/export_unsafe.go @@ -1,6 +1,6 @@ // Copyright 2017, The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. +// license that can be found in the LICENSE file. // +build !purego diff --git a/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_disable.go b/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_disable.go index fe98dcc..1daaaac 100644 --- a/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_disable.go +++ b/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_disable.go @@ -1,6 +1,6 @@ // Copyright 2017, The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. +// license that can be found in the LICENSE file. // +build !cmp_debug diff --git a/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_enable.go b/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_enable.go index 597b6ae..4b91dbc 100644 --- a/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_enable.go +++ b/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_enable.go @@ -1,6 +1,6 @@ // Copyright 2017, The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. +// license that can be found in the LICENSE file. // +build cmp_debug diff --git a/vendor/github.com/google/go-cmp/cmp/internal/diff/diff.go b/vendor/github.com/google/go-cmp/cmp/internal/diff/diff.go index 730e223..bc196b1 100644 --- a/vendor/github.com/google/go-cmp/cmp/internal/diff/diff.go +++ b/vendor/github.com/google/go-cmp/cmp/internal/diff/diff.go @@ -1,6 +1,6 @@ // Copyright 2017, The Go Authors. All rights reserved. 
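The reworded Diff documentation above frames the report as "y - x": a "-" line is an element present only in x and a "+" line an element present only in y. A small go-cmp usage sketch (the struct and values are made up; the exact report text varies between go-cmp versions):

```go
package main

import (
	"fmt"

	"github.com/google/go-cmp/cmp"
)

type user struct {
	Name string
	Age  int
}

func main() {
	x := user{Name: "alice", Age: 30}
	y := user{Name: "alice", Age: 31}

	if !cmp.Equal(x, y) {
		// The report shows Age: 30 with a "-" prefix (removed from x)
		// and Age: 31 with a "+" prefix (added from y).
		fmt.Println(cmp.Diff(x, y))
	}
}
```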
// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. +// license that can be found in the LICENSE file. // Package diff implements an algorithm for producing edit-scripts. // The edit-script is a sequence of operations needed to transform one list @@ -119,7 +119,7 @@ func (r Result) Similar() bool { return r.NumSame+1 >= r.NumDiff } -var randInt = rand.New(rand.NewSource(time.Now().Unix())).Intn(2) +var randBool = rand.New(rand.NewSource(time.Now().Unix())).Intn(2) == 0 // Difference reports whether two lists of lengths nx and ny are equal // given the definition of equality provided as f. @@ -168,17 +168,6 @@ func Difference(nx, ny int, f EqualFunc) (es EditScript) { // A vertical edge is equivalent to inserting a symbol from list Y. // A diagonal edge is equivalent to a matching symbol between both X and Y. - // To ensure flexibility in changing the algorithm in the future, - // introduce some degree of deliberate instability. - // This is achieved by fiddling the zigzag iterator to start searching - // the graph starting from the bottom-right versus than the top-left. - // The result may differ depending on the starting search location, - // but still produces a valid edit script. - zigzagInit := randInt // either 0 or 1 - if flags.Deterministic { - zigzagInit = 0 - } - // Invariants: // • 0 ≤ fwdPath.X ≤ (fwdFrontier.X, revFrontier.X) ≤ revPath.X ≤ nx // • 0 ≤ fwdPath.Y ≤ (fwdFrontier.Y, revFrontier.Y) ≤ revPath.Y ≤ ny @@ -197,6 +186,11 @@ func Difference(nx, ny int, f EqualFunc) (es EditScript) { // approximately the square-root of the search budget. searchBudget := 4 * (nx + ny) // O(n) + // Running the tests with the "cmp_debug" build tag prints a visualization + // of the algorithm running in real-time. This is educational for + // understanding how the algorithm works. See debug_enable.go. + f = debug.Begin(nx, ny, f, &fwdPath.es, &revPath.es) + // The algorithm below is a greedy, meet-in-the-middle algorithm for // computing sub-optimal edit-scripts between two lists. // @@ -214,22 +208,28 @@ func Difference(nx, ny int, f EqualFunc) (es EditScript) { // frontier towards the opposite corner. // • This algorithm terminates when either the X coordinates or the // Y coordinates of the forward and reverse frontier points ever intersect. - // + // This algorithm is correct even if searching only in the forward direction // or in the reverse direction. We do both because it is commonly observed // that two lists commonly differ because elements were added to the front // or end of the other list. // - // Running the tests with the "cmp_debug" build tag prints a visualization - // of the algorithm running in real-time. This is educational for - // understanding how the algorithm works. See debug_enable.go. - f = debug.Begin(nx, ny, f, &fwdPath.es, &revPath.es) - for { + // Non-deterministically start with either the forward or reverse direction + // to introduce some deliberate instability so that we have the flexibility + // to change this algorithm in the future. + if flags.Deterministic || randBool { + goto forwardSearch + } else { + goto reverseSearch + } + +forwardSearch: + { // Forward search from the beginning. 
if fwdFrontier.X >= revFrontier.X || fwdFrontier.Y >= revFrontier.Y || searchBudget == 0 { - break + goto finishSearch } - for stop1, stop2, i := false, false, zigzagInit; !(stop1 && stop2) && searchBudget > 0; i++ { + for stop1, stop2, i := false, false, 0; !(stop1 && stop2) && searchBudget > 0; i++ { // Search in a diagonal pattern for a match. z := zigzag(i) p := point{fwdFrontier.X + z, fwdFrontier.Y - z} @@ -262,10 +262,14 @@ func Difference(nx, ny int, f EqualFunc) (es EditScript) { } else { fwdFrontier.Y++ } + goto reverseSearch + } +reverseSearch: + { // Reverse search from the end. if fwdFrontier.X >= revFrontier.X || fwdFrontier.Y >= revFrontier.Y || searchBudget == 0 { - break + goto finishSearch } for stop1, stop2, i := false, false, 0; !(stop1 && stop2) && searchBudget > 0; i++ { // Search in a diagonal pattern for a match. @@ -300,8 +304,10 @@ func Difference(nx, ny int, f EqualFunc) (es EditScript) { } else { revFrontier.Y-- } + goto forwardSearch } +finishSearch: // Join the forward and reverse paths and then append the reverse path. fwdPath.connect(revPath.point, f) for i := len(revPath.es) - 1; i >= 0; i-- { diff --git a/vendor/github.com/google/go-cmp/cmp/internal/flags/flags.go b/vendor/github.com/google/go-cmp/cmp/internal/flags/flags.go index a9e7fc0..d8e459c 100644 --- a/vendor/github.com/google/go-cmp/cmp/internal/flags/flags.go +++ b/vendor/github.com/google/go-cmp/cmp/internal/flags/flags.go @@ -1,6 +1,6 @@ // Copyright 2019, The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. +// license that can be found in the LICENSE file. package flags diff --git a/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_legacy.go b/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_legacy.go index 01aed0a..82d1d7f 100644 --- a/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_legacy.go +++ b/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_legacy.go @@ -1,6 +1,6 @@ // Copyright 2019, The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. +// license that can be found in the LICENSE file. // +build !go1.10 diff --git a/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_recent.go b/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_recent.go index c0b667f..8646f05 100644 --- a/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_recent.go +++ b/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_recent.go @@ -1,6 +1,6 @@ // Copyright 2019, The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. +// license that can be found in the LICENSE file. // +build go1.10 diff --git a/vendor/github.com/google/go-cmp/cmp/internal/function/func.go b/vendor/github.com/google/go-cmp/cmp/internal/function/func.go index ace1dbe..d127d43 100644 --- a/vendor/github.com/google/go-cmp/cmp/internal/function/func.go +++ b/vendor/github.com/google/go-cmp/cmp/internal/function/func.go @@ -1,6 +1,6 @@ // Copyright 2017, The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. +// license that can be found in the LICENSE file. // Package function provides functionality for identifying function types. 
package function diff --git a/vendor/github.com/google/go-cmp/cmp/internal/value/name.go b/vendor/github.com/google/go-cmp/cmp/internal/value/name.go index 8228e7d..b6c12ce 100644 --- a/vendor/github.com/google/go-cmp/cmp/internal/value/name.go +++ b/vendor/github.com/google/go-cmp/cmp/internal/value/name.go @@ -1,6 +1,6 @@ // Copyright 2020, The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. +// license that can be found in the LICENSE file. package value diff --git a/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_purego.go b/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_purego.go index e9e384a..44f4a5a 100644 --- a/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_purego.go +++ b/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_purego.go @@ -1,6 +1,6 @@ // Copyright 2018, The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. +// license that can be found in the LICENSE file. // +build purego diff --git a/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_unsafe.go b/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_unsafe.go index b50c17e..a605953 100644 --- a/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_unsafe.go +++ b/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_unsafe.go @@ -1,6 +1,6 @@ // Copyright 2018, The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. +// license that can be found in the LICENSE file. // +build !purego diff --git a/vendor/github.com/google/go-cmp/cmp/internal/value/sort.go b/vendor/github.com/google/go-cmp/cmp/internal/value/sort.go index 24fbae6..98533b0 100644 --- a/vendor/github.com/google/go-cmp/cmp/internal/value/sort.go +++ b/vendor/github.com/google/go-cmp/cmp/internal/value/sort.go @@ -1,6 +1,6 @@ // Copyright 2017, The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. +// license that can be found in the LICENSE file. package value diff --git a/vendor/github.com/google/go-cmp/cmp/internal/value/zero.go b/vendor/github.com/google/go-cmp/cmp/internal/value/zero.go index 06a8ffd..9147a29 100644 --- a/vendor/github.com/google/go-cmp/cmp/internal/value/zero.go +++ b/vendor/github.com/google/go-cmp/cmp/internal/value/zero.go @@ -1,6 +1,6 @@ // Copyright 2017, The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. +// license that can be found in the LICENSE file. package value diff --git a/vendor/github.com/google/go-cmp/cmp/options.go b/vendor/github.com/google/go-cmp/cmp/options.go index abbd2a6..e57b9eb 100644 --- a/vendor/github.com/google/go-cmp/cmp/options.go +++ b/vendor/github.com/google/go-cmp/cmp/options.go @@ -1,6 +1,6 @@ // Copyright 2017, The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. +// license that can be found in the LICENSE file. package cmp @@ -225,11 +225,14 @@ func (validator) apply(s *state, vx, vy reflect.Value) { // Unable to Interface implies unexported field without visibility access. 
if !vx.CanInterface() || !vy.CanInterface() { - const help = "consider using a custom Comparer; if you control the implementation of type, you can also consider using an Exporter, AllowUnexported, or cmpopts.IgnoreUnexported" + help := "consider using a custom Comparer; if you control the implementation of type, you can also consider using an Exporter, AllowUnexported, or cmpopts.IgnoreUnexported" var name string if t := s.curPath.Index(-2).Type(); t.Name() != "" { // Named type with unexported fields. name = fmt.Sprintf("%q.%v", t.PkgPath(), t.Name()) // e.g., "path/to/package".MyType + if _, ok := reflect.New(t).Interface().(error); ok { + help = "consider using cmpopts.EquateErrors to compare error values" + } } else { // Unnamed type with unexported fields. Derive PkgPath from field. var pkgPath string diff --git a/vendor/github.com/google/go-cmp/cmp/path.go b/vendor/github.com/google/go-cmp/cmp/path.go index 603dbb0..3d45c1a 100644 --- a/vendor/github.com/google/go-cmp/cmp/path.go +++ b/vendor/github.com/google/go-cmp/cmp/path.go @@ -1,6 +1,6 @@ // Copyright 2017, The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. +// license that can be found in the LICENSE file. package cmp diff --git a/vendor/github.com/google/go-cmp/cmp/report.go b/vendor/github.com/google/go-cmp/cmp/report.go index aafcb36..f43cd12 100644 --- a/vendor/github.com/google/go-cmp/cmp/report.go +++ b/vendor/github.com/google/go-cmp/cmp/report.go @@ -1,6 +1,6 @@ // Copyright 2017, The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. +// license that can be found in the LICENSE file. package cmp diff --git a/vendor/github.com/google/go-cmp/cmp/report_compare.go b/vendor/github.com/google/go-cmp/cmp/report_compare.go index 9e21809..a6c070c 100644 --- a/vendor/github.com/google/go-cmp/cmp/report_compare.go +++ b/vendor/github.com/google/go-cmp/cmp/report_compare.go @@ -1,6 +1,6 @@ // Copyright 2019, The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. +// license that can be found in the LICENSE file. package cmp diff --git a/vendor/github.com/google/go-cmp/cmp/report_references.go b/vendor/github.com/google/go-cmp/cmp/report_references.go index d620c2c..be31b33 100644 --- a/vendor/github.com/google/go-cmp/cmp/report_references.go +++ b/vendor/github.com/google/go-cmp/cmp/report_references.go @@ -1,6 +1,6 @@ // Copyright 2020, The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. +// license that can be found in the LICENSE file. package cmp diff --git a/vendor/github.com/google/go-cmp/cmp/report_reflect.go b/vendor/github.com/google/go-cmp/cmp/report_reflect.go index 2d722ea..33f0357 100644 --- a/vendor/github.com/google/go-cmp/cmp/report_reflect.go +++ b/vendor/github.com/google/go-cmp/cmp/report_reflect.go @@ -1,10 +1,11 @@ // Copyright 2019, The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. +// license that can be found in the LICENSE file. package cmp import ( + "bytes" "fmt" "reflect" "strconv" @@ -125,21 +126,20 @@ func (opts formatOptions) FormatValue(v reflect.Value, parentKind reflect.Kind, // implementations crash when doing so. 
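The options.go change above makes the unexported-field help text suggest cmpopts.EquateErrors when the offending type implements error. A sketch of that option in use: EquateErrors treats two errors as equal when errors.Is matches in either direction, so comparisons involving wrapped errors no longer need to reach into their unexported internals (the result struct and file name here are illustrative):

```go
package main

import (
	"fmt"
	"os"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
)

type result struct {
	Path string
	Err  error
}

func main() {
	x := result{Path: "a.txt", Err: os.ErrNotExist}
	y := result{Path: "a.txt", Err: fmt.Errorf("open a.txt: %w", os.ErrNotExist)}

	// EquateErrors compares the Err fields with errors.Is instead of
	// descending into the (unexported) fields of the error values.
	fmt.Println(cmp.Equal(x, y, cmpopts.EquateErrors())) // true
}
```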
if (t.Kind() != reflect.Ptr && t.Kind() != reflect.Interface) || !v.IsNil() { var prefix, strVal string - switch v := v.Interface().(type) { - case error: - prefix, strVal = "e", v.Error() - case fmt.Stringer: - prefix, strVal = "s", v.String() - } - if prefix != "" { - maxLen := len(strVal) - if opts.LimitVerbosity { - maxLen = (1 << opts.verbosity()) << 5 // 32, 64, 128, 256, etc... + func() { + // Swallow and ignore any panics from String or Error. + defer func() { recover() }() + switch v := v.Interface().(type) { + case error: + strVal = v.Error() + prefix = "e" + case fmt.Stringer: + strVal = v.String() + prefix = "s" } - if len(strVal) > maxLen+len(textEllipsis) { - return textLine(prefix + formatString(strVal[:maxLen]) + string(textEllipsis)) - } - return textLine(prefix + formatString(strVal)) + }() + if prefix != "" { + return opts.formatString(prefix, strVal) } } } @@ -171,14 +171,7 @@ func (opts formatOptions) FormatValue(v reflect.Value, parentKind reflect.Kind, case reflect.Complex64, reflect.Complex128: return textLine(fmt.Sprint(v.Complex())) case reflect.String: - maxLen := v.Len() - if opts.LimitVerbosity { - maxLen = (1 << opts.verbosity()) << 5 // 32, 64, 128, 256, etc... - } - if v.Len() > maxLen+len(textEllipsis) { - return textLine(formatString(v.String()[:maxLen]) + string(textEllipsis)) - } - return textLine(formatString(v.String())) + return opts.formatString("", v.String()) case reflect.UnsafePointer, reflect.Chan, reflect.Func: return textLine(formatPointer(value.PointerOf(v), true)) case reflect.Struct: @@ -210,6 +203,17 @@ func (opts formatOptions) FormatValue(v reflect.Value, parentKind reflect.Kind, if v.IsNil() { return textNil } + + // Check whether this is a []byte of text data. + if t.Elem() == reflect.TypeOf(byte(0)) { + b := v.Bytes() + isPrintSpace := func(r rune) bool { return unicode.IsPrint(r) && unicode.IsSpace(r) } + if len(b) > 0 && utf8.Valid(b) && len(bytes.TrimFunc(b, isPrintSpace)) == 0 { + out = opts.formatString("", string(b)) + return opts.WithTypeMode(emitType).FormatType(t, out) + } + } + fallthrough case reflect.Array: maxLen := v.Len() @@ -295,6 +299,49 @@ func (opts formatOptions) FormatValue(v reflect.Value, parentKind reflect.Kind, } } +func (opts formatOptions) formatString(prefix, s string) textNode { + maxLen := len(s) + maxLines := strings.Count(s, "\n") + 1 + if opts.LimitVerbosity { + maxLen = (1 << opts.verbosity()) << 5 // 32, 64, 128, 256, etc... + maxLines = (1 << opts.verbosity()) << 2 // 4, 8, 16, 32, 64, etc... + } + + // For multiline strings, use the triple-quote syntax, + // but only use it when printing removed or inserted nodes since + // we only want the extra verbosity for those cases. 
+ lines := strings.Split(strings.TrimSuffix(s, "\n"), "\n") + isTripleQuoted := len(lines) >= 4 && (opts.DiffMode == '-' || opts.DiffMode == '+') + for i := 0; i < len(lines) && isTripleQuoted; i++ { + lines[i] = strings.TrimPrefix(strings.TrimSuffix(lines[i], "\r"), "\r") // trim leading/trailing carriage returns for legacy Windows endline support + isPrintable := func(r rune) bool { + return unicode.IsPrint(r) || r == '\t' // specially treat tab as printable + } + line := lines[i] + isTripleQuoted = !strings.HasPrefix(strings.TrimPrefix(line, prefix), `"""`) && !strings.HasPrefix(line, "...") && strings.TrimFunc(line, isPrintable) == "" && len(line) <= maxLen + } + if isTripleQuoted { + var list textList + list = append(list, textRecord{Diff: opts.DiffMode, Value: textLine(prefix + `"""`), ElideComma: true}) + for i, line := range lines { + if numElided := len(lines) - i; i == maxLines-1 && numElided > 1 { + comment := commentString(fmt.Sprintf("%d elided lines", numElided)) + list = append(list, textRecord{Diff: opts.DiffMode, Value: textEllipsis, ElideComma: true, Comment: comment}) + break + } + list = append(list, textRecord{Diff: opts.DiffMode, Value: textLine(line), ElideComma: true}) + } + list = append(list, textRecord{Diff: opts.DiffMode, Value: textLine(prefix + `"""`), ElideComma: true}) + return &textWrap{Prefix: "(", Value: list, Suffix: ")"} + } + + // Format the string as a single-line quoted string. + if len(s) > maxLen+len(textEllipsis) { + return textLine(prefix + formatString(s[:maxLen]) + string(textEllipsis)) + } + return textLine(prefix + formatString(s)) +} + // formatMapKey formats v as if it were a map key. // The result is guaranteed to be a single line. func formatMapKey(v reflect.Value, disambiguate bool, ptrs *pointerReferences) string { @@ -304,6 +351,8 @@ func formatMapKey(v reflect.Value, disambiguate bool, ptrs *pointerReferences) s opts.PrintAddresses = disambiguate opts.AvoidStringer = disambiguate opts.QualifiedNames = disambiguate + opts.VerbosityLevel = maxVerbosityPreset + opts.LimitVerbosity = true s := opts.FormatValue(v, reflect.Map, ptrs).String() return strings.TrimSpace(s) } diff --git a/vendor/github.com/google/go-cmp/cmp/report_slices.go b/vendor/github.com/google/go-cmp/cmp/report_slices.go index 35315da..da04caf 100644 --- a/vendor/github.com/google/go-cmp/cmp/report_slices.go +++ b/vendor/github.com/google/go-cmp/cmp/report_slices.go @@ -1,6 +1,6 @@ // Copyright 2019, The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. +// license that can be found in the LICENSE file. package cmp diff --git a/vendor/github.com/google/go-cmp/cmp/report_text.go b/vendor/github.com/google/go-cmp/cmp/report_text.go index 8b12c05..0fd46d7 100644 --- a/vendor/github.com/google/go-cmp/cmp/report_text.go +++ b/vendor/github.com/google/go-cmp/cmp/report_text.go @@ -1,6 +1,6 @@ // Copyright 2019, The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. +// license that can be found in the LICENSE file. package cmp diff --git a/vendor/github.com/google/go-cmp/cmp/report_value.go b/vendor/github.com/google/go-cmp/cmp/report_value.go index 83031a7..668d470 100644 --- a/vendor/github.com/google/go-cmp/cmp/report_value.go +++ b/vendor/github.com/google/go-cmp/cmp/report_value.go @@ -1,6 +1,6 @@ // Copyright 2019, The Go Authors. All rights reserved. 
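The new formatString helper above caps both the string length and the line count as a function of the verbosity preset when LimitVerbosity is set, and the limits grow geometrically. A quick illustration of the progression (verbosity levels 0 through 4 assumed for the sake of the example):

```go
package main

import "fmt"

func main() {
	for v := 0; v <= 4; v++ {
		maxLen := (1 << v) << 5   // 32, 64, 128, 256, 512 bytes of string data
		maxLines := (1 << v) << 2 // 4, 8, 16, 32, 64 lines before eliding
		fmt.Println(v, maxLen, maxLines)
	}
}
```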
// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE.md file. +// license that can be found in the LICENSE file. package cmp diff --git a/vendor/github.com/gostaticanalysis/analysisutil/README.md b/vendor/github.com/gostaticanalysis/analysisutil/README.md index c031e16..d8fd3d2 100644 --- a/vendor/github.com/gostaticanalysis/analysisutil/README.md +++ b/vendor/github.com/gostaticanalysis/analysisutil/README.md @@ -1,10 +1,5 @@ # analysisutil -[![godoc.org][godoc-badge]][godoc] +[![PkgGoDev](https://pkg.go.dev/badge/github.com/gostaticanalysis/analysisutil)](https://pkg.go.dev/github.com/gostaticanalysis/analysisutil) Utilities for x/tools/go/analysis package. - - -[godoc]: https://godoc.org/github.com/gostaticanalysis/analysisutil -[godoc-badge]: https://img.shields.io/badge/godoc-reference-4F73B3.svg?style=flat-square&label=%20godoc.org - diff --git a/vendor/github.com/gostaticanalysis/analysisutil/call.go b/vendor/github.com/gostaticanalysis/analysisutil/call.go new file mode 100644 index 0000000..e3d98d1 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/analysisutil/call.go @@ -0,0 +1,405 @@ +package analysisutil + +import ( + "go/types" + + "golang.org/x/tools/go/ssa" +) + +// CalledChecker checks a function is called. +// See From and Func. +type CalledChecker struct { + Ignore func(instr ssa.Instruction) bool +} + +// NotIn checks whether receiver's method is called in a function. +// If there is no methods calling at a path from an instruction +// which type is receiver to all return instruction, NotIn returns these instructions. +func (c *CalledChecker) NotIn(f *ssa.Function, receiver types.Type, methods ...*types.Func) []ssa.Instruction { + done := map[ssa.Value]bool{} + var instrs []ssa.Instruction + for _, b := range f.Blocks { + for i, instr := range b.Instrs { + v, _ := instr.(ssa.Value) + if v == nil || done[v] { + continue + } + + if v, _ := v.(*ssa.UnOp); v != nil && done[v.X] { + continue + } + + called, ok := c.From(b, i, receiver, methods...) + if ok && !called { + instrs = append(instrs, instr) + done[v] = true + if v, _ := v.(*ssa.UnOp); v != nil { + done[v.X] = true + } + } + } + } + return instrs +} + +// Func returns true when f is called in the instr. +// If recv is not nil, Func also checks the receiver. +func (c *CalledChecker) Func(instr ssa.Instruction, recv ssa.Value, f *types.Func) bool { + + if c.Ignore != nil && c.Ignore(instr) { + return false + } + + call, ok := instr.(ssa.CallInstruction) + if !ok { + return false + } + + common := call.Common() + if common == nil { + return false + } + + callee := common.StaticCallee() + if callee == nil { + return false + } + + fn, ok := callee.Object().(*types.Func) + if !ok { + return false + } + + if recv != nil && + common.Signature().Recv() != nil && + (len(common.Args) == 0 && recv != nil || common.Args[0] != recv && + !referrer(recv, common.Args[0])) { + return false + } + + return fn == f +} + +func referrer(a, b ssa.Value) bool { + return isReferrerOf(a, b) || isReferrerOf(b, a) +} + +func isReferrerOf(a, b ssa.Value) bool { + if a == nil || b == nil { + return false + } + if b.Referrers() != nil { + brs := *b.Referrers() + + for _, br := range brs { + brv, ok := br.(ssa.Value) + if !ok { + continue + } + if brv == a { + return true + } + } + } + return false +} + +// From checks whether receiver's method is called in an instruction +// which belogns to after i-th instructions, or in succsor blocks of b. +// The first result is above value. 
+// The second result is whether type of i-th instruction does not much receiver +// or matches with ignore cases. +func (c *CalledChecker) From(b *ssa.BasicBlock, i int, receiver types.Type, methods ...*types.Func) (called, ok bool) { + if b == nil || i < 0 || i >= len(b.Instrs) || + receiver == nil || len(methods) == 0 { + return false, false + } + + v, ok := b.Instrs[i].(ssa.Value) + if !ok { + return false, false + } + + from := &calledFrom{recv: v, fs: methods, ignore: c.Ignore} + + if !from.isRecv(receiver, v.Type()) { + return false, false + } + + if from.ignored() { + return false, false + } + + if from.instrs(b.Instrs[i+1:]) || + from.succs(b) { + return true, true + } + + from.done = nil + if from.storedInInstrs(b.Instrs[i+1:]) || + from.storedInSuccs(b) { + return false, false + } + + return false, true +} + +type calledFrom struct { + recv ssa.Value + fs []*types.Func + done map[*ssa.BasicBlock]bool + ignore func(ssa.Instruction) bool +} + +func (c *calledFrom) ignored() bool { + + switch v := c.recv.(type) { + case *ssa.UnOp: + switch v.X.(type) { + case *ssa.FreeVar, *ssa.Global: + return true + } + } + + refs := c.recv.Referrers() + if refs == nil { + return false + } + + for _, ref := range *refs { + done := map[ssa.Instruction]bool{} + if !c.isOwn(ref) && + ((c.ignore != nil && c.ignore(ref)) || + c.isRet(ref, done) || c.isArg(ref)) { + return true + } + } + + return false +} + +func (c *calledFrom) isOwn(instr ssa.Instruction) bool { + v, ok := instr.(ssa.Value) + if !ok { + return false + } + return v == c.recv +} + +func (c *calledFrom) isRet(instr ssa.Instruction, done map[ssa.Instruction]bool) bool { + if done[instr] { + return false + } + done[instr] = true + + switch instr := instr.(type) { + case *ssa.Return: + return true + case *ssa.MapUpdate: + return c.isRetInRefs(instr.Map, done) + case *ssa.Store: + if instr, _ := instr.Addr.(ssa.Instruction); instr != nil { + return c.isRet(instr, done) + } + return c.isRetInRefs(instr.Addr, done) + case *ssa.FieldAddr: + return c.isRetInRefs(instr.X, done) + case ssa.Value: + return c.isRetInRefs(instr, done) + default: + return false + } +} + +func (c *calledFrom) isRetInRefs(v ssa.Value, done map[ssa.Instruction]bool) bool { + refs := v.Referrers() + if refs == nil { + return false + } + for _, ref := range *refs { + if c.isRet(ref, done) { + return true + } + } + return false +} + +func (c *calledFrom) isArg(instr ssa.Instruction) bool { + + call, ok := instr.(ssa.CallInstruction) + if !ok { + return false + } + + common := call.Common() + if common == nil { + return false + } + + args := common.Args + if common.Signature().Recv() != nil { + args = args[1:] + } + + for i := range args { + if args[i] == c.recv { + return true + } + } + + return false +} + +func (c *calledFrom) instrs(instrs []ssa.Instruction) bool { + for _, instr := range instrs { + for _, f := range c.fs { + if Called(instr, c.recv, f) { + return true + } + } + } + return false +} + +func (c *calledFrom) succs(b *ssa.BasicBlock) bool { + if c.done == nil { + c.done = map[*ssa.BasicBlock]bool{} + } + + if c.done[b] { + return true + } + c.done[b] = true + + if len(b.Succs) == 0 { + return false + } + + for _, s := range b.Succs { + if !c.instrs(s.Instrs) && !c.succs(s) { + return false + } + } + + return true +} + +func (c *calledFrom) storedInInstrs(instrs []ssa.Instruction) bool { + for _, instr := range instrs { + switch instr := instr.(type) { + case *ssa.Store: + if instr.Val == c.recv { + return true + } + } + } + return false +} + +func (c 
*calledFrom) storedInSuccs(b *ssa.BasicBlock) bool { + if c.done == nil { + c.done = map[*ssa.BasicBlock]bool{} + } + + if c.done[b] { + return true + } + c.done[b] = true + + if len(b.Succs) == 0 { + return false + } + + for _, s := range b.Succs { + if !c.storedInInstrs(s.Instrs) && !c.succs(s) { + return false + } + } + + return true +} + +func (c *calledFrom) isRecv(recv, typ types.Type) bool { + return recv == typ || identical(recv, typ) || + c.isRecvInTuple(recv, typ) || c.isRecvInEmbedded(recv, typ) +} + +func (c *calledFrom) isRecvInTuple(recv, typ types.Type) bool { + tuple, _ := typ.(*types.Tuple) + if tuple == nil { + return false + } + + for i := 0; i < tuple.Len(); i++ { + if c.isRecv(recv, tuple.At(i).Type()) { + return true + } + } + + return false +} + +func (c *calledFrom) isRecvInEmbedded(recv, typ types.Type) bool { + + var st *types.Struct + switch typ := typ.(type) { + case *types.Struct: + st = typ + case *types.Pointer: + return c.isRecvInEmbedded(recv, typ.Elem()) + case *types.Named: + return c.isRecvInEmbedded(recv, typ.Underlying()) + default: + return false + } + + for i := 0; i < st.NumFields(); i++ { + field := st.Field(i) + if !field.Embedded() { + continue + } + + ft := field.Type() + if c.isRecv(recv, ft) { + return true + } + + var ptrOrUnptr types.Type + switch ft := ft.(type) { + case *types.Pointer: + // struct { *T } -> T + ptrOrUnptr = ft.Elem() + default: + // struct { T } -> *T + ptrOrUnptr = types.NewPointer(ft) + } + + if c.isRecv(recv, ptrOrUnptr) { + return true + } + } + + return false +} + +// NotCalledIn checks whether receiver's method is called in a function. +// If there is no methods calling at a path from an instruction +// which type is receiver to all return instruction, NotCalledIn returns these instructions. +func NotCalledIn(f *ssa.Function, receiver types.Type, methods ...*types.Func) []ssa.Instruction { + return new(CalledChecker).NotIn(f, receiver, methods...) +} + +// CalledFrom checks whether receiver's method is called in an instruction +// which belogns to after i-th instructions, or in succsor blocks of b. +// The first result is above value. +// The second result is whether type of i-th instruction does not much receiver +// or matches with ignore cases. +func CalledFrom(b *ssa.BasicBlock, i int, receiver types.Type, methods ...*types.Func) (called, ok bool) { + return new(CalledChecker).From(b, i, receiver, methods...) +} + +// Called returns true when f is called in the instr. +// If recv is not nil, Called also checks the receiver. +func Called(instr ssa.Instruction, recv ssa.Value, f *types.Func) bool { + return new(CalledChecker).Func(instr, recv, f) +} diff --git a/vendor/github.com/gostaticanalysis/analysisutil/diagnostic.go b/vendor/github.com/gostaticanalysis/analysisutil/diagnostic.go new file mode 100644 index 0000000..a911db6 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/analysisutil/diagnostic.go @@ -0,0 +1,45 @@ +package analysisutil + +import ( + "go/token" + + "github.com/gostaticanalysis/comment" + "github.com/gostaticanalysis/comment/passes/commentmap" + "golang.org/x/tools/go/analysis" +) + +// ReportWithoutIgnore returns a report function which can set to (analysis.Pass).Report. +// The report function ignores a diagnostic which annotated by ignore comment as the below. +// //lint:ignore Check1[,Check2,...,CheckN] reason +// names is a list of checker names. +// If names was omitted, the report function ignores by pass.Analyzer.Name. 
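call.go above lets a checker ask whether a receiver's method is called on every path from the point where a value is created to each return. A sketch of how an analyzer might use the exported helpers; the example.com/db package, its Conn type, and the closecheck analyzer name are hypothetical, while buildssa, TypeOf, MethodOf, and NotCalledIn are the entry points added or reused in this diff:

```go
package checker

import (
	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/passes/buildssa"

	"github.com/gostaticanalysis/analysisutil"
)

// Analyzer reports values of the hypothetical example.com/db.Conn type
// whose Close method is never called on some path to a return.
var Analyzer = &analysis.Analyzer{
	Name:     "closecheck",
	Doc:      "checks that db.Conn.Close is called",
	Requires: []*analysis.Analyzer{buildssa.Analyzer},
	Run:      run,
}

func run(pass *analysis.Pass) (interface{}, error) {
	ssaResult := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA)

	// Resolve *db.Conn and its Close method in the package under analysis.
	connType := analysisutil.TypeOf(pass, "example.com/db", "*Conn")
	closeMethod := analysisutil.MethodOf(connType, "Close")
	if connType == nil || closeMethod == nil {
		return nil, nil // the package under analysis does not use example.com/db
	}

	for _, fn := range ssaResult.SrcFuncs {
		// NotCalledIn returns the instructions that produce a *Conn for which
		// no path to a return calls Close.
		for _, instr := range analysisutil.NotCalledIn(fn, connType, closeMethod) {
			pass.Reportf(instr.Pos(), "db.Conn.Close is not called")
		}
	}
	return nil, nil
}
```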
+func ReportWithoutIgnore(pass *analysis.Pass, names ...string) func(analysis.Diagnostic) { + cmaps, _ := pass.ResultOf[commentmap.Analyzer].(comment.Maps) + if cmaps == nil { + cmaps = comment.New(pass.Fset, pass.Files) + } + + if len(names) == 0 { + names = []string{pass.Analyzer.Name} + } + + report := pass.Report // original report func + + return func(d analysis.Diagnostic) { + start := pass.Fset.File(d.Pos).Line(d.Pos) + end := start + if d.End != token.NoPos { + end = pass.Fset.File(d.End).Line(d.End) + } + + for l := start; l <= end; l++ { + for _, n := range names { + if cmaps.IgnoreLine(pass.Fset, l, n) { + return + } + } + } + + report(d) + } +} diff --git a/vendor/github.com/gostaticanalysis/analysisutil/go.mod b/vendor/github.com/gostaticanalysis/analysisutil/go.mod index 7e3e55b..5ca7c62 100644 --- a/vendor/github.com/gostaticanalysis/analysisutil/go.mod +++ b/vendor/github.com/gostaticanalysis/analysisutil/go.mod @@ -2,4 +2,7 @@ module github.com/gostaticanalysis/analysisutil go 1.12 -require golang.org/x/tools v0.0.0-20190311215038-5c2858a9cfe5 +require ( + github.com/gostaticanalysis/comment v1.4.1 + golang.org/x/tools v0.0.0-20200820010801-b793a1359eac +) diff --git a/vendor/github.com/gostaticanalysis/analysisutil/go.sum b/vendor/github.com/gostaticanalysis/analysisutil/go.sum index 16c8787..134e67d 100644 --- a/vendor/github.com/gostaticanalysis/analysisutil/go.sum +++ b/vendor/github.com/gostaticanalysis/analysisutil/go.sum @@ -1,6 +1,37 @@ +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/gostaticanalysis/comment v1.3.0 h1:wTVgynbFu8/nz6SGgywA0TcyIoAVsYc7ai/Zp5xNGlw= +github.com/gostaticanalysis/comment v1.3.0/go.mod h1:xMicKDx7XRXYdVwY9f9wQpDJVnqWxw9wCauCMKp+IBI= +github.com/gostaticanalysis/comment v1.4.1 h1:xHopR5L2lRz6OsjH4R2HG5wRhW9ySl3FsHIvi5pcXwc= +github.com/gostaticanalysis/comment v1.4.1/go.mod h1:ih6ZxzTHLdadaiSnF5WY3dxUoXfXAlTaRzuaNDlSado= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/mod v0.2.0 h1:KU7oHjnv3XNWfa5COkzUifxZmxp1TyI7ImMXqFxLwvQ= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync 
v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/tools v0.0.0-20190311215038-5c2858a9cfe5 h1:ZcPpqKMdoZeNQ/4GHlyY4COf8n8SmpPv6mcqF1+VPSM= -golang.org/x/tools v0.0.0-20190311215038-5c2858a9cfe5/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190307163923-6a08e3108db3 h1:2oZsfYnKfYzL4I57uYiRFsUf0bqlLkiuw8nnj3+voUA= +golang.org/x/tools v0.0.0-20190307163923-6a08e3108db3/go.mod h1:25r3+/G6/xytQM8iWZKq3Hn0kr0rgFKPUNVEL/dr3z4= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200624225443-88f3c62a19ff h1:foic6oVZ4MKltJC6MXzuFZFswE7NCjjtc0Hxbyblawc= +golang.org/x/tools v0.0.0-20200624225443-88f3c62a19ff/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200820010801-b793a1359eac h1:DugppSxw0LSF8lcjaODPJZoDzq0ElTGskTst3ZaBkHI= +golang.org/x/tools v0.0.0-20200820010801-b793a1359eac/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/vendor/github.com/gostaticanalysis/analysisutil/pkg.go b/vendor/github.com/gostaticanalysis/analysisutil/pkg.go index c98710d..b64150d 100644 --- a/vendor/github.com/gostaticanalysis/analysisutil/pkg.go +++ b/vendor/github.com/gostaticanalysis/analysisutil/pkg.go @@ -2,14 +2,17 @@ package analysisutil import ( "go/types" + "strconv" "strings" + + "golang.org/x/tools/go/analysis" ) -// RemoVendor removes vendoring infomation from import path. +// RemoVendor removes vendoring information from import path. func RemoveVendor(path string) string { - i := strings.Index(path, "vendor") + i := strings.Index(path, "vendor/") if i >= 0 { - return path[i+len("vendor")+1:] + return path[i+len("vendor/"):] } return path } @@ -24,3 +27,23 @@ func LookupFromImports(imports []*types.Package, path, name string) types.Object } return nil } + +// Imported returns true when the given pass imports the pkg. 
+func Imported(pkgPath string, pass *analysis.Pass) bool { + fs := pass.Files + if len(fs) == 0 { + return false + } + for _, f := range fs { + for _, i := range f.Imports { + path, err := strconv.Unquote(i.Path.Value) + if err != nil { + continue + } + if RemoveVendor(path) == pkgPath { + return true + } + } + } + return false +} diff --git a/vendor/github.com/gostaticanalysis/analysisutil/ssa.go b/vendor/github.com/gostaticanalysis/analysisutil/ssa.go index 3791586..517f6b9 100644 --- a/vendor/github.com/gostaticanalysis/analysisutil/ssa.go +++ b/vendor/github.com/gostaticanalysis/analysisutil/ssa.go @@ -1,6 +1,8 @@ package analysisutil -import "golang.org/x/tools/go/ssa" +import ( + "golang.org/x/tools/go/ssa" +) // IfInstr returns *ssa.If which is contained in the block b. // If the block b has not any if instruction, IfInstr returns nil. @@ -29,3 +31,116 @@ func Phi(b *ssa.BasicBlock) (phis []*ssa.Phi) { } return } + +// Returns returns a slice of *ssa.Return in the function. +func Returns(v ssa.Value) []*ssa.Return { + var fn *ssa.Function + switch v := v.(type) { + case *ssa.Function: + fn = v + case *ssa.MakeClosure: + return Returns(v.Fn) + default: + return nil + } + + var rets []*ssa.Return + done := map[*ssa.BasicBlock]bool{} + for _, b := range fn.Blocks { + rets = append(rets, returnsInBlock(b, done)...) + } + return rets +} + +func returnsInBlock(b *ssa.BasicBlock, done map[*ssa.BasicBlock]bool) (rets []*ssa.Return) { + if done[b] { + return + } + done[b] = true + + if len(b.Instrs) != 0 { + switch instr := b.Instrs[len(b.Instrs)-1].(type) { + case *ssa.Return: + rets = append(rets, instr) + } + } + + for _, s := range b.Succs { + rets = append(rets, returnsInBlock(s, done)...) + } + return +} + +// BinOp returns binary operator values which are contained in the block b. +func BinOp(b *ssa.BasicBlock) []*ssa.BinOp { + var binops []*ssa.BinOp + for _, instr := range b.Instrs { + if binop, ok := instr.(*ssa.BinOp); ok { + binops = append(binops, binop) + } + } + return binops +} + +// Used returns an instruction which uses the value in the instructions. 
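pkg.go above fixes RemoveVendor to cut on the "vendor/" separator rather than the bare substring "vendor", and adds Imported for asking whether the analyzed files import a given package. The path handling, with illustrative import paths:

```go
package main

import (
	"fmt"

	"github.com/gostaticanalysis/analysisutil"
)

func main() {
	// Vendored import paths are rewritten back to their canonical form.
	fmt.Println(analysisutil.RemoveVendor("a/vendor/github.com/foo/bar")) // github.com/foo/bar

	// A path that merely contains the substring "vendor" is now left alone;
	// indexing on "vendor" without the trailing slash mangled such paths.
	fmt.Println(analysisutil.RemoveVendor("github.com/vendorize/pkg")) // github.com/vendorize/pkg
}
```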
+func Used(v ssa.Value, instrs []ssa.Instruction) ssa.Instruction { + if len(instrs) == 0 || v.Referrers() == nil { + return nil + } + + for _, instr := range instrs { + if used := usedInInstr(v, instr); used != nil { + return used + } + } + + return nil +} + +func usedInInstr(v ssa.Value, instr ssa.Instruction) ssa.Instruction { + switch instr := instr.(type) { + case *ssa.MakeClosure: + return usedInClosure(v, instr) + default: + operands := instr.Operands(nil) + for _, x := range operands { + if x != nil && *x == v { + return instr + } + } + } + + switch v := v.(type) { + case *ssa.UnOp: + return usedInInstr(v.X, instr) + } + + return nil +} + +func usedInClosure(v ssa.Value, instr *ssa.MakeClosure) ssa.Instruction { + fn, _ := instr.Fn.(*ssa.Function) + if fn == nil { + return nil + } + + var fv *ssa.FreeVar + for i := range instr.Bindings { + if instr.Bindings[i] == v { + fv = fn.FreeVars[i] + break + } + } + + if fv == nil { + return nil + } + + for _, b := range fn.Blocks { + if used := Used(fv, b.Instrs); used != nil { + return used + } + } + + return nil +} diff --git a/vendor/github.com/gostaticanalysis/analysisutil/ssainspect.go b/vendor/github.com/gostaticanalysis/analysisutil/ssainspect.go new file mode 100644 index 0000000..2f8a165 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/analysisutil/ssainspect.go @@ -0,0 +1,37 @@ +package analysisutil + +import "golang.org/x/tools/go/ssa" + +// InspectInstr inspects from i-th instruction of start block to succsessor blocks. +func InspectInstr(start *ssa.BasicBlock, i int, f func(i int, instr ssa.Instruction) bool) { + new(instrInspector).block(start, i, f) +} + +type instrInspector struct { + done map[*ssa.BasicBlock]bool +} + +func (ins *instrInspector) block(b *ssa.BasicBlock, i int, f func(i int, instr ssa.Instruction) bool) { + if ins.done == nil { + ins.done = map[*ssa.BasicBlock]bool{} + } + + if b == nil || ins.done[b] || len(b.Instrs) <= i { + return + } + + ins.done[b] = true + ins.instrs(i, b.Instrs[i:], f) + for _, s := range b.Succs { + ins.block(s, 0, f) + } + +} + +func (ins *instrInspector) instrs(offset int, instrs []ssa.Instruction, f func(i int, instr ssa.Instruction) bool) { + for i, instr := range instrs { + if !f(offset+i, instr) { + break + } + } +} diff --git a/vendor/github.com/gostaticanalysis/analysisutil/types.go b/vendor/github.com/gostaticanalysis/analysisutil/types.go new file mode 100644 index 0000000..46b9706 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/analysisutil/types.go @@ -0,0 +1,208 @@ +package analysisutil + +import ( + "go/ast" + "go/types" + + "golang.org/x/tools/go/analysis" +) + +var errType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) + +// ImplementsError return whether t implements error interface. +func ImplementsError(t types.Type) bool { + return types.Implements(t, errType) +} + +// ObjectOf returns types.Object by given name in the package. +func ObjectOf(pass *analysis.Pass, pkg, name string) types.Object { + obj := LookupFromImports(pass.Pkg.Imports(), pkg, name) + if obj != nil { + return obj + } + if RemoveVendor(pass.Pkg.Name()) != RemoveVendor(pkg) { + return nil + } + return pass.Pkg.Scope().Lookup(name) +} + +// TypeOf returns types.Type by given name in the package. +// TypeOf accepts pointer types such as *T. 
+func TypeOf(pass *analysis.Pass, pkg, name string) types.Type { + if name == "" { + return nil + } + + if name[0] == '*' { + obj := TypeOf(pass, pkg, name[1:]) + if obj == nil { + return nil + } + return types.NewPointer(obj) + } + + obj := ObjectOf(pass, pkg, name) + if obj == nil { + return nil + } + + return obj.Type() +} + +// MethodOf returns a method which has given name in the type. +func MethodOf(typ types.Type, name string) *types.Func { + switch typ := typ.(type) { + case *types.Named: + for i := 0; i < typ.NumMethods(); i++ { + if f := typ.Method(i); f.Name() == name { + return f + } + } + case *types.Pointer: + return MethodOf(typ.Elem(), name) + } + return nil +} + +// see: https://github.com/golang/go/issues/19670 +func identical(x, y types.Type) (ret bool) { + defer func() { + r := recover() + switch r := r.(type) { + case string: + if r == "unreachable" { + ret = false + return + } + case nil: + return + } + panic(r) + }() + return types.Identical(x, y) +} + +// Interfaces returns a map of interfaces which are declared in the package. +func Interfaces(pkg *types.Package) map[string]*types.Interface { + ifs := map[string]*types.Interface{} + + for _, n := range pkg.Scope().Names() { + o := pkg.Scope().Lookup(n) + if o != nil { + i, ok := o.Type().Underlying().(*types.Interface) + if ok { + ifs[n] = i + } + } + } + + return ifs +} + +// Structs returns a map of structs which are declared in the package. +func Structs(pkg *types.Package) map[string]*types.Struct { + structs := map[string]*types.Struct{} + + for _, n := range pkg.Scope().Names() { + o := pkg.Scope().Lookup(n) + if o != nil { + s, ok := o.Type().Underlying().(*types.Struct) + if ok { + structs[n] = s + } + } + } + + return structs +} + +// HasField returns whether the struct has the field. +func HasField(s *types.Struct, f *types.Var) bool { + if s == nil || f == nil { + return false + } + + for i := 0; i < s.NumFields(); i++ { + if s.Field(i) == f { + return true + } + } + + return false +} + +func TypesInfo(info ...*types.Info) *types.Info { + if len(info) == 0 { + return nil + } + + var merged types.Info + for i := range info { + mergeTypesInfo(&merged, info[i]) + } + + return &merged +} + +func mergeTypesInfo(i1, i2 *types.Info) { + // Types + if i1.Types == nil && i2.Types != nil { + i1.Types = map[ast.Expr]types.TypeAndValue{} + } + for expr, tv := range i2.Types { + i1.Types[expr] = tv + } + + // Defs + if i1.Defs == nil && i2.Defs != nil { + i1.Defs = map[*ast.Ident]types.Object{} + } + for ident, obj := range i2.Defs { + i1.Defs[ident] = obj + } + + // Uses + if i1.Uses == nil && i2.Uses != nil { + i1.Uses = map[*ast.Ident]types.Object{} + } + for ident, obj := range i2.Uses { + i1.Uses[ident] = obj + } + + // Implicits + if i1.Implicits == nil && i2.Implicits != nil { + i1.Implicits = map[ast.Node]types.Object{} + } + for n, obj := range i2.Implicits { + i1.Implicits[n] = obj + } + + // Selections + if i1.Selections == nil && i2.Selections != nil { + i1.Selections = map[*ast.SelectorExpr]*types.Selection{} + } + for expr, sel := range i2.Selections { + i1.Selections[expr] = sel + } + + // Scopes + if i1.Scopes == nil && i2.Scopes != nil { + i1.Scopes = map[ast.Node]*types.Scope{} + } + for n, s := range i2.Scopes { + i1.Scopes[n] = s + } + + // InitOrder + i1.InitOrder = append(i1.InitOrder, i2.InitOrder...) +} + +// Under returns the most bottom underlying type. 
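types.go above also exposes package-level lookups such as Interfaces and Structs. A sketch that type-checks a small source snippet (the shapes package is made up) and lists what those helpers return:

```go
package main

import (
	"fmt"
	"go/ast"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"

	"github.com/gostaticanalysis/analysisutil"
)

const src = `package shapes

type Shape interface{ Area() float64 }

type Rect struct{ W, H float64 }

func (r Rect) Area() float64 { return r.W * r.H }
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "shapes.go", src, 0)
	if err != nil {
		panic(err)
	}

	conf := types.Config{Importer: importer.Default()}
	pkg, err := conf.Check("shapes", fset, []*ast.File{f}, nil)
	if err != nil {
		panic(err)
	}

	for name := range analysisutil.Interfaces(pkg) {
		fmt.Println("interface:", name) // interface: Shape
	}
	for name := range analysisutil.Structs(pkg) {
		fmt.Println("struct:", name) // struct: Rect
	}
}
```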
+func Under(t types.Type) types.Type { + switch t := t.(type) { + case *types.Named: + return Under(t.Underlying()) + default: + return t + } +} diff --git a/vendor/github.com/gostaticanalysis/comment/LICENSE b/vendor/github.com/gostaticanalysis/comment/LICENSE new file mode 100644 index 0000000..4f7eeff --- /dev/null +++ b/vendor/github.com/gostaticanalysis/comment/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 Takuya Ueda + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/gostaticanalysis/comment/README.md b/vendor/github.com/gostaticanalysis/comment/README.md new file mode 100644 index 0000000..5335553 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/comment/README.md @@ -0,0 +1,10 @@ +# gostaticanalysis/comment + +[![godoc.org][godoc-badge]][godoc] + +`comment` provides utilities for [ast.CommentMap](https://golang.org/pkg/go/ast/#CommentMap). + + +[godoc]: https://godoc.org/github.com/gostaticanalysis/comment +[godoc-badge]: https://img.shields.io/badge/godoc-reference-4F73B3.svg?style=flat-square&label=%20godoc.org + diff --git a/vendor/github.com/gostaticanalysis/comment/comment.go b/vendor/github.com/gostaticanalysis/comment/comment.go new file mode 100644 index 0000000..2fe67fa --- /dev/null +++ b/vendor/github.com/gostaticanalysis/comment/comment.go @@ -0,0 +1,147 @@ +package comment + +import ( + "go/ast" + "go/token" + "strings" +) + +// Maps is slice of ast.CommentMap. +type Maps []ast.CommentMap + +// New creates new a CommentMap slice from specified files. +func New(fset *token.FileSet, files []*ast.File) Maps { + maps := make(Maps, len(files)) + for i := range files { + maps[i] = ast.NewCommentMap(fset, files[i], files[i].Comments) + } + return maps +} + +// Comments returns correspond a CommentGroup slice to specified AST node. +func (maps Maps) Comments(n ast.Node) []*ast.CommentGroup { + for i := range maps { + if maps[i][n] != nil { + return maps[i][n] + } + } + return nil +} + +// CommentsByPos returns correspond a CommentGroup slice to specified pos. +func (maps Maps) CommentsByPos(pos token.Pos) []*ast.CommentGroup { + for i := range maps { + for n, cgs := range maps[i] { + if n.Pos() == pos { + return cgs + } + } + } + return nil +} + +// Annotated checks either specified AST node is annotated or not. 
+func (maps Maps) Annotated(n ast.Node, annotation string) bool { + for _, cg := range maps.Comments(n) { + if strings.HasPrefix(strings.TrimSpace(cg.Text()), annotation) { + return true + } + } + return false +} + +// Ignore checks either specified AST node is ignored by the check. +// It follows staticcheck style as the below. +// //lint:ignore Check1[,Check2,...,CheckN] reason +func (maps Maps) Ignore(n ast.Node, check string) bool { + for _, cg := range maps.Comments(n) { + if hasIgnoreCheck(cg, check) { + return true + } + } + return false +} + +// IgnorePos checks either specified postion of AST node is ignored by the check. +// It follows staticcheck style as the below. +// //lint:ignore Check1[,Check2,...,CheckN] reason +func (maps Maps) IgnorePos(pos token.Pos, check string) bool { + for _, cg := range maps.CommentsByPos(pos) { + if hasIgnoreCheck(cg, check) { + return true + } + } + return false +} + +// Deprecated: This function does not work with multiple files. +// CommentsByPosLine can be used instead of CommentsByLine. +// +// CommentsByLine returns correspond a CommentGroup slice to specified line. +func (maps Maps) CommentsByLine(fset *token.FileSet, line int) []*ast.CommentGroup { + for i := range maps { + for n, cgs := range maps[i] { + l := fset.File(n.Pos()).Line(n.Pos()) + if l == line { + return cgs + } + } + } + return nil +} + +// CommentsByPosLine returns correspond a CommentGroup slice to specified line. +func (maps Maps) CommentsByPosLine(fset *token.FileSet, pos token.Pos) []*ast.CommentGroup { + f1 := fset.File(pos) + for i := range maps { + for n, cgs := range maps[i] { + f2 := fset.File(n.Pos()) + if f1 != f2 { + // different file + continue + } + + if f1.Line(pos) == f2.Line(n.Pos()) { + return cgs + } + } + } + return nil +} + +// IgnoreLine checks either specified lineof AST node is ignored by the check. +// It follows staticcheck style as the below. +// //lint:ignore Check1[,Check2,...,CheckN] reason +func (maps Maps) IgnoreLine(fset *token.FileSet, line int, check string) bool { + for _, cg := range maps.CommentsByLine(fset, line) { + if hasIgnoreCheck(cg, check) { + return true + } + } + return false +} + +// hasIgnoreCheck returns true if the provided CommentGroup starts with a comment +// of the form "//lint:ignore Check1[,Check2,...,CheckN] reason" and one of the +// checks matches the provided check. The *ast.CommentGroup is checked directly +// rather than using "cg.Text()" because, starting in Go 1.15, the "cg.Text()" call +// no longer returns directive-style comments (see https://github.com/golang/go/issues/37974). 
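comment.go implements the staticcheck-style //lint:ignore directive and, as noted above, reads cg.List[0].Text directly because CommentGroup.Text stopped returning directive comments in Go 1.15. A sketch of the Maps API on a parsed file; the mycheck and othercheck names are placeholders:

```go
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"

	"github.com/gostaticanalysis/comment"
)

const src = `package p

//lint:ignore mycheck this helper is intentionally unused in the example
func legacy() int { return 0 }
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}

	maps := comment.New(fset, []*ast.File{f})

	// f.Decls[0] is the legacy function; its directive comment suppresses the
	// check named "mycheck" but not others.
	var decl ast.Node = f.Decls[0]
	fmt.Println(maps.Ignore(decl, "mycheck"))    // true
	fmt.Println(maps.Ignore(decl, "othercheck")) // false
}
```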
+func hasIgnoreCheck(cg *ast.CommentGroup, check string) bool { + if !strings.HasPrefix(cg.List[0].Text, "//") { + return false + } + + s := strings.TrimSpace(cg.List[0].Text[2:]) + txt := strings.Split(s, " ") + if len(txt) < 3 || txt[0] != "lint:ignore" { + return false + } + + checks := strings.Split(txt[1], ",") + for i := range checks { + if check == checks[i] { + return true + } + } + return false +} diff --git a/vendor/github.com/gostaticanalysis/comment/go.mod b/vendor/github.com/gostaticanalysis/comment/go.mod new file mode 100644 index 0000000..2755681 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/comment/go.mod @@ -0,0 +1,8 @@ +module github.com/gostaticanalysis/comment + +go 1.12 + +require ( + github.com/google/go-cmp v0.5.1 + golang.org/x/tools v0.0.0-20200820010801-b793a1359eac +) diff --git a/vendor/github.com/gostaticanalysis/comment/go.sum b/vendor/github.com/gostaticanalysis/comment/go.sum new file mode 100644 index 0000000..425807c --- /dev/null +++ b/vendor/github.com/gostaticanalysis/comment/go.sum @@ -0,0 +1,24 @@ +github.com/google/go-cmp v0.5.1 h1:JFrFEBb2xKufg6XkJsJr+WbKb4FQlURi5RUcBveYu9k= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200820010801-b793a1359eac h1:DugppSxw0LSF8lcjaODPJZoDzq0ElTGskTst3ZaBkHI= +golang.org/x/tools v0.0.0-20200820010801-b793a1359eac/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git 
a/vendor/github.com/gostaticanalysis/comment/passes/commentmap/commentmap.go b/vendor/github.com/gostaticanalysis/comment/passes/commentmap/commentmap.go new file mode 100644 index 0000000..9266d98 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/comment/passes/commentmap/commentmap.go @@ -0,0 +1,20 @@ +package commentmap + +import ( + "reflect" + + "github.com/gostaticanalysis/comment" + "golang.org/x/tools/go/analysis" +) + +var Analyzer = &analysis.Analyzer{ + Name: "commentmap", + Doc: "create comment map", + Run: run, + RunDespiteErrors: true, + ResultType: reflect.TypeOf(comment.Maps{}), +} + +func run(pass *analysis.Pass) (interface{}, error) { + return comment.New(pass.Fset, pass.Files), nil +} diff --git a/vendor/github.com/golangci/goconst/LICENSE b/vendor/github.com/jgautheron/goconst/LICENSE similarity index 100% rename from vendor/github.com/golangci/goconst/LICENSE rename to vendor/github.com/jgautheron/goconst/LICENSE diff --git a/vendor/github.com/golangci/goconst/README.md b/vendor/github.com/jgautheron/goconst/README.md similarity index 96% rename from vendor/github.com/golangci/goconst/README.md rename to vendor/github.com/jgautheron/goconst/README.md index 04fc9b0..8dd093b 100644 --- a/vendor/github.com/golangci/goconst/README.md +++ b/vendor/github.com/jgautheron/goconst/README.md @@ -31,6 +31,7 @@ Flags: -min minimum value, only works with -numbers -max maximum value, only works with -numbers -output output formatting (text or json) + -set-exit-status Set exit status to 2 if any issues are found Examples: diff --git a/vendor/github.com/jgautheron/goconst/api.go b/vendor/github.com/jgautheron/goconst/api.go new file mode 100644 index 0000000..e58894b --- /dev/null +++ b/vendor/github.com/jgautheron/goconst/api.go @@ -0,0 +1,67 @@ +package goconst + +import ( + "go/ast" + "go/token" +) + +type Issue struct { + Pos token.Position + OccurrencesCount int + Str string + MatchingConst string +} + +type Config struct { + MatchWithConstants bool + MinStringLength int + MinOccurrences int + ParseNumbers bool + NumberMin int + NumberMax int + ExcludeTypes map[Type]bool +} + +func Run(files []*ast.File, fset *token.FileSet, cfg *Config) ([]Issue, error) { + p := New( + "", + "", + false, + cfg.MatchWithConstants, + cfg.ParseNumbers, + cfg.NumberMin, + cfg.NumberMax, + cfg.MinStringLength, + cfg.MinOccurrences, + cfg.ExcludeTypes, + ) + var issues []Issue + for _, f := range files { + ast.Walk(&treeVisitor{ + fileSet: fset, + packageName: "", + fileName: "", + p: p, + }, f) + } + p.ProcessResults() + + for str, item := range p.strs { + fi := item[0] + i := Issue{ + Pos: fi.Position, + OccurrencesCount: len(item), + Str: str, + } + + if len(p.consts) != 0 { + if cst, ok := p.consts[str]; ok { + // const should be in the same package and exported + i.MatchingConst = cst.Name + } + } + issues = append(issues, i) + } + + return issues, nil +} diff --git a/vendor/github.com/jgautheron/goconst/go.mod b/vendor/github.com/jgautheron/goconst/go.mod new file mode 100644 index 0000000..53dbfbb --- /dev/null +++ b/vendor/github.com/jgautheron/goconst/go.mod @@ -0,0 +1,3 @@ +module github.com/jgautheron/goconst + +go 1.13 diff --git a/vendor/github.com/golangci/goconst/parser.go b/vendor/github.com/jgautheron/goconst/parser.go similarity index 75% rename from vendor/github.com/golangci/goconst/parser.go rename to vendor/github.com/jgautheron/goconst/parser.go index ab5f99a..2ed7a9a 100644 --- a/vendor/github.com/golangci/goconst/parser.go +++ 
b/vendor/github.com/jgautheron/goconst/parser.go @@ -14,6 +14,7 @@ import ( "os" "path/filepath" "regexp" + "strconv" "strings" ) @@ -25,7 +26,9 @@ type Parser struct { // Meant to be passed via New() path, ignore string ignoreTests, matchConstant bool - minLength int + minLength, minOccurrences int + numberMin, numberMax int + excludeTypes map[Type]bool supportedTokens []token.Token @@ -36,7 +39,7 @@ type Parser struct { // New creates a new instance of the parser. // This is your entry point if you'd like to use goconst as an API. -func New(path, ignore string, ignoreTests, matchConstant, numbers bool, minLength int) *Parser { +func New(path, ignore string, ignoreTests, matchConstant, numbers bool, numberMin, numberMax, minLength, minOccurrences int, excludeTypes map[Type]bool) *Parser { supportedTokens := []token.Token{token.STRING} if numbers { supportedTokens = append(supportedTokens, token.INT, token.FLOAT) @@ -48,7 +51,11 @@ func New(path, ignore string, ignoreTests, matchConstant, numbers bool, minLengt ignoreTests: ignoreTests, matchConstant: matchConstant, minLength: minLength, + minOccurrences: minOccurrences, + numberMin: numberMin, + numberMax: numberMax, supportedTokens: supportedTokens, + excludeTypes: excludeTypes, // Initialize the maps strs: Strings{}, @@ -77,9 +84,32 @@ func (p *Parser) ParseTree() (Strings, Constants, error) { } else { p.parseDir(p.path) } + + p.ProcessResults() + return p.strs, p.consts, nil } +// ProcessResults post-processes the raw results. +func (p *Parser) ProcessResults() { + for str, item := range p.strs { + // Filter out items whose occurrences don't match the min value + if len(item) < p.minOccurrences { + delete(p.strs, str) + } + + // If the value is a number + if i, err := strconv.Atoi(str); err == nil { + if p.numberMin != 0 && i < p.numberMin { + delete(p.strs, str) + } + if p.numberMax != 0 && i > p.numberMax { + delete(p.strs, str) + } + } + } +} + func (p *Parser) parseDir(dir string) error { fset := token.NewFileSet() pkgs, err := parser.ParseDir(fset, dir, func(info os.FileInfo) bool { @@ -135,54 +165,12 @@ type ExtendedPos struct { packageName string } -type Issue struct { - Pos token.Position - OccurencesCount int - Str string - MatchingConst string -} - -type Config struct { - MatchWithConstants bool - MinStringLength int - MinOccurrences int -} - -func Run(files []*ast.File, fset *token.FileSet, cfg *Config) ([]Issue, error) { - p := New("", "", false, cfg.MatchWithConstants, false, cfg.MinStringLength) - var issues []Issue - for _, f := range files { - ast.Walk(&treeVisitor{ - fileSet: fset, - packageName: "", - fileName: "", - p: p, - }, f) - } +type Type int - for str, item := range p.strs { - // Filter out items whose occurrences don't match the min value - if len(item) < cfg.MinOccurrences { - delete(p.strs, str) - } - } - - for str, item := range p.strs { - fi := item[0] - i := Issue{ - Pos: fi.Position, - OccurencesCount: len(item), - Str: str, - } - - if len(p.consts) != 0 { - if cst, ok := p.consts[str]; ok { - // const should be in the same package and exported - i.MatchingConst = cst.Name - } - } - issues = append(issues, i) - } - - return issues, nil -} +const ( + Assignment Type = iota + Binary + Case + Return + Call +) diff --git a/vendor/github.com/golangci/goconst/visitor.go b/vendor/github.com/jgautheron/goconst/visitor.go similarity index 79% rename from vendor/github.com/golangci/goconst/visitor.go rename to vendor/github.com/jgautheron/goconst/visitor.go index a86421f..c0974da 100644 --- 
a/vendor/github.com/golangci/goconst/visitor.go +++ b/vendor/github.com/jgautheron/goconst/visitor.go @@ -3,6 +3,7 @@ package goconst import ( "go/ast" "go/token" + "strconv" "strings" ) @@ -56,7 +57,7 @@ func (v *treeVisitor) Visit(node ast.Node) ast.Visitor { continue } - v.addString(lit.Value, rhs.(*ast.BasicLit).Pos()) + v.addString(lit.Value, rhs.(*ast.BasicLit).Pos(), Assignment) } // if foo == "moo" @@ -70,12 +71,12 @@ func (v *treeVisitor) Visit(node ast.Node) ast.Visitor { lit, ok = t.X.(*ast.BasicLit) if ok && v.isSupported(lit.Kind) { - v.addString(lit.Value, lit.Pos()) + v.addString(lit.Value, lit.Pos(), Binary) } lit, ok = t.Y.(*ast.BasicLit) if ok && v.isSupported(lit.Kind) { - v.addString(lit.Value, lit.Pos()) + v.addString(lit.Value, lit.Pos(), Binary) } // case "foo": @@ -83,7 +84,7 @@ func (v *treeVisitor) Visit(node ast.Node) ast.Visitor { for _, item := range t.List { lit, ok := item.(*ast.BasicLit) if ok && v.isSupported(lit.Kind) { - v.addString(lit.Value, lit.Pos()) + v.addString(lit.Value, lit.Pos(), Case) } } @@ -92,7 +93,16 @@ func (v *treeVisitor) Visit(node ast.Node) ast.Visitor { for _, item := range t.Results { lit, ok := item.(*ast.BasicLit) if ok && v.isSupported(lit.Kind) { - v.addString(lit.Value, lit.Pos()) + v.addString(lit.Value, lit.Pos(), Return) + } + } + + // fn("http://") + case *ast.CallExpr: + for _, item := range t.Args { + lit, ok := item.(*ast.BasicLit) + if ok && v.isSupported(lit.Kind) { + v.addString(lit.Value, lit.Pos(), Call) } } } @@ -101,8 +111,15 @@ func (v *treeVisitor) Visit(node ast.Node) ast.Visitor { } // addString adds a string in the map along with its position in the tree. -func (v *treeVisitor) addString(str string, pos token.Pos) { - str = strings.Replace(str, `"`, "", 2) +func (v *treeVisitor) addString(str string, pos token.Pos, typ Type) { + ok, excluded := v.p.excludeTypes[typ] + if ok && excluded { + return + } + // Drop quotes if any + if strings.HasPrefix(str, `"`) || strings.HasPrefix(str, "`") { + str, _ = strconv.Unquote(str) + } // Ignore empty strings if len(str) == 0 { @@ -113,7 +130,7 @@ func (v *treeVisitor) addString(str string, pos token.Pos) { return } - _, ok := v.p.strs[str] + _, ok = v.p.strs[str] if !ok { v.p.strs[str] = make([]ExtendedPos, 0) } diff --git a/vendor/github.com/konsorten/go-windows-terminal-sequences/LICENSE b/vendor/github.com/konsorten/go-windows-terminal-sequences/LICENSE deleted file mode 100644 index 14127cd..0000000 --- a/vendor/github.com/konsorten/go-windows-terminal-sequences/LICENSE +++ /dev/null @@ -1,9 +0,0 @@ -(The MIT License) - -Copyright (c) 2017 marvin + konsorten GmbH (open-source@konsorten.de) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/konsorten/go-windows-terminal-sequences/README.md b/vendor/github.com/konsorten/go-windows-terminal-sequences/README.md deleted file mode 100644 index 195333e..0000000 --- a/vendor/github.com/konsorten/go-windows-terminal-sequences/README.md +++ /dev/null @@ -1,41 +0,0 @@ -# Windows Terminal Sequences - -This library allow for enabling Windows terminal color support for Go. - -See [Console Virtual Terminal Sequences](https://docs.microsoft.com/en-us/windows/console/console-virtual-terminal-sequences) for details. - -## Usage - -```go -import ( - "syscall" - - sequences "github.com/konsorten/go-windows-terminal-sequences" -) - -func main() { - sequences.EnableVirtualTerminalProcessing(syscall.Stdout, true) -} - -``` - -## Authors - -The tool is sponsored by the [marvin + konsorten GmbH](http://www.konsorten.de). - -We thank all the authors who provided code to this library: - -* Felix Kollmann -* Nicolas Perraut - -## License - -(The MIT License) - -Copyright (c) 2018 marvin + konsorten GmbH (open-source@konsorten.de) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/vendor/github.com/konsorten/go-windows-terminal-sequences/go.mod b/vendor/github.com/konsorten/go-windows-terminal-sequences/go.mod deleted file mode 100644 index 716c613..0000000 --- a/vendor/github.com/konsorten/go-windows-terminal-sequences/go.mod +++ /dev/null @@ -1 +0,0 @@ -module github.com/konsorten/go-windows-terminal-sequences diff --git a/vendor/github.com/konsorten/go-windows-terminal-sequences/sequences.go b/vendor/github.com/konsorten/go-windows-terminal-sequences/sequences.go deleted file mode 100644 index ef18d8f..0000000 --- a/vendor/github.com/konsorten/go-windows-terminal-sequences/sequences.go +++ /dev/null @@ -1,36 +0,0 @@ -// +build windows - -package sequences - -import ( - "syscall" - "unsafe" -) - -var ( - kernel32Dll *syscall.LazyDLL = syscall.NewLazyDLL("Kernel32.dll") - setConsoleMode *syscall.LazyProc = kernel32Dll.NewProc("SetConsoleMode") -) - -func EnableVirtualTerminalProcessing(stream syscall.Handle, enable bool) error { - const ENABLE_VIRTUAL_TERMINAL_PROCESSING uint32 = 0x4 - - var mode uint32 - err := syscall.GetConsoleMode(syscall.Stdout, &mode) - if err != nil { - return err - } - - if enable { - mode |= ENABLE_VIRTUAL_TERMINAL_PROCESSING - } else { - mode &^= ENABLE_VIRTUAL_TERMINAL_PROCESSING - } - - ret, _, err := setConsoleMode.Call(uintptr(unsafe.Pointer(stream)), uintptr(mode)) - if ret == 0 { - return err - } - - return nil -} diff --git a/vendor/github.com/konsorten/go-windows-terminal-sequences/sequences_dummy.go b/vendor/github.com/konsorten/go-windows-terminal-sequences/sequences_dummy.go deleted file mode 100644 index df61a6f..0000000 --- a/vendor/github.com/konsorten/go-windows-terminal-sequences/sequences_dummy.go +++ /dev/null @@ -1,11 +0,0 @@ -// +build linux darwin - -package sequences - -import ( - "fmt" -) - -func EnableVirtualTerminalProcessing(stream uintptr, enable bool) error { - return fmt.Errorf("windows only package") -} diff --git a/vendor/github.com/kulti/thelper/LICENSE b/vendor/github.com/kulti/thelper/LICENSE new file mode 100644 index 0000000..e070215 --- /dev/null +++ b/vendor/github.com/kulti/thelper/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Aleksey Bakin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/vendor/github.com/kulti/thelper/pkg/analyzer/analyzer.go b/vendor/github.com/kulti/thelper/pkg/analyzer/analyzer.go new file mode 100644 index 0000000..2e49cf2 --- /dev/null +++ b/vendor/github.com/kulti/thelper/pkg/analyzer/analyzer.go @@ -0,0 +1,349 @@ +package analyzer + +import ( + "flag" + "fmt" + "go/ast" + "go/token" + "go/types" + "sort" + "strings" + + "github.com/gostaticanalysis/analysisutil" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +const doc = "thelper detects test helpers which do not start with t.Helper() method." +const checksDoc = `comma-separated list of enabled checks + +Available checks + +` + checkTBegin + ` - check t.Helper() begins helper function +` + checkTFirst + ` - check *testing.T is first param of helper function +` + checkTName + ` - check *testing.T param has t name + +Also available similar checks for benchmark helpers: ` + + checkBBegin + `, ` + checkBFirst + `, ` + checkBName + ` + +` + +type enabledChecksValue map[string]struct{} + +func (m enabledChecksValue) Enabled(c string) bool { + _, ok := m[c] + return ok +} + +func (m enabledChecksValue) String() string { + ss := make([]string, 0, len(m)) + for s := range m { + ss = append(ss, s) + } + sort.Strings(ss) + return strings.Join(ss, ",") +} + +func (m enabledChecksValue) Set(s string) error { + ss := strings.FieldsFunc(s, func(c rune) bool { return c == ',' }) + if len(ss) == 0 { + return nil + } + + for k := range m { + delete(m, k) + } + for _, v := range ss { + switch v { + case checkTBegin, checkTFirst, checkTName, checkBBegin, checkBFirst, checkBName: + m[v] = struct{}{} + default: + return fmt.Errorf("unknown check name %q (see help for full list)", v) + } + } + return nil +} + +const ( + checkTBegin = "t_begin" + checkTFirst = "t_first" + checkTName = "t_name" + checkBBegin = "b_begin" + checkBFirst = "b_first" + checkBName = "b_name" +) + +type thelper struct { + enabledChecks enabledChecksValue +} + +// NewAnalyzer returns a new thelper analyzer. +// thelper analyzes how Go test code uses the t.Helper() method.
+func NewAnalyzer() *analysis.Analyzer { + thelper := thelper{} + thelper.enabledChecks = enabledChecksValue{ + checkTBegin: struct{}{}, + checkTFirst: struct{}{}, + checkTName: struct{}{}, + checkBBegin: struct{}{}, + checkBFirst: struct{}{}, + checkBName: struct{}{}, + } + + a := &analysis.Analyzer{ + Name: "thelper", + Doc: doc, + Run: thelper.run, + Requires: []*analysis.Analyzer{ + inspect.Analyzer, + }, + } + + a.Flags.Init("thelper", flag.ExitOnError) + a.Flags.Var(&thelper.enabledChecks, "checks", checksDoc) + + return a +} + +func (t thelper) run(pass *analysis.Pass) (interface{}, error) { + var ctxType types.Type + ctxObj := analysisutil.ObjectOf(pass, "context", "Context") + if ctxObj != nil { + ctxType = ctxObj.Type() + } + + tCheckOpts, ok := t.buildTestCheckFuncOpts(pass, ctxType) + if !ok { + return nil, nil + } + + bCheckOpts, ok := t.buildBenchmarkCheckFuncOpts(pass, ctxType) + if !ok { + return nil, nil + } + + var reports reports + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + nodeFilter := []ast.Node{ + (*ast.FuncDecl)(nil), + (*ast.FuncLit)(nil), + (*ast.CallExpr)(nil), + } + inspect.Preorder(nodeFilter, func(node ast.Node) { + var fd funcDecl + switch n := node.(type) { + case *ast.FuncLit: + fd.Pos = n.Pos() + fd.Type = n.Type + fd.Body = n.Body + fd.Name = ast.NewIdent("") + case *ast.FuncDecl: + fd.Pos = n.Name.NamePos + fd.Type = n.Type + fd.Body = n.Body + fd.Name = n.Name + case *ast.CallExpr: + reports.Filter(subtestPos(pass, n, tCheckOpts.tbRun)) + reports.Filter(subtestPos(pass, n, bCheckOpts.tbRun)) + return + default: + return + } + + checkFunc(pass, &reports, fd, tCheckOpts) + checkFunc(pass, &reports, fd, bCheckOpts) + }) + + reports.Flush(pass) + + return nil, nil +} + +type checkFuncOpts struct { + skipPrefix string + varName string + tbHelper types.Object + tbRun types.Object + tbType types.Type + ctxType types.Type + checkBegin bool + checkFirst bool + checkName bool +} + +func (t thelper) buildTestCheckFuncOpts(pass *analysis.Pass, ctxType types.Type) (checkFuncOpts, bool) { + tObj := analysisutil.ObjectOf(pass, "testing", "T") + if tObj == nil { + return checkFuncOpts{}, false + } + + tHelper, _, _ := types.LookupFieldOrMethod(tObj.Type(), true, tObj.Pkg(), "Helper") + if tHelper == nil { + return checkFuncOpts{}, false + } + + tRun, _, _ := types.LookupFieldOrMethod(tObj.Type(), true, tObj.Pkg(), "Run") + if tRun == nil { + return checkFuncOpts{}, false + } + + return checkFuncOpts{ + skipPrefix: "Test", + varName: "t", + tbHelper: tHelper, + tbRun: tRun, + tbType: types.NewPointer(tObj.Type()), + ctxType: ctxType, + checkBegin: t.enabledChecks.Enabled(checkTBegin), + checkFirst: t.enabledChecks.Enabled(checkTFirst), + checkName: t.enabledChecks.Enabled(checkTName), + }, true +} + +func (t thelper) buildBenchmarkCheckFuncOpts(pass *analysis.Pass, ctxType types.Type) (checkFuncOpts, bool) { + bObj := analysisutil.ObjectOf(pass, "testing", "B") + if bObj == nil { + return checkFuncOpts{}, false + } + + bHelper, _, _ := types.LookupFieldOrMethod(bObj.Type(), true, bObj.Pkg(), "Helper") + if bHelper == nil { + return checkFuncOpts{}, false + } + + bRun, _, _ := types.LookupFieldOrMethod(bObj.Type(), true, bObj.Pkg(), "Run") + if bRun == nil { + return checkFuncOpts{}, false + } + + return checkFuncOpts{ + skipPrefix: "Benchmark", + varName: "b", + tbHelper: bHelper, + tbRun: bRun, + tbType: types.NewPointer(bObj.Type()), + ctxType: ctxType, + checkBegin: t.enabledChecks.Enabled(checkBBegin), + checkFirst: 
t.enabledChecks.Enabled(checkBFirst), + checkName: t.enabledChecks.Enabled(checkBName), + }, true +} + +type funcDecl struct { + Pos token.Pos + Name *ast.Ident + Type *ast.FuncType + Body *ast.BlockStmt +} + +func checkFunc(pass *analysis.Pass, reports *reports, funcDecl funcDecl, opts checkFuncOpts) { + if strings.HasPrefix(funcDecl.Name.Name, opts.skipPrefix) { + return + } + + p, pos, ok := searchFuncParam(pass, funcDecl, opts.tbType) + if !ok { + return + } + + if opts.checkFirst { + if pos != 0 { + checkFirstPassed := false + if pos == 1 && opts.ctxType != nil { + _, pos, ok := searchFuncParam(pass, funcDecl, opts.ctxType) + checkFirstPassed = ok && (pos == 0) + } + + if !checkFirstPassed { + reports.Reportf(funcDecl.Pos, "parameter %s should be the first or after context.Context", opts.tbType) + } + } + } + + if opts.checkName { + if len(p.Names) > 0 && p.Names[0].Name != opts.varName { + reports.Reportf(funcDecl.Pos, "parameter %s should have name %s", opts.tbType, opts.varName) + } + } + + if opts.checkBegin { + if len(funcDecl.Body.List) == 0 || !isTHelperCall(pass, funcDecl.Body.List[0], opts.tbHelper) { + reports.Reportf(funcDecl.Pos, "test helper function should start from %s.Helper()", opts.varName) + } + } +} + +func searchFuncParam(pass *analysis.Pass, f funcDecl, p types.Type) (*ast.Field, int, bool) { + for i, f := range f.Type.Params.List { + typeInfo, ok := pass.TypesInfo.Types[f.Type] + if !ok { + continue + } + + if types.Identical(typeInfo.Type, p) { + return f, i, true + } + } + return nil, 0, false +} + +func isTHelperCall(pass *analysis.Pass, s ast.Stmt, tHelper types.Object) bool { + exprStmt, ok := s.(*ast.ExprStmt) + if !ok { + return false + } + + callExpr, ok := exprStmt.X.(*ast.CallExpr) + if !ok { + return false + } + + selExpr, ok := callExpr.Fun.(*ast.SelectorExpr) + if !ok { + return false + } + + return isSelectorCall(pass, selExpr, tHelper) +} + +func subtestPos(pass *analysis.Pass, e *ast.CallExpr, tbRun types.Object) token.Pos { + selExpr, ok := e.Fun.(*ast.SelectorExpr) + if !ok { + return token.NoPos + } + + if !isSelectorCall(pass, selExpr, tbRun) { + return token.NoPos + } + + if len(e.Args) != 2 { + return token.NoPos + } + + anonFunLit, ok := e.Args[1].(*ast.FuncLit) + if ok { + return anonFunLit.Pos() + } + + funIdent, ok := e.Args[1].(*ast.Ident) + if !ok { + return token.NoPos + } + + funDef, ok := pass.TypesInfo.Uses[funIdent] + if !ok { + return token.NoPos + } + + return funDef.Pos() +} + +func isSelectorCall(pass *analysis.Pass, selExpr *ast.SelectorExpr, callObj types.Object) bool { + sel, ok := pass.TypesInfo.Selections[selExpr] + if !ok { + return false + } + + return sel.Obj() == callObj +} diff --git a/vendor/github.com/kulti/thelper/pkg/analyzer/report.go b/vendor/github.com/kulti/thelper/pkg/analyzer/report.go new file mode 100644 index 0000000..1dd3eec --- /dev/null +++ b/vendor/github.com/kulti/thelper/pkg/analyzer/report.go @@ -0,0 +1,44 @@ +package analyzer + +import ( + "go/token" + + "golang.org/x/tools/go/analysis" +) + +type reports struct { + reports []report + filter map[token.Pos]struct{} +} + +type report struct { + pos token.Pos + format string + args []interface{} +} + +func (rr *reports) Reportf(pos token.Pos, format string, args ...interface{}) { + rr.reports = append(rr.reports, report{ + pos: pos, + format: format, + args: args, + }) +} + +func (rr *reports) Filter(pos token.Pos) { + if pos.IsValid() { + if rr.filter == nil { + rr.filter = make(map[token.Pos]struct{}) + } + rr.filter[pos] = struct{}{} + } +} + 
+func (rr reports) Flush(pass *analysis.Pass) { + for _, r := range rr.reports { + if _, ok := rr.filter[r.pos]; ok { + continue + } + pass.Reportf(r.pos, r.format, r.args...) + } +} diff --git a/vendor/github.com/kunwardeep/paralleltest/LICENSE b/vendor/github.com/kunwardeep/paralleltest/LICENSE new file mode 100644 index 0000000..d06a809 --- /dev/null +++ b/vendor/github.com/kunwardeep/paralleltest/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Isaev Denis + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/kunwardeep/paralleltest/pkg/paralleltest/paralleltest.go b/vendor/github.com/kunwardeep/paralleltest/pkg/paralleltest/paralleltest.go new file mode 100644 index 0000000..31f6f29 --- /dev/null +++ b/vendor/github.com/kunwardeep/paralleltest/pkg/paralleltest/paralleltest.go @@ -0,0 +1,256 @@ +package paralleltest + +import ( + "go/ast" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +const Doc = `check that tests use t.Parallel() method +It also checks that the t.Parallel is used if multiple tests cases are run as part of single test. 
+As part of ensuring parallel tests works as expected it checks for reinitialising of the range value +over the test cases.(https://tinyurl.com/y6555cy6)` + +func NewAnalyzer() *analysis.Analyzer { + return &analysis.Analyzer{ + Name: "paralleltest", + Doc: Doc, + Run: run, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + } +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspector := inspector.New(pass.Files) + + nodeFilter := []ast.Node{ + (*ast.FuncDecl)(nil), + } + + inspector.Preorder(nodeFilter, func(node ast.Node) { + funcDecl := node.(*ast.FuncDecl) + var funcHasParallelMethod, + rangeStatementOverTestCasesExists, + rangeStatementHasParallelMethod, + testLoopVariableReinitialised bool + var testRunLoopIdentifier string + var numberOfTestRun int + var positionOfTestRunNode []ast.Node + var rangeNode ast.Node + + // Check runs for test functions only + if !isTestFunction(funcDecl) { + return + } + + for _, l := range funcDecl.Body.List { + switch v := l.(type) { + + case *ast.ExprStmt: + ast.Inspect(v, func(n ast.Node) bool { + // Check if the test method is calling t.parallel + if !funcHasParallelMethod { + funcHasParallelMethod = methodParallelIsCalledInTestFunction(n) + } + + // Check if the t.Run within the test function is calling t.parallel + if methodRunIsCalledInTestFunction(n) { + hasParallel := false + numberOfTestRun++ + ast.Inspect(v, func(p ast.Node) bool { + if !hasParallel { + hasParallel = methodParallelIsCalledInTestFunction(p) + } + return true + }) + if !hasParallel { + positionOfTestRunNode = append(positionOfTestRunNode, n) + } + } + return true + }) + + // Check if the range over testcases is calling t.parallel + case *ast.RangeStmt: + rangeNode = v + + ast.Inspect(v, func(n ast.Node) bool { + // nolint: gocritic + switch r := n.(type) { + case *ast.ExprStmt: + if methodRunIsCalledInRangeStatement(r.X) { + rangeStatementOverTestCasesExists = true + testRunLoopIdentifier = methodRunFirstArgumentObjectName(r.X) + + if !rangeStatementHasParallelMethod { + rangeStatementHasParallelMethod = methodParallelIsCalledInMethodRun(r.X) + } + } + } + return true + }) + + // Check for the range loop value identifier re assignment + // More info here https://gist.github.com/kunwardeep/80c2e9f3d3256c894898bae82d9f75d0 + if rangeStatementOverTestCasesExists { + var rangeValueIdentifier string + if i, ok := v.Value.(*ast.Ident); ok { + rangeValueIdentifier = i.Name + } + + testLoopVariableReinitialised = testCaseLoopVariableReinitialised(v.Body.List, rangeValueIdentifier, testRunLoopIdentifier) + } + } + } + + if !funcHasParallelMethod { + pass.Reportf(node.Pos(), "Function %s missing the call to method parallel\n", funcDecl.Name.Name) + } + + if rangeStatementOverTestCasesExists && rangeNode != nil { + if !rangeStatementHasParallelMethod { + pass.Reportf(rangeNode.Pos(), "Range statement for test %s missing the call to method parallel in test Run\n", funcDecl.Name.Name) + } else { + if testRunLoopIdentifier == "" { + pass.Reportf(rangeNode.Pos(), "Range statement for test %s does not use range value in test Run\n", funcDecl.Name.Name) + } else if !testLoopVariableReinitialised { + pass.Reportf(rangeNode.Pos(), "Range statement for test %s does not reinitialise the variable %s\n", funcDecl.Name.Name, testRunLoopIdentifier) + } + } + } + + // Check if the t.Run is more than one as there is no point making one test parallel + if numberOfTestRun > 1 && len(positionOfTestRunNode) > 0 { + for _, n := range positionOfTestRunNode { + pass.Reportf(n.Pos(), "Function %s 
has missing the call to method parallel in the test run\n", funcDecl.Name.Name) + } + } + }) + + return nil, nil +} + +func testCaseLoopVariableReinitialised(statements []ast.Stmt, rangeValueIdentifier string, testRunLoopIdentifier string) bool { + if len(statements) > 1 { + for _, s := range statements { + leftIdentifier, rightIdentifier := getLeftAndRightIdentifier(s) + if leftIdentifier == testRunLoopIdentifier && rightIdentifier == rangeValueIdentifier { + return true + } + } + } + return false +} + +// Return the left hand side and the right hand side identifiers name +func getLeftAndRightIdentifier(s ast.Stmt) (string, string) { + var leftIdentifier, rightIdentifier string + // nolint: gocritic + switch v := s.(type) { + case *ast.AssignStmt: + if len(v.Rhs) == 1 { + if i, ok := v.Rhs[0].(*ast.Ident); ok { + rightIdentifier = i.Name + } + } + if len(v.Lhs) == 1 { + if i, ok := v.Lhs[0].(*ast.Ident); ok { + leftIdentifier = i.Name + } + } + } + return leftIdentifier, rightIdentifier +} + +func methodParallelIsCalledInMethodRun(node ast.Node) bool { + var methodParallelCalled bool + // nolint: gocritic + switch callExp := node.(type) { + case *ast.CallExpr: + for _, arg := range callExp.Args { + if !methodParallelCalled { + ast.Inspect(arg, func(n ast.Node) bool { + if !methodParallelCalled { + methodParallelCalled = methodParallelIsCalledInRunMethod(n) + return true + } + return false + }) + } + } + } + return methodParallelCalled +} + +func methodParallelIsCalledInRunMethod(node ast.Node) bool { + return exprCallHasMethod(node, "Parallel") +} + +func methodParallelIsCalledInTestFunction(node ast.Node) bool { + return exprCallHasMethod(node, "Parallel") +} + +func methodRunIsCalledInRangeStatement(node ast.Node) bool { + return exprCallHasMethod(node, "Run") +} + +func methodRunIsCalledInTestFunction(node ast.Node) bool { + return exprCallHasMethod(node, "Run") +} +func exprCallHasMethod(node ast.Node, methodName string) bool { + // nolint: gocritic + switch n := node.(type) { + case *ast.CallExpr: + if fun, ok := n.Fun.(*ast.SelectorExpr); ok { + return fun.Sel.Name == methodName + } + } + return false +} + +// Gets the object name `tc` from method t.Run(tc.Foo, func(t *testing.T) +func methodRunFirstArgumentObjectName(node ast.Node) string { + // nolint: gocritic + switch n := node.(type) { + case *ast.CallExpr: + for _, arg := range n.Args { + if s, ok := arg.(*ast.SelectorExpr); ok { + if i, ok := s.X.(*ast.Ident); ok { + return i.Name + } + } + } + } + return "" +} + +// Checks if the function has the param type *testing.T) +func isTestFunction(funcDecl *ast.FuncDecl) bool { + testMethodPackageType := "testing" + testMethodStruct := "T" + testPrefix := "Test" + + if !strings.HasPrefix(funcDecl.Name.Name, testPrefix) { + return false + } + + if funcDecl.Type.Params != nil && len(funcDecl.Type.Params.List) != 1 { + return false + } + + param := funcDecl.Type.Params.List[0] + if starExp, ok := param.Type.(*ast.StarExpr); ok { + if selectExpr, ok := starExp.X.(*ast.SelectorExpr); ok { + if selectExpr.Sel.Name == testMethodStruct { + if s, ok := selectExpr.X.(*ast.Ident); ok { + return s.Name == testMethodPackageType + } + } + } + } + + return false +} diff --git a/vendor/github.com/kyoh86/exportloopref/.golangci.yml b/vendor/github.com/kyoh86/exportloopref/.golangci.yml new file mode 100644 index 0000000..e876057 --- /dev/null +++ b/vendor/github.com/kyoh86/exportloopref/.golangci.yml @@ -0,0 +1,4 @@ +linters: + enable: + - unparam + - exportloopref diff --git 
a/vendor/github.com/kyoh86/exportloopref/.goreleaser.yml b/vendor/github.com/kyoh86/exportloopref/.goreleaser.yml new file mode 100644 index 0000000..22ff440 --- /dev/null +++ b/vendor/github.com/kyoh86/exportloopref/.goreleaser.yml @@ -0,0 +1,43 @@ +project_name: exportloopref +release: + github: + owner: kyoh86 + name: exportloopref +brews: +- install: | + bin.install "exportloopref" + github: + owner: kyoh86 + name: homebrew-tap + folder: Formula + homepage: https://github.com/kyoh86/exportloopref + description: An analyzer that finds exporting pointers for loop variables. +builds: +- goos: + - linux + - darwin + - windows + goarch: + - amd64 + - "386" + main: ./cmd/exportloopref + ldflags: -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.Date}} + binary: exportloopref +archives: +- id: gzip + format: tar.gz + format_overrides: + - goos: windows + format: zip + name_template: "{{ .Binary }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}" + files: + - licence* + - LICENCE* + - license* + - LICENSE* + - readme* + - README* + - changelog* + - CHANGELOG* +snapshot: + name_template: SNAPSHOT-{{ .Commit }} diff --git a/vendor/github.com/kyoh86/exportloopref/LICENSE b/vendor/github.com/kyoh86/exportloopref/LICENSE new file mode 100644 index 0000000..7ac9dba --- /dev/null +++ b/vendor/github.com/kyoh86/exportloopref/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 kyoh86 + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE +OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/kyoh86/exportloopref/Makefile b/vendor/github.com/kyoh86/exportloopref/Makefile new file mode 100644 index 0000000..4d3ef22 --- /dev/null +++ b/vendor/github.com/kyoh86/exportloopref/Makefile @@ -0,0 +1,16 @@ +.PHONY: gen lint test install man + +VERSION := `git vertag get` +COMMIT := `git rev-parse HEAD` + +gen: + go generate ./... + +lint: gen + golangci-lint run + +test: lint + go test -v --race ./... + +install: test + go install -a -ldflags "-X=main.version=$(VERSION) -X=main.commit=$(COMMIT)" ./... diff --git a/vendor/github.com/kyoh86/exportloopref/README.md b/vendor/github.com/kyoh86/exportloopref/README.md new file mode 100644 index 0000000..5c019c7 --- /dev/null +++ b/vendor/github.com/kyoh86/exportloopref/README.md @@ -0,0 +1,221 @@ +# exportloopref + +An analyzer that finds exporting pointers for loop variables. 
+ +[![PkgGoDev](https://pkg.go.dev/badge/kyoh86/exportloopref)](https://pkg.go.dev/kyoh86/exportloopref) +[![Go Report Card](https://goreportcard.com/badge/github.com/kyoh86/exportloopref)](https://goreportcard.com/report/github.com/kyoh86/exportloopref) +[![Coverage Status](https://img.shields.io/codecov/c/github/kyoh86/exportloopref.svg)](https://codecov.io/gh/kyoh86/exportloopref) +[![Release](https://github.com/kyoh86/exportloopref/workflows/Release/badge.svg)](https://github.com/kyoh86/exportloopref/releases) + +## What's this? + +Sample problem code from: https://github.com/kyoh86/exportloopref/blob/master/testdata/src/simple/simple.go + +```go +package main + +func main() { + var intArray [4]*int + var intSlice []*int + var intRef *int + var intStr struct{ x *int } + + println("loop expecting 10, 11, 12, 13") + for i, p := range []int{10, 11, 12, 13} { + printp(&p) // not a diagnostic + intSlice = append(intSlice, &p) // want "exporting a pointer for the loop variable p" + intArray[i] = &p // want "exporting a pointer for the loop variable p" + if i%2 == 0 { + intRef = &p // want "exporting a pointer for the loop variable p" + intStr.x = &p // want "exporting a pointer for the loop variable p" + } + var vStr struct{ x *int } + var vArray [4]*int + var v *int + if i%2 == 0 { + v = &p // not a diagnostic (x is local variable) + vArray[1] = &p // not a diagnostic (x is local variable) + vStr.x = &p + } + _ = v + } + + println(`slice expecting "10, 11, 12, 13" but "13, 13, 13, 13"`) + for _, p := range intSlice { + printp(p) + } + println(`array expecting "10, 11, 12, 13" but "13, 13, 13, 13"`) + for _, p := range intArray { + printp(p) + } + println(`captured value expecting "12" but "13"`) + printp(intRef) +} + +func printp(p *int) { + println(*p) +} +``` + +In Go, the `p` variable in the above loops is actually a single variable. +So in many case (like the above), using it makes for us annoying bugs. + +You can find them with `exportloopref`, and fix it. + +```go +package main + +func main() { + var intArray [4]*int + var intSlice []*int + var intRef *int + var intStr struct{ x *int } + + println("loop expecting 10, 11, 12, 13") + for i, p := range []int{10, 11, 12, 13} { + p := p // FIX variable into the local variable + printp(&p) + intSlice = append(intSlice, &p) + intArray[i] = &p + if i%2 == 0 { + intRef = &p + intStr.x = &p + } + var vStr struct{ x *int } + var vArray [4]*int + var v *int + if i%2 == 0 { + v = &p + vArray[1] = &p + vStr.x = &p + } + _ = v + } + + println(`slice expecting "10, 11, 12, 13"`) + for _, p := range intSlice { + printp(p) + } + println(`array expecting "10, 11, 12, 13"`) + for _, p := range intArray { + printp(p) + } + println(`captured value expecting "12"`) + printp(intRef) +} + +func printp(p *int) { + println(*p) +} +``` + +ref: https://github.com/kyoh86/exportloopref/blob/master/testdata/src/fixed/fixed.go + +## Sensing policy + +I want to make exportloopref as accurately as possible. +So some cases of lints will be false-negative. + +e.g. + +```go +var s Foo +for _, p := []int{10, 11, 12, 13} { + s.Bar(&p) // If s stores the pointer, it will be bug. +} +``` + +If you want to report all of lints (with some false-positives), +you should use [looppointer](https://github.com/kyoh86/looppointer). + +### Known false negatives + +Case 1: pass the pointer to function to export. + +Case 2: pass the pointer to local variable, and export it. 
+ +```go +package main + +type List []*int + +func (l *List) AppendP(p *int) { + *l = append(*l, p) +} + +func main() { + var slice []*int + list := List{} + + println("loop expect exporting 10, 11, 12, 13") + for _, v := range []int{10, 11, 12, 13} { + list.AppendP(&v) // Case 1: wanted "exporting a pointer for the loop variable v", but cannot be found + + p := &v // p is the local variable + slice = append(slice, p) // Case 2: wanted "exporting a pointer for the loop variable v", but cannot be found + } + + println(`slice expecting "10, 11, 12, 13" but "13, 13, 13, 13"`) + for _, p := range slice { + printp(p) + } + println(`array expecting "10, 11, 12, 13" but "13, 13, 13, 13"`) + for _, p := range ([]*int)(list) { + printp(p) + } +} + +func printp(p *int) { + println(*p) +} +``` + +## Install + +go: + +```console +$ go get github.com/kyoh86/exportloopref/cmd/exportloopref +``` + +[homebrew](https://brew.sh/): + +```console +$ brew install kyoh86/tap/exportloopref +``` + +[gordon](https://github.com/kyoh86/gordon): + +```console +$ gordon install kyoh86/exportloopref +``` + +## Usage + +``` +exportloopref [-flag] [package] +``` + +### Flags + +| Flag | Description | +| --- | --- | +| -V | print version and exit | +| -all | no effect (deprecated) | +| -c int | display offending line with this many lines of context (default -1) | +| -cpuprofile string | write CPU profile to this file | +| -debug string | debug flags, any subset of "fpstv" | +| -fix | apply all suggested fixes | +| -flags | print analyzer flags in JSON | +| -json | emit JSON output | +| -memprofile string | write memory profile to this file | +| -source | no effect (deprecated) | +| -tags string | no effect (deprecated) | +| -trace string | write trace log to this file | +| -v | no effect (deprecated) | + +# LICENSE + +[![MIT License](http://img.shields.io/badge/license-MIT-blue.svg)](http://www.opensource.org/licenses/MIT) + +This is distributed under the [MIT License](http://www.opensource.org/licenses/MIT). 
diff --git a/vendor/github.com/kyoh86/exportloopref/exportloopref.go b/vendor/github.com/kyoh86/exportloopref/exportloopref.go new file mode 100644 index 0000000..4d1671a --- /dev/null +++ b/vendor/github.com/kyoh86/exportloopref/exportloopref.go @@ -0,0 +1,305 @@ +package exportloopref + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +var Analyzer = &analysis.Analyzer{ + Name: "exportloopref", + Doc: "checks for pointers to enclosing loop variables", + Run: run, + RunDespiteErrors: true, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + // ResultType reflect.Type + // FactTypes []Fact +} + +func init() { + // Analyzer.Flags.StringVar(&v, "name", "default", "description") +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + search := &Searcher{ + Stats: map[token.Pos]struct{}{}, + Vars: map[token.Pos]map[token.Pos]struct{}{}, + Types: pass.TypesInfo.Types, + } + + nodeFilter := []ast.Node{ + (*ast.RangeStmt)(nil), + (*ast.ForStmt)(nil), + (*ast.DeclStmt)(nil), + (*ast.AssignStmt)(nil), + (*ast.UnaryExpr)(nil), + } + + inspect.WithStack(nodeFilter, func(n ast.Node, push bool, stack []ast.Node) bool { + id, insert, digg := search.Check(n, stack) + if id != nil { + dMsg := fmt.Sprintf("exporting a pointer for the loop variable %s", id.Name) + fMsg := fmt.Sprintf("loop variable %s should be pinned", id.Name) + var suggest []analysis.SuggestedFix + if insert != token.NoPos { + suggest = []analysis.SuggestedFix{{ + Message: fMsg, + TextEdits: []analysis.TextEdit{{ + Pos: insert, + End: insert, + NewText: []byte(fmt.Sprintf("%[1]s := %[1]s\n", id.Name)), + }}, + }} + } + d := analysis.Diagnostic{Pos: id.Pos(), + End: id.End(), + Message: dMsg, + Category: "exportloopref", + SuggestedFixes: suggest, + } + pass.Report(d) + } + return digg + }) + + return nil, nil +} + +type Searcher struct { + // Statement variables : map to collect positions that + // variables are declared like below. + // - for , := range ... + // - var int + // - D := ... + Stats map[token.Pos]struct{} + // Local variables maps loop-position, decl-location to ignore + // safe pointers for variable which declared in the loop. + Vars map[token.Pos]map[token.Pos]struct{} + Types map[ast.Expr]types.TypeAndValue +} + +func (s *Searcher) Check(n ast.Node, stack []ast.Node) (*ast.Ident, token.Pos, bool) { + switch typed := n.(type) { + case *ast.RangeStmt: + s.parseRangeStmt(typed) + case *ast.ForStmt: + s.parseForStmt(typed) + case *ast.DeclStmt: + s.parseDeclStmt(typed, stack) + case *ast.AssignStmt: + s.parseAssignStmt(typed, stack) + + case *ast.UnaryExpr: + return s.checkUnaryExpr(typed, stack) + } + return nil, token.NoPos, true +} + +func (s *Searcher) parseRangeStmt(n *ast.RangeStmt) { + s.addStat(n.Key) + s.addStat(n.Value) +} + +func (s *Searcher) parseForStmt(n *ast.ForStmt) { + switch post := n.Post.(type) { + case *ast.AssignStmt: + // e.g. for p = head; p != nil; p = p.next + for _, lhs := range post.Lhs { + s.addStat(lhs) + } + case *ast.IncDecStmt: + // e.g. 
for i := 0; i < n; i++ + s.addStat(post.X) + } +} + +func (s *Searcher) addStat(expr ast.Expr) { + if id, ok := expr.(*ast.Ident); ok { + s.Stats[id.Pos()] = struct{}{} + } +} + +func (s *Searcher) parseDeclStmt(n *ast.DeclStmt, stack []ast.Node) { + loop, _ := s.innermostLoop(stack) + if loop == nil { + return + } + + // Register declaring variables + if genDecl, ok := n.Decl.(*ast.GenDecl); ok && genDecl.Tok == token.VAR { + for _, spec := range genDecl.Specs { + for _, name := range spec.(*ast.ValueSpec).Names { + s.addVar(loop, name) + } + } + } +} + +func (s *Searcher) parseAssignStmt(n *ast.AssignStmt, stack []ast.Node) { + loop, _ := s.innermostLoop(stack) + if loop == nil { + return + } + + // Find statements declaring local variable + if n.Tok == token.DEFINE { + for _, h := range n.Lhs { + s.addVar(loop, h) + } + } +} + +func (s *Searcher) addVar(loop ast.Node, expr ast.Expr) { + loopPos := loop.Pos() + id, ok := expr.(*ast.Ident) + if !ok { + return + } + vars, ok := s.Vars[loopPos] + if !ok { + vars = map[token.Pos]struct{}{} + } + vars[id.Obj.Pos()] = struct{}{} + s.Vars[loopPos] = vars +} + +func insertionPosition(block *ast.BlockStmt) token.Pos { + if len(block.List) > 0 { + return block.List[0].Pos() + } + return token.NoPos +} + +func (s *Searcher) innermostLoop(stack []ast.Node) (ast.Node, token.Pos) { + for i := len(stack) - 1; i >= 0; i-- { + switch typed := stack[i].(type) { + case *ast.RangeStmt: + return typed, insertionPosition(typed.Body) + case *ast.ForStmt: + return typed, insertionPosition(typed.Body) + } + } + return nil, token.NoPos +} + +func (s *Searcher) checkUnaryExpr(n *ast.UnaryExpr, stack []ast.Node) (*ast.Ident, token.Pos, bool) { + loop, insert := s.innermostLoop(stack) + if loop == nil { + return nil, token.NoPos, true + } + + if n.Op != token.AND { + return nil, token.NoPos, true + } + + // Get identity of the referred item + id := s.getIdentity(n.X) + if id == nil { + return nil, token.NoPos, true + } + + // If the identity is not the loop statement variable, + // it will not be reported. + if _, isStat := s.Stats[id.Obj.Pos()]; !isStat { + return nil, token.NoPos, true + } + + // check stack append(), []X{}, map[Type]X{}, Struct{}, &Struct{}, X.(Type), (X) + // in the = + var mayRHPos token.Pos + for i := len(stack) - 2; i >= 0; i-- { + switch typed := stack[i].(type) { + case (*ast.UnaryExpr): + // noop + case (*ast.CompositeLit): + // noop + case (*ast.KeyValueExpr): + // noop + case (*ast.CallExpr): + fun, ok := typed.Fun.(*ast.Ident) + if !ok { + return nil, token.NoPos, false // it's calling a function other of `append`. It cannot be checked + } + + if fun.Name != "append" { + return nil, token.NoPos, false // it's calling a function other of `append`. It cannot be checked + } + + case (*ast.AssignStmt): + if len(typed.Rhs) != len(typed.Lhs) { + return nil, token.NoPos, false // dead logic + } + + // search x where Rhs[x].Pos() == mayRHPos + var index int + for ri, rh := range typed.Rhs { + if rh.Pos() == mayRHPos { + index = ri + break + } + } + + // check Lhs[x] is not local variable + lh := typed.Lhs[index] + isVar := s.isVar(loop, lh) + if !isVar { + return id, insert, false + } + + return nil, token.NoPos, true + default: + // Other statement is not able to be checked. 
+ return nil, token.NoPos, false + } + + // memory an expr that may be right-hand in the AssignStmt + mayRHPos = stack[i].Pos() + } + return nil, token.NoPos, true +} + +func (s *Searcher) isVar(loop ast.Node, expr ast.Expr) bool { + vars := s.Vars[loop.Pos()] // map[token.Pos]struct{} + if vars == nil { + return false + } + switch typed := expr.(type) { + case (*ast.Ident): + _, isVar := vars[typed.Obj.Pos()] + return isVar + case (*ast.IndexExpr): // like X[Y], check X + return s.isVar(loop, typed.X) + case (*ast.SelectorExpr): // like X.Y, check X + return s.isVar(loop, typed.X) + } + return false +} + +// Get variable identity +func (s *Searcher) getIdentity(expr ast.Expr) *ast.Ident { + switch typed := expr.(type) { + case *ast.SelectorExpr: + // Ignore if the parent is pointer ref (fix for #2) + if _, ok := s.Types[typed.X].Type.(*types.Pointer); ok { + return nil + } + + // Get parent identity; i.e. `a.b` of the `a.b.c`. + return s.getIdentity(typed.X) + + case *ast.Ident: + // Get simple identity; i.e. `a` of the `a`. + if typed.Obj == nil { + return nil + } + return typed + } + return nil +} diff --git a/vendor/github.com/kyoh86/exportloopref/go.mod b/vendor/github.com/kyoh86/exportloopref/go.mod new file mode 100644 index 0000000..34a5398 --- /dev/null +++ b/vendor/github.com/kyoh86/exportloopref/go.mod @@ -0,0 +1,5 @@ +module github.com/kyoh86/exportloopref + +go 1.14 + +require golang.org/x/tools v0.0.0-20200622203043-20e05c1c8ffa diff --git a/vendor/github.com/kyoh86/exportloopref/go.sum b/vendor/github.com/kyoh86/exportloopref/go.sum new file mode 100644 index 0000000..3b199f0 --- /dev/null +++ b/vendor/github.com/kyoh86/exportloopref/go.sum @@ -0,0 +1,20 @@ +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/mod v0.2.0 h1:KU7oHjnv3XNWfa5COkzUifxZmxp1TyI7ImMXqFxLwvQ= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200622203043-20e05c1c8ffa h1:mMXQKlWCw9mIWgVLLfiycDZjMHMMYqiuakI4E/l2xcA= +golang.org/x/tools v0.0.0-20200622203043-20e05c1c8ffa/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= 
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/vendor/github.com/mattn/go-colorable/.travis.yml b/vendor/github.com/mattn/go-colorable/.travis.yml index 98db8f0..7942c56 100644 --- a/vendor/github.com/mattn/go-colorable/.travis.yml +++ b/vendor/github.com/mattn/go-colorable/.travis.yml @@ -1,9 +1,15 @@ language: go +sudo: false go: + - 1.13.x - tip before_install: - - go get github.com/mattn/goveralls - - go get golang.org/x/tools/cmd/cover + - go get -t -v ./... + script: - - $HOME/gopath/bin/goveralls -repotoken xnXqRGwgW3SXIguzxf90ZSK1GPYZPaGrw + - ./go.test.sh + +after_success: + - bash <(curl -s https://codecov.io/bash) + diff --git a/vendor/github.com/mattn/go-colorable/README.md b/vendor/github.com/mattn/go-colorable/README.md index 56729a9..e055952 100644 --- a/vendor/github.com/mattn/go-colorable/README.md +++ b/vendor/github.com/mattn/go-colorable/README.md @@ -1,8 +1,8 @@ # go-colorable -[![Godoc Reference](https://godoc.org/github.com/mattn/go-colorable?status.svg)](http://godoc.org/github.com/mattn/go-colorable) [![Build Status](https://travis-ci.org/mattn/go-colorable.svg?branch=master)](https://travis-ci.org/mattn/go-colorable) -[![Coverage Status](https://coveralls.io/repos/github/mattn/go-colorable/badge.svg?branch=master)](https://coveralls.io/github/mattn/go-colorable?branch=master) +[![Codecov](https://codecov.io/gh/mattn/go-colorable/branch/master/graph/badge.svg)](https://codecov.io/gh/mattn/go-colorable) +[![GoDoc](https://godoc.org/github.com/mattn/go-colorable?status.svg)](http://godoc.org/github.com/mattn/go-colorable) [![Go Report Card](https://goreportcard.com/badge/mattn/go-colorable)](https://goreportcard.com/report/mattn/go-colorable) Colorable writer for windows. diff --git a/vendor/github.com/mattn/go-colorable/colorable_appengine.go b/vendor/github.com/mattn/go-colorable/colorable_appengine.go index 0b0aef8..1f7806f 100644 --- a/vendor/github.com/mattn/go-colorable/colorable_appengine.go +++ b/vendor/github.com/mattn/go-colorable/colorable_appengine.go @@ -27,3 +27,11 @@ func NewColorableStdout() io.Writer { func NewColorableStderr() io.Writer { return os.Stderr } + +// EnableColorsStdout enable colors if possible. +func EnableColorsStdout(enabled *bool) func() { + if enabled != nil { + *enabled = true + } + return func() {} +} diff --git a/vendor/github.com/mattn/go-colorable/colorable_others.go b/vendor/github.com/mattn/go-colorable/colorable_others.go index 3fb771d..08cbd1e 100644 --- a/vendor/github.com/mattn/go-colorable/colorable_others.go +++ b/vendor/github.com/mattn/go-colorable/colorable_others.go @@ -28,3 +28,11 @@ func NewColorableStdout() io.Writer { func NewColorableStderr() io.Writer { return os.Stderr } + +// EnableColorsStdout enable colors if possible. 
+func EnableColorsStdout(enabled *bool) func() { + if enabled != nil { + *enabled = true + } + return func() {} +} diff --git a/vendor/github.com/mattn/go-colorable/colorable_windows.go b/vendor/github.com/mattn/go-colorable/colorable_windows.go index 1bd628f..41215d7 100644 --- a/vendor/github.com/mattn/go-colorable/colorable_windows.go +++ b/vendor/github.com/mattn/go-colorable/colorable_windows.go @@ -10,6 +10,7 @@ import ( "os" "strconv" "strings" + "sync" "syscall" "unsafe" @@ -27,6 +28,9 @@ const ( backgroundRed = 0x40 backgroundIntensity = 0x80 backgroundMask = (backgroundRed | backgroundBlue | backgroundGreen | backgroundIntensity) + commonLvbUnderscore = 0x8000 + + cENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x4 ) const ( @@ -78,6 +82,8 @@ var ( procGetConsoleCursorInfo = kernel32.NewProc("GetConsoleCursorInfo") procSetConsoleCursorInfo = kernel32.NewProc("SetConsoleCursorInfo") procSetConsoleTitle = kernel32.NewProc("SetConsoleTitleW") + procGetConsoleMode = kernel32.NewProc("GetConsoleMode") + procSetConsoleMode = kernel32.NewProc("SetConsoleMode") procCreateConsoleScreenBuffer = kernel32.NewProc("CreateConsoleScreenBuffer") ) @@ -89,6 +95,7 @@ type Writer struct { oldattr word oldpos coord rest bytes.Buffer + mutex sync.Mutex } // NewColorable returns new instance of Writer which handles escape sequence from File. @@ -98,6 +105,10 @@ func NewColorable(file *os.File) io.Writer { } if isatty.IsTerminal(file.Fd()) { + var mode uint32 + if r, _, _ := procGetConsoleMode.Call(file.Fd(), uintptr(unsafe.Pointer(&mode))); r != 0 && mode&cENABLE_VIRTUAL_TERMINAL_PROCESSING != 0 { + return file + } var csbi consoleScreenBufferInfo handle := syscall.Handle(file.Fd()) procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) @@ -424,6 +435,8 @@ func atoiWithDefault(s string, def int) (int, error) { // Write writes data on console func (w *Writer) Write(data []byte) (n int, err error) { + w.mutex.Lock() + defer w.mutex.Unlock() var csbi consoleScreenBufferInfo procGetConsoleScreenBufferInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&csbi))) @@ -675,14 +688,19 @@ loop: switch { case n == 0 || n == 100: attr = w.oldattr - case 1 <= n && n <= 5: - attr |= foregroundIntensity - case n == 7: - attr = ((attr & foregroundMask) << 4) | ((attr & backgroundMask) >> 4) - case n == 22 || n == 25: + case n == 4: + attr |= commonLvbUnderscore + case (1 <= n && n <= 3) || n == 5: attr |= foregroundIntensity - case n == 27: - attr = ((attr & foregroundMask) << 4) | ((attr & backgroundMask) >> 4) + case n == 7 || n == 27: + attr = + (attr &^ (foregroundMask | backgroundMask)) | + ((attr & foregroundMask) << 4) | + ((attr & backgroundMask) >> 4) + case n == 22: + attr &^= foregroundIntensity + case n == 24: + attr &^= commonLvbUnderscore case 30 <= n && n <= 37: attr &= backgroundMask if (n-30)&1 != 0 { @@ -701,7 +719,7 @@ loop: n256setup() } attr &= backgroundMask - attr |= n256foreAttr[n256] + attr |= n256foreAttr[n256%len(n256foreAttr)] i += 2 } } else if len(token) == 5 && token[i+1] == "2" { @@ -743,7 +761,7 @@ loop: n256setup() } attr &= foregroundMask - attr |= n256backAttr[n256] + attr |= n256backAttr[n256%len(n256backAttr)] i += 2 } } else if len(token) == 5 && token[i+1] == "2" { @@ -1003,3 +1021,23 @@ func n256setup() { n256backAttr[i] = c.backgroundAttr() } } + +// EnableColorsStdout enable colors if possible. 
+func EnableColorsStdout(enabled *bool) func() { + var mode uint32 + h := os.Stdout.Fd() + if r, _, _ := procGetConsoleMode.Call(h, uintptr(unsafe.Pointer(&mode))); r != 0 { + if r, _, _ = procSetConsoleMode.Call(h, uintptr(mode|cENABLE_VIRTUAL_TERMINAL_PROCESSING)); r != 0 { + if enabled != nil { + *enabled = true + } + return func() { + procSetConsoleMode.Call(h, uintptr(mode)) + } + } + } + if enabled != nil { + *enabled = true + } + return func() {} +} diff --git a/vendor/github.com/mattn/go-colorable/go.mod b/vendor/github.com/mattn/go-colorable/go.mod index ef3ca9d..1e590b8 100644 --- a/vendor/github.com/mattn/go-colorable/go.mod +++ b/vendor/github.com/mattn/go-colorable/go.mod @@ -1,3 +1,8 @@ module github.com/mattn/go-colorable -require github.com/mattn/go-isatty v0.0.8 +require ( + github.com/mattn/go-isatty v0.0.12 + golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae // indirect +) + +go 1.13 diff --git a/vendor/github.com/mattn/go-colorable/go.sum b/vendor/github.com/mattn/go-colorable/go.sum index 2c12960..cf5b95d 100644 --- a/vendor/github.com/mattn/go-colorable/go.sum +++ b/vendor/github.com/mattn/go-colorable/go.sum @@ -1,4 +1,5 @@ -github.com/mattn/go-isatty v0.0.5 h1:tHXDdz1cpzGaovsTB+TVB8q90WEokoVmfMqoVcrLUgw= -github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223 h1:DH4skfRX4EBpamg7iV4ZlCpblAHI6s6TDM39bFZumv8= -golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae h1:/WDfKMnPU+m5M4xB+6x4kaepxRw6jWvR5iDRdvjHgy8= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= diff --git a/vendor/github.com/mattn/go-colorable/go.test.sh b/vendor/github.com/mattn/go-colorable/go.test.sh new file mode 100644 index 0000000..012162b --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/go.test.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +set -e +echo "" > coverage.txt + +for d in $(go list ./... | grep -v vendor); do + go test -race -coverprofile=profile.out -covermode=atomic "$d" + if [ -f profile.out ]; then + cat profile.out >> coverage.txt + rm profile.out + fi +done diff --git a/vendor/github.com/mattn/go-isatty/.travis.yml b/vendor/github.com/mattn/go-isatty/.travis.yml index 5597e02..604314d 100644 --- a/vendor/github.com/mattn/go-isatty/.travis.yml +++ b/vendor/github.com/mattn/go-isatty/.travis.yml @@ -1,13 +1,14 @@ language: go +sudo: false go: + - 1.13.x - tip -os: - - linux - - osx - before_install: - - go get github.com/mattn/goveralls - - go get golang.org/x/tools/cmd/cover + - go get -t -v ./... 
+ script: - - $HOME/gopath/bin/goveralls -repotoken 3gHdORO5k5ziZcWMBxnd9LrMZaJs8m9x5 + - ./go.test.sh + +after_success: + - bash <(curl -s https://codecov.io/bash) diff --git a/vendor/github.com/mattn/go-isatty/README.md b/vendor/github.com/mattn/go-isatty/README.md index 1e69004..3841835 100644 --- a/vendor/github.com/mattn/go-isatty/README.md +++ b/vendor/github.com/mattn/go-isatty/README.md @@ -1,7 +1,7 @@ # go-isatty [![Godoc Reference](https://godoc.org/github.com/mattn/go-isatty?status.svg)](http://godoc.org/github.com/mattn/go-isatty) -[![Build Status](https://travis-ci.org/mattn/go-isatty.svg?branch=master)](https://travis-ci.org/mattn/go-isatty) +[![Codecov](https://codecov.io/gh/mattn/go-isatty/branch/master/graph/badge.svg)](https://codecov.io/gh/mattn/go-isatty) [![Coverage Status](https://coveralls.io/repos/github/mattn/go-isatty/badge.svg?branch=master)](https://coveralls.io/github/mattn/go-isatty?branch=master) [![Go Report Card](https://goreportcard.com/badge/mattn/go-isatty)](https://goreportcard.com/report/mattn/go-isatty) diff --git a/vendor/github.com/mattn/go-isatty/go.mod b/vendor/github.com/mattn/go-isatty/go.mod index a8ddf40..605c4c2 100644 --- a/vendor/github.com/mattn/go-isatty/go.mod +++ b/vendor/github.com/mattn/go-isatty/go.mod @@ -1,5 +1,5 @@ module github.com/mattn/go-isatty -require golang.org/x/sys v0.0.0-20191008105621-543471e840be +go 1.12 -go 1.14 +require golang.org/x/sys v0.0.0-20200116001909-b77594299b42 diff --git a/vendor/github.com/mattn/go-isatty/go.sum b/vendor/github.com/mattn/go-isatty/go.sum index c141fc5..912e29c 100644 --- a/vendor/github.com/mattn/go-isatty/go.sum +++ b/vendor/github.com/mattn/go-isatty/go.sum @@ -1,4 +1,2 @@ -golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a h1:aYOabOQFp6Vj6W1F80affTUvO9UxmJRx8K0gsfABByQ= -golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191008105621-543471e840be h1:QAcqgptGM8IQBC9K/RC4o+O9YmqEm0diQn9QmZw/0mU= -golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42 h1:vEOn+mP2zCOVzKckCZy6YsCtDblrpj/w7B9nxGNELpg= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= diff --git a/vendor/github.com/mattn/go-isatty/go.test.sh b/vendor/github.com/mattn/go-isatty/go.test.sh new file mode 100644 index 0000000..012162b --- /dev/null +++ b/vendor/github.com/mattn/go-isatty/go.test.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +set -e +echo "" > coverage.txt + +for d in $(go list ./... | grep -v vendor); do + go test -race -coverprofile=profile.out -covermode=atomic "$d" + if [ -f profile.out ]; then + cat profile.out >> coverage.txt + rm profile.out + fi +done diff --git a/vendor/github.com/mattn/go-isatty/isatty_android.go b/vendor/github.com/mattn/go-isatty/isatty_android.go deleted file mode 100644 index d3567cb..0000000 --- a/vendor/github.com/mattn/go-isatty/isatty_android.go +++ /dev/null @@ -1,23 +0,0 @@ -// +build android - -package isatty - -import ( - "syscall" - "unsafe" -) - -const ioctlReadTermios = syscall.TCGETS - -// IsTerminal return true if the file descriptor is terminal. 
-func IsTerminal(fd uintptr) bool { - var termios syscall.Termios - _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, fd, ioctlReadTermios, uintptr(unsafe.Pointer(&termios)), 0, 0, 0) - return err == 0 -} - -// IsCygwinTerminal return true if the file descriptor is a cygwin or msys2 -// terminal. This is also always false on this environment. -func IsCygwinTerminal(fd uintptr) bool { - return false -} diff --git a/vendor/github.com/mattn/go-isatty/isatty_bsd.go b/vendor/github.com/mattn/go-isatty/isatty_bsd.go index 07e9303..711f288 100644 --- a/vendor/github.com/mattn/go-isatty/isatty_bsd.go +++ b/vendor/github.com/mattn/go-isatty/isatty_bsd.go @@ -3,18 +3,12 @@ package isatty -import ( - "syscall" - "unsafe" -) - -const ioctlReadTermios = syscall.TIOCGETA +import "golang.org/x/sys/unix" // IsTerminal return true if the file descriptor is terminal. func IsTerminal(fd uintptr) bool { - var termios syscall.Termios - _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, fd, ioctlReadTermios, uintptr(unsafe.Pointer(&termios)), 0, 0, 0) - return err == 0 + _, err := unix.IoctlGetTermios(int(fd), unix.TIOCGETA) + return err == nil } // IsCygwinTerminal return true if the file descriptor is a cygwin or msys2 diff --git a/vendor/github.com/mattn/go-isatty/isatty_plan9.go b/vendor/github.com/mattn/go-isatty/isatty_plan9.go index bc0a709..c5b6e0c 100644 --- a/vendor/github.com/mattn/go-isatty/isatty_plan9.go +++ b/vendor/github.com/mattn/go-isatty/isatty_plan9.go @@ -8,7 +8,7 @@ import ( // IsTerminal returns true if the given file descriptor is a terminal. func IsTerminal(fd uintptr) bool { - path, err := syscall.Fd2path(fd) + path, err := syscall.Fd2path(int(fd)) if err != nil { return false } diff --git a/vendor/github.com/mattn/go-isatty/isatty_tcgets.go b/vendor/github.com/mattn/go-isatty/isatty_tcgets.go index 453b025..31a1ca9 100644 --- a/vendor/github.com/mattn/go-isatty/isatty_tcgets.go +++ b/vendor/github.com/mattn/go-isatty/isatty_tcgets.go @@ -1,6 +1,5 @@ // +build linux aix // +build !appengine -// +build !android package isatty diff --git a/vendor/github.com/mattn/go-isatty/renovate.json b/vendor/github.com/mattn/go-isatty/renovate.json new file mode 100644 index 0000000..5ae9d96 --- /dev/null +++ b/vendor/github.com/mattn/go-isatty/renovate.json @@ -0,0 +1,8 @@ +{ + "extends": [ + "config:base" + ], + "postUpdateOptions": [ + "gomodTidy" + ] +} diff --git a/vendor/github.com/mbilski/exhaustivestruct/LICENSE b/vendor/github.com/mbilski/exhaustivestruct/LICENSE new file mode 100644 index 0000000..893eb73 --- /dev/null +++ b/vendor/github.com/mbilski/exhaustivestruct/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Mateusz Bilski + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/mbilski/exhaustivestruct/pkg/analyzer/analyzer.go b/vendor/github.com/mbilski/exhaustivestruct/pkg/analyzer/analyzer.go new file mode 100644 index 0000000..4936842 --- /dev/null +++ b/vendor/github.com/mbilski/exhaustivestruct/pkg/analyzer/analyzer.go @@ -0,0 +1,102 @@ +package analyzer + +import ( + "go/ast" + "go/types" + "strings" + + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + + "golang.org/x/tools/go/analysis" +) + +// Analyzer that checks if all struct's fields are initialized +var Analyzer = &analysis.Analyzer{ + Name: "exhaustivestruct", + Doc: "Checks if all struct's fields are initialized", + Run: run, + Requires: []*analysis.Analyzer{inspect.Analyzer}, +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.CompositeLit)(nil), + } + + inspector.Preorder(nodeFilter, func(node ast.Node) { + var name string + + compositeLit := node.(*ast.CompositeLit) + + i, ok := compositeLit.Type.(*ast.Ident) + + if ok { + name = i.Name + } else { + s, ok := compositeLit.Type.(*ast.SelectorExpr) + + if !ok { + return + } + + name = s.Sel.Name + } + + if compositeLit.Type == nil { + return + } + + t := pass.TypesInfo.TypeOf(compositeLit.Type) + + if t == nil { + return + } + + str, ok := t.Underlying().(*types.Struct) + + if !ok { + return + } + + if len(compositeLit.Elts) == 0 { + return + } + + missing := []string{} + + for i := 0; i < str.NumFields(); i++ { + fieldName := str.Field(i).Name() + exists := false + + if !str.Field(i).Exported() { + continue + } + + for _, e := range compositeLit.Elts { + if k, ok := e.(*ast.KeyValueExpr); ok { + if i, ok := k.Key.(*ast.Ident); ok { + if i.Name == fieldName { + exists = true + break + } + } + } + } + + if !exists { + missing = append(missing, fieldName) + } + } + + if len(missing) == 1 { + pass.Reportf(node.Pos(), "%s is missing in %s", missing[0], name) + } else if len(missing) > 1 { + pass.Reportf(node.Pos(), "%s are missing in %s", strings.Join(missing, ", "), name) + } + }) + + return nil, nil +} diff --git a/vendor/github.com/moricho/tparallel/.gitignore b/vendor/github.com/moricho/tparallel/.gitignore new file mode 100644 index 0000000..7134228 --- /dev/null +++ b/vendor/github.com/moricho/tparallel/.gitignore @@ -0,0 +1,3 @@ +/tparallel +.envrc +/dist diff --git a/vendor/github.com/moricho/tparallel/.goreleaser.yml b/vendor/github.com/moricho/tparallel/.goreleaser.yml new file mode 100644 index 0000000..e9f6d72 --- /dev/null +++ b/vendor/github.com/moricho/tparallel/.goreleaser.yml @@ -0,0 +1,38 @@ +project_name: tparallel +env: + - GO111MODULE=on +before: + hooks: + - go mod tidy +builds: + - main: ./cmd/tparallel + binary: tparallel + ldflags: + - -s -w + - -X main.Version={{.Version}} + - -X main.Revision={{.ShortCommit}} + env: + - CGO_ENABLED=0 +archives: + - name_template: '{{ .ProjectName }}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}' + replacements: + darwin: darwin + linux: linux + windows: windows + 386: i386 + amd64: x86_64 + format_overrides: + - goos: windows + format: zip +release: + prerelease: auto +brews: + - tap: + owner: moricho + name: homebrew-tparallel + homepage: 
https://github.com/moricho/tparallel + description: tparallel detects inappropriate usage of t.Parallel() method in your Go test codes + install: | + bin.install "tparallel" + test: | + system "#{bin}/goreleaser -v" diff --git a/vendor/github.com/moricho/tparallel/LICENSE b/vendor/github.com/moricho/tparallel/LICENSE new file mode 100644 index 0000000..4f02998 --- /dev/null +++ b/vendor/github.com/moricho/tparallel/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 moricho + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/moricho/tparallel/Makefile b/vendor/github.com/moricho/tparallel/Makefile new file mode 100644 index 0000000..fb35880 --- /dev/null +++ b/vendor/github.com/moricho/tparallel/Makefile @@ -0,0 +1,13 @@ +all: build + +.PHONY: build +build: + go build -o tparallel ./cmd/tparallel + +.PHONY: build_race +build_race: + go build -race -o tparallel ./cmd/tparallel + +.PHONY: test +test: build_race + go test -v ./... diff --git a/vendor/github.com/moricho/tparallel/README.md b/vendor/github.com/moricho/tparallel/README.md new file mode 100644 index 0000000..cd358d1 --- /dev/null +++ b/vendor/github.com/moricho/tparallel/README.md @@ -0,0 +1,100 @@ +# tparallel +[![tparallel](https://github.com/moricho/tparallel/workflows/tparallel/badge.svg?branch=master)](https://github.com/moricho/tparallel/actions) +[![Go Report Card](https://goreportcard.com/badge/github.com/moricho/tparallel)](https://goreportcard.com/report/github.com/moricho/tparallel) +[![MIT License](http://img.shields.io/badge/license-MIT-blue.svg?style=flat)](LICENSE) + +`tparallel` finds inappropriate usage of `t.Parallel()` method in your Go test codes. +It detects the following: +- `t.Parallel()` is called in either a top-level test function or a sub-test function only +- Although `t.Parallel()` is called in the sub-test function, it is post-processed by `defer` instead of `t.Cleanup()` + +This tool was inspired by this blog: [Go言語でのテストの並列化 〜t.Parallel()メソッドを理解する〜](https://engineering.mercari.com/blog/entry/how_to_use_t_parallel/) + +## Installation + +### From GitHub Releases +Please see [GitHub Releases](https://github.com/moricho/tparallel/releases). 
+Available binaries are: +- macOS +- Linux +- Windows + +### macOS +``` sh +$ brew tap moricho/tparallel +$ brew install tparallel +``` + +### go get +```sh +$ go get -u github.com/moricho/tparallel/cmd/tparallel +``` + +## Usage + +```sh +$ go vet -vettool=`which tparallel` +``` + +## Example + +```go +package sample + +import ( + "testing" +) + +func Test_Table1(t *testing.T) { + teardown := setup("Test_Table1") + defer teardown() + + tests := []struct { + name string + }{ + { + name: "Table1_Sub1", + }, + { + name: "Table1_Sub2", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + call(tt.name) + }) + } +} + +func Test_Table2(t *testing.T) { + teardown := setup("Test_Table2") + t.Cleanup(teardown) + t.Parallel() + + tests := []struct { + name string + }{ + { + name: "Table2_Sub1", + }, + { + name: "Table2_Sub2", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + call(tt.name) + }) + } +} +``` + +```console +# github.com/moricho/tparallel/testdata/src/sample +testdata/src/sample/table_test.go:7:6: Test_Table1 should use t.Cleanup +testdata/src/sample/table_test.go:7:6: Test_Table1 should call t.Parallel on the top level as well as its subtests +testdata/src/sample/table_test.go:30:6: Test_Table2's subtests should call t.Parallel +``` diff --git a/vendor/github.com/moricho/tparallel/go.mod b/vendor/github.com/moricho/tparallel/go.mod new file mode 100644 index 0000000..9947ccb --- /dev/null +++ b/vendor/github.com/moricho/tparallel/go.mod @@ -0,0 +1,8 @@ +module github.com/moricho/tparallel + +go 1.15 + +require ( + github.com/gostaticanalysis/analysisutil v0.1.0 + golang.org/x/tools v0.0.0-20200831203904-5a2aa26beb65 +) diff --git a/vendor/github.com/moricho/tparallel/go.sum b/vendor/github.com/moricho/tparallel/go.sum new file mode 100644 index 0000000..bcc4158 --- /dev/null +++ b/vendor/github.com/moricho/tparallel/go.sum @@ -0,0 +1,34 @@ +github.com/gostaticanalysis/analysisutil v0.1.0 h1:E4c8Y1EQURbBEAHoXc/jBTK7Np14ArT8NPUiSFOl9yc= +github.com/gostaticanalysis/analysisutil v0.1.0/go.mod h1:dMhHRU9KTiDcuLGdy87/2gTR8WruwYZrKdRq9m1O6uw= +github.com/gostaticanalysis/comment v1.3.0 h1:wTVgynbFu8/nz6SGgywA0TcyIoAVsYc7ai/Zp5xNGlw= +github.com/gostaticanalysis/comment v1.3.0/go.mod h1:xMicKDx7XRXYdVwY9f9wQpDJVnqWxw9wCauCMKp+IBI= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net 
v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/tools v0.0.0-20190307163923-6a08e3108db3/go.mod h1:25r3+/G6/xytQM8iWZKq3Hn0kr0rgFKPUNVEL/dr3z4= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200624225443-88f3c62a19ff/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200831203904-5a2aa26beb65 h1:DajXNh69ob79PCQz1N7OHxmqq6ASZC5xAnJJWIQGR6I= +golang.org/x/tools v0.0.0-20200831203904-5a2aa26beb65/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/vendor/github.com/moricho/tparallel/pkg/ssafunc/ssafunc.go b/vendor/github.com/moricho/tparallel/pkg/ssafunc/ssafunc.go new file mode 100644 index 0000000..5a8e637 --- /dev/null +++ b/vendor/github.com/moricho/tparallel/pkg/ssafunc/ssafunc.go @@ -0,0 +1,34 @@ +package ssafunc + +import ( + "go/types" + + "github.com/gostaticanalysis/analysisutil" + "github.com/moricho/tparallel/pkg/ssainstr" + "golang.org/x/tools/go/ssa" +) + +// IsDeferCalled returns whether the given ssa.Function calls `defer` +func IsDeferCalled(f *ssa.Function) bool { + for _, block := range f.Blocks { + for _, instr := range block.Instrs { + switch instr.(type) { + case *ssa.Defer: + return true + } + } + } + return false +} + +// IsCalled returns whether the given ssa.Function calls `fn` func +func IsCalled(f *ssa.Function, fn *types.Func) bool { + block := f.Blocks[0] + for _, instr := range block.Instrs { + called := analysisutil.Called(instr, nil, fn) + if _, ok := ssainstr.LookupCalled(instr, fn); ok || called { + return true + } + } + return false +} diff --git a/vendor/github.com/moricho/tparallel/pkg/ssainstr/ssainstr.go b/vendor/github.com/moricho/tparallel/pkg/ssainstr/ssainstr.go new file mode 100644 index 0000000..374553f --- /dev/null +++ b/vendor/github.com/moricho/tparallel/pkg/ssainstr/ssainstr.go @@ -0,0 +1,63 @@ +package ssainstr + +import ( + "go/types" + + "github.com/gostaticanalysis/analysisutil" + "golang.org/x/tools/go/ssa" +) + +// LookupCalled looks up ssa.Instruction that call the `fn` func in the given instr +func LookupCalled(instr ssa.Instruction, fn *types.Func) ([]ssa.Instruction, bool) { + 
instrs := []ssa.Instruction{} + + call, ok := instr.(ssa.CallInstruction) + if !ok { + return instrs, false + } + + ssaCall := call.Value() + if ssaCall == nil { + return instrs, false + } + common := ssaCall.Common() + if common == nil { + return instrs, false + } + val := common.Value + + called := false + switch fnval := val.(type) { + case *ssa.Function: + for _, block := range fnval.Blocks { + for _, instr := range block.Instrs { + if analysisutil.Called(instr, nil, fn) { + called = true + instrs = append(instrs, instr) + } + } + } + } + + return instrs, called +} + +// HasArgs returns whether the given ssa.Instruction has `typ` type args +func HasArgs(instr ssa.Instruction, typ types.Type) bool { + call, ok := instr.(ssa.CallInstruction) + if !ok { + return false + } + + ssaCall := call.Value() + if ssaCall == nil { + return false + } + + for _, arg := range ssaCall.Call.Args { + if types.Identical(arg.Type(), typ) { + return true + } + } + return false +} diff --git a/vendor/github.com/moricho/tparallel/testmap.go b/vendor/github.com/moricho/tparallel/testmap.go new file mode 100644 index 0000000..fa9bed7 --- /dev/null +++ b/vendor/github.com/moricho/tparallel/testmap.go @@ -0,0 +1,63 @@ +package tparallel + +import ( + "go/types" + "strings" + + "github.com/gostaticanalysis/analysisutil" + "golang.org/x/tools/go/analysis/passes/buildssa" + "golang.org/x/tools/go/ssa" + + "github.com/moricho/tparallel/pkg/ssainstr" +) + +// getTestMap gets a set of a top-level test and its sub-tests +func getTestMap(ssaanalyzer *buildssa.SSA, testTyp types.Type) map[*ssa.Function][]*ssa.Function { + testMap := map[*ssa.Function][]*ssa.Function{} + + trun := analysisutil.MethodOf(testTyp, "Run") + for _, f := range ssaanalyzer.SrcFuncs { + if !strings.HasPrefix(f.Name(), "Test") || !(f.Parent() == (*ssa.Function)(nil)) { + continue + } + testMap[f] = []*ssa.Function{} + for _, block := range f.Blocks { + for _, instr := range block.Instrs { + called := analysisutil.Called(instr, nil, trun) + + if !called && ssainstr.HasArgs(instr, types.NewPointer(testTyp)) { + if instrs, ok := ssainstr.LookupCalled(instr, trun); ok { + for _, v := range instrs { + testMap[f] = appendTestMap(testMap[f], v) + } + } + } else if called { + testMap[f] = appendTestMap(testMap[f], instr) + } + } + } + } + + return testMap +} + +// appendTestMap converts ssa.Instruction to ssa.Function and append it to a given sub-test slice +func appendTestMap(subtests []*ssa.Function, instr ssa.Instruction) []*ssa.Function { + call, ok := instr.(ssa.CallInstruction) + if !ok { + return subtests + } + + ssaCall := call.Value() + for _, arg := range ssaCall.Call.Args { + switch arg := arg.(type) { + case *ssa.Function: + subtests = append(subtests, arg) + case *ssa.MakeClosure: + fn, _ := arg.Fn.(*ssa.Function) + subtests = append(subtests, fn) + } + } + + return subtests +} diff --git a/vendor/github.com/moricho/tparallel/tparallel.go b/vendor/github.com/moricho/tparallel/tparallel.go new file mode 100644 index 0000000..3139e04 --- /dev/null +++ b/vendor/github.com/moricho/tparallel/tparallel.go @@ -0,0 +1,72 @@ +package tparallel + +import ( + "go/types" + + "github.com/gostaticanalysis/analysisutil" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/buildssa" + + "github.com/moricho/tparallel/pkg/ssafunc" +) + +const doc = "tparallel detects inappropriate usage of t.Parallel() method in your Go test codes." 
+ +// Analyzer analyzes Go test codes whether they use t.Parallel() appropriately +// by using SSA (Single Static Assignment) +var Analyzer = &analysis.Analyzer{ + Name: "tparallel", + Doc: doc, + Run: run, + Requires: []*analysis.Analyzer{ + buildssa.Analyzer, + }, +} + +func run(pass *analysis.Pass) (interface{}, error) { + ssaanalyzer := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) + + obj := analysisutil.ObjectOf(pass, "testing", "T") + if obj == nil { + // skip checking + return nil, nil + } + testTyp, testPkg := obj.Type(), obj.Pkg() + + p, _, _ := types.LookupFieldOrMethod(testTyp, true, testPkg, "Parallel") + parallel, _ := p.(*types.Func) + c, _, _ := types.LookupFieldOrMethod(testTyp, true, testPkg, "Cleanup") + cleanup, _ := c.(*types.Func) + + testMap := getTestMap(ssaanalyzer, testTyp) // ex. {Test1: [TestSub1, TestSub2], Test2: [TestSub1, TestSub2, TestSub3], ...} + for top, subs := range testMap { + if len(subs) == 0 { + continue + } + isParallelTop := ssafunc.IsCalled(top, parallel) + isPararellSub := false + for _, sub := range subs { + isPararellSub = ssafunc.IsCalled(sub, parallel) + if isPararellSub { + break + } + } + + if ssafunc.IsDeferCalled(top) { + useCleanup := ssafunc.IsCalled(top, cleanup) + if isPararellSub && !useCleanup { + pass.Reportf(top.Pos(), "%s should use t.Cleanup instead of defer", top.Name()) + } + } + + if isParallelTop == isPararellSub { + continue + } else if isPararellSub { + pass.Reportf(top.Pos(), "%s should call t.Parallel on the top level as well as its subtests", top.Name()) + } else if isParallelTop { + pass.Reportf(top.Pos(), "%s's subtests should call t.Parallel", top.Name()) + } + } + + return nil, nil +} diff --git a/vendor/github.com/nbutton23/zxcvbn-go/Gopkg.lock b/vendor/github.com/nbutton23/zxcvbn-go/Gopkg.lock deleted file mode 100644 index b5a62a3..0000000 --- a/vendor/github.com/nbutton23/zxcvbn-go/Gopkg.lock +++ /dev/null @@ -1,27 +0,0 @@ -# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. - - -[[projects]] - name = "github.com/davecgh/go-spew" - packages = ["spew"] - revision = "346938d642f2ec3594ed81d874461961cd0faa76" - version = "v1.1.0" - -[[projects]] - name = "github.com/pmezard/go-difflib" - packages = ["difflib"] - revision = "792786c7400a136282c1664665ae0a8db921c6c2" - version = "v1.0.0" - -[[projects]] - name = "github.com/stretchr/testify" - packages = ["assert"] - revision = "69483b4bd14f5845b5a1e55bca19e954e827f1d0" - version = "v1.1.4" - -[solve-meta] - analyzer-name = "dep" - analyzer-version = 1 - inputs-digest = "b750880cdc8ce044e6f9bf3b331d8a392471c328107b8c3d42e3e11022d76858" - solver-name = "gps-cdcl" - solver-version = 1 diff --git a/vendor/github.com/nbutton23/zxcvbn-go/Gopkg.toml b/vendor/github.com/nbutton23/zxcvbn-go/Gopkg.toml deleted file mode 100644 index c59386c..0000000 --- a/vendor/github.com/nbutton23/zxcvbn-go/Gopkg.toml +++ /dev/null @@ -1,26 +0,0 @@ - -# Gopkg.toml example -# -# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md -# for detailed Gopkg.toml documentation. 
-# -# required = ["github.com/user/thing/cmd/thing"] -# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"] -# -# [[constraint]] -# name = "github.com/user/project" -# version = "1.0.0" -# -# [[constraint]] -# name = "github.com/user/project2" -# branch = "dev" -# source = "github.com/myfork/project2" -# -# [[override]] -# name = "github.com/x/y" -# version = "2.4.0" - - -[[constraint]] - name = "github.com/stretchr/testify" - version = "1.1.4" diff --git a/vendor/github.com/nbutton23/zxcvbn-go/go.mod b/vendor/github.com/nbutton23/zxcvbn-go/go.mod new file mode 100644 index 0000000..61b9a67 --- /dev/null +++ b/vendor/github.com/nbutton23/zxcvbn-go/go.mod @@ -0,0 +1,9 @@ +module github.com/nbutton23/zxcvbn-go + +go 1.14 + +require ( + github.com/davecgh/go-spew v1.1.0 + github.com/pmezard/go-difflib v1.0.0 + github.com/stretchr/testify v1.1.4 +) diff --git a/vendor/github.com/nbutton23/zxcvbn-go/go.sum b/vendor/github.com/nbutton23/zxcvbn-go/go.sum new file mode 100644 index 0000000..656d004 --- /dev/null +++ b/vendor/github.com/nbutton23/zxcvbn-go/go.sum @@ -0,0 +1,5 @@ +github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/testify v1.1.4 h1:ToftOQTytwshuOSj6bDSolVUa3GINfJP/fg3OkkOzQQ= +github.com/stretchr/testify v1.1.4/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= diff --git a/vendor/github.com/nishanths/exhaustive/.gitignore b/vendor/github.com/nishanths/exhaustive/.gitignore new file mode 100644 index 0000000..24bde53 --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/.gitignore @@ -0,0 +1,7 @@ +.DS_Store +*.swp +tags + +# binary +cmd/exhaustive/exhaustive +exhaustive diff --git a/vendor/github.com/nishanths/exhaustive/.travis.yml b/vendor/github.com/nishanths/exhaustive/.travis.yml new file mode 100644 index 0000000..bd342f5 --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/.travis.yml @@ -0,0 +1,12 @@ +language: go + +go: + - 1.x + - master + +# Only clone the most recent commit. +git: + depth: 1 + +notifications: + email: false diff --git a/vendor/github.com/go-lintpack/lintpack/LICENSE b/vendor/github.com/nishanths/exhaustive/LICENSE similarity index 55% rename from vendor/github.com/go-lintpack/lintpack/LICENSE rename to vendor/github.com/nishanths/exhaustive/LICENSE index de0abcc..32befa6 100644 --- a/vendor/github.com/go-lintpack/lintpack/LICENSE +++ b/vendor/github.com/nishanths/exhaustive/LICENSE @@ -1,21 +1,22 @@ -Copyright (c) 2018, go-lintpack maintainers +BSD 2-Clause License + +Copyright (c) 2020, Nishanth Shanmugham +All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Intel Corporation nor the names of its contributors - may be used to endorse or promote products derived from this software - without specific prior written permission. +1. 
Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER diff --git a/vendor/github.com/nishanths/exhaustive/README.md b/vendor/github.com/nishanths/exhaustive/README.md new file mode 100644 index 0000000..90afc87 --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/README.md @@ -0,0 +1,91 @@ +# exhaustive + +[![Godoc](https://godoc.org/github.com/nishanths/exhaustive?status.svg)](https://godoc.org/github.com/nishanths/exhaustive) + +[![Build Status](https://travis-ci.org/nishanths/exhaustive.svg?branch=master)](https://travis-ci.org/nishanths/exhaustive) + +The `exhaustive` package and command line program can be used to detect +enum switch statements that are not exhaustive. + +An enum switch statement is exhaustive if it has cases for each of the enum's members. See godoc for the definition of enum used by the program. + +The `exhaustive` package provides an `Analyzer` that follows the guidelines +described in the [go/analysis](https://godoc.org/golang.org/x/tools/go/analysis) package; this makes +it possible to integrate into existing analysis driver programs. + +## Install + +``` +go get github.com/nishanths/exhaustive/... +``` + +## Docs + +https://godoc.org/github.com/nishanths/exhaustive + +## Usage + +The command line usage is: + +``` +Usage: exhaustive [-flags] [packages...] + +Flags: + -check-generated + check switch statements in generated files also + -default-signifies-exhaustive + indicates that switch statements are to be considered exhaustive if a 'default' case + is present, even if all enum members aren't listed in the switch (default false) + -fix + apply all suggested fixes (default false) + +Examples: + exhaustive github.com/foo/bar/... + exhaustive github.com/a/b github.com/x/y +``` + +## Example + +Given the code: + +```diff +package token + +type Token int + +const ( + Add Token = iota + Subtract + Multiply ++ Quotient ++ Remainder +) +``` +``` +package calc + +import "token" + +func processToken(t token.Token) { + switch t { + case token.Add: + ... + case token.Subtract: + ... + case token.Multiply: + ... + } +} +``` + +Running the `exhaustive` command will print: + +``` +calc.go:6:2: missing cases in switch of type token.Token: Quotient, Remainder +``` + +Enums can also be defined using explicit constant values instead of `iota`. 
+ +## License + +BSD 2-Clause diff --git a/vendor/github.com/nishanths/exhaustive/enum.go b/vendor/github.com/nishanths/exhaustive/enum.go new file mode 100644 index 0000000..ed0df64 --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/enum.go @@ -0,0 +1,146 @@ +package exhaustive + +import ( + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" +) + +type enums map[string]*enumMembers // enum type name -> enum members + +type enumMembers struct { + // Names in the order encountered in the AST. + OrderedNames []string + + // Maps name -> (constant.Value).ExactString(). + // If a name is missing in the map, it means that it does not have a + // corresponding constant.Value defined in the AST. + NameToValue map[string]string + + // Maps (constant.Value).ExactString() -> names. + // Names that don't have a constant.Value defined in the AST (e.g., some + // iota constants) will not have a corresponding entry in this map. + ValueToNames map[string][]string +} + +func (em *enumMembers) add(name string, constVal *string) { + em.OrderedNames = append(em.OrderedNames, name) + + if constVal != nil { + if em.NameToValue == nil { + em.NameToValue = make(map[string]string) + } + em.NameToValue[name] = *constVal + + if em.ValueToNames == nil { + em.ValueToNames = make(map[string][]string) + } + em.ValueToNames[*constVal] = append(em.ValueToNames[*constVal], name) + } +} + +func (em *enumMembers) numMembers() int { + return len(em.OrderedNames) +} + +func findEnums(pass *analysis.Pass) enums { + pkgEnums := make(enums) + + // Gather enum types. + for _, f := range pass.Files { + for _, decl := range f.Decls { + gen, ok := decl.(*ast.GenDecl) + if !ok { + continue + } + if gen.Tok != token.TYPE { + continue + } + for _, s := range gen.Specs { + // Must be TypeSpec since we've filtered on token.TYPE. + t, ok := s.(*ast.TypeSpec) + obj := pass.TypesInfo.Defs[t.Name] + if obj == nil { + continue + } + + named, ok := obj.Type().(*types.Named) + if !ok { + continue + } + basic, ok := named.Underlying().(*types.Basic) + if !ok { + continue + } + + switch i := basic.Info(); { + case i&types.IsInteger != 0: + pkgEnums[named.Obj().Name()] = &enumMembers{} + case i&types.IsFloat != 0: + pkgEnums[named.Obj().Name()] = &enumMembers{} + case i&types.IsString != 0: + pkgEnums[named.Obj().Name()] = &enumMembers{} + } + } + } + } + + // Gather enum members. + for _, f := range pass.Files { + for _, decl := range f.Decls { + gen, ok := decl.(*ast.GenDecl) + if !ok { + continue + } + if gen.Tok != token.CONST && gen.Tok != token.VAR { + continue + } + for _, s := range gen.Specs { + // Must be ValueSpec since we've filtered on token.CONST, token.VAR. + v := s.(*ast.ValueSpec) + for i, name := range v.Names { + obj := pass.TypesInfo.Defs[name] + if obj == nil { + continue + } + + named, ok := obj.Type().(*types.Named) + if !ok { + continue + } + + // Get the constant.Value representation, if any. + var constVal *string + if len(v.Values) > i { + value := v.Values[i] + if con, ok := pass.TypesInfo.Types[value]; ok && con.Value != nil { + str := con.Value.ExactString() // temp var to be able to take address + constVal = &str + } + } + + em, ok := pkgEnums[named.Obj().Name()] + if !ok { + continue + } + em.add(obj.Name(), constVal) + pkgEnums[named.Obj().Name()] = em + } + } + } + } + + // Delete member-less enum types. + // We can't call these enums, since we can't be sure without + // the existence of members. (The type may just be a named type, + // for instance.) 
+ for k, v := range pkgEnums { + if v.numMembers() == 0 { + delete(pkgEnums, k) + } + } + + return pkgEnums +} diff --git a/vendor/github.com/nishanths/exhaustive/exhaustive.go b/vendor/github.com/nishanths/exhaustive/exhaustive.go new file mode 100644 index 0000000..73815a6 --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/exhaustive.go @@ -0,0 +1,190 @@ +// Package exhaustive provides an analyzer that checks exhaustiveness of enum +// switch statements. The analyzer also provides fixes to make the offending +// switch statements exhaustive (see "Fixes" section). +// +// See "cmd/exhaustive" subpackage for the related command line program. +// +// Definition of enum +// +// The Go language spec does not provide an explicit definition for enums. +// For the purpose of this program, an enum type is a package-level named type +// whose underlying type is an integer (includes byte and rune), a float, or +// a string type. An enum type must have associated with it one or more +// package-level variables of the named type in the package. These variables +// constitute the enum's members. +// +// In the code snippet below, Biome is an enum type with 3 members. (You may +// also use iota instead of explicitly specifying values.) +// +// type Biome int +// +// const ( +// Tundra Biome = 1 +// Savanna Biome = 2 +// Desert Biome = 3 +// ) +// +// Switch statement exhaustiveness +// +// An enum switch statement is exhaustive if it has cases for each of the enum's members. +// +// For an enum type defined in the same package as the switch statement, both +// exported and unexported enum members must be present in order to consider +// the switch exhaustive. On the other hand, for an enum type defined +// in an external package it is sufficient for just exported enum members +// to be present in order to consider the switch exhaustive. +// +// Flags +// +// The analyzer accepts a boolean flag: -default-signifies-exhaustive. +// The flag, if enabled, indicates to the analyzer that switch statements +// are to be considered exhaustive as long as a 'default' case is present, even +// if all enum members aren't listed in the switch statements cases. +// +// The -check-generated boolean flag, disabled by default, indicates whether +// to check switch statements in generated Go source files. +// +// The other relevant flag is the -fix flag; its behavior is described +// in the next section. +// +// Fixes +// +// The analyzer suggests fixes for a switch statement if it is not exhaustive +// and does not have a 'default' case. The suggested fix always adds a single +// case clause for the missing enum members. +// +// case MissingA, MissingB, MissingC: +// panic(fmt.Sprintf("unhandled value: %v", v)) +// +// where v is the expression in the switch statement's tag (in other words, the +// value being switched upon). If the switch statement's tag is a function or a +// method call the analyzer does not suggest a fix, as reusing the call expression +// in the panic/fmt.Sprintf call could be mutative. +// +// The rationale for the fix using panic is that it might be better to fail loudly on +// existing unhandled or impossible cases than to let them slip by quietly unnoticed. +// An even better fix may, of course, be to manually inspect the sites reported +// by the package and handle the missing cases if necessary. +// +// Imports will be adjusted automatically to account for the "fmt" dependency. 
+// +// Skipping analysis +// +// If the following directive comment: +// +// //exhaustive:ignore +// +// is associated with a switch statement, the analyzer skips +// checking of the switch statement and no diagnostics are reported. +// +// Additionally, no diagnostics are reported for switch statements in +// generated files (see https://golang.org/s/generatedcode for definition of +// generated file), unless the -check-generated flag is enabled. +package exhaustive + +import ( + "go/ast" + "go/types" + "sort" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +// Flag names used by the analyzer. They are exported for use by analyzer +// driver programs. +const ( + DefaultSignifiesExhaustiveFlag = "default-signifies-exhaustive" + CheckGeneratedFlag = "check-generated" +) + +var ( + fDefaultSignifiesExhaustive bool + fCheckGeneratedFiles bool +) + +func init() { + Analyzer.Flags.BoolVar(&fDefaultSignifiesExhaustive, DefaultSignifiesExhaustiveFlag, false, "indicates that switch statements are to be considered exhaustive if a 'default' case is present, even if all enum members aren't listed in the switch") + Analyzer.Flags.BoolVar(&fCheckGeneratedFiles, CheckGeneratedFlag, false, "check switch statements in generated files also") +} + +var Analyzer = &analysis.Analyzer{ + Name: "exhaustive", + Doc: "check exhaustiveness of enum switch statements", + Run: run, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + FactTypes: []analysis.Fact{&enumsFact{}}, +} + +// IgnoreDirectivePrefix is used to exclude checking of specific switch statements. +// See package comment for details. +const IgnoreDirectivePrefix = "//exhaustive:ignore" + +func containsIgnoreDirective(comments []*ast.Comment) bool { + for _, c := range comments { + if strings.HasPrefix(c.Text, IgnoreDirectivePrefix) { + return true + } + } + return false +} + +type enumsFact struct { + Enums enums +} + +var _ analysis.Fact = (*enumsFact)(nil) + +func (e *enumsFact) AFact() {} + +func (e *enumsFact) String() string { + // sort for stability (required for testing) + var sortedKeys []string + for k := range e.Enums { + sortedKeys = append(sortedKeys, k) + } + sort.Strings(sortedKeys) + + var buf strings.Builder + for i, k := range sortedKeys { + v := e.Enums[k] + buf.WriteString(k) + buf.WriteString(":") + + for j, vv := range v.OrderedNames { + buf.WriteString(vv) + // add comma separator between each enum member in an enum type + if j != len(v.OrderedNames)-1 { + buf.WriteString(",") + } + } + // add semicolon separator between each enum type + if i != len(sortedKeys)-1 { + buf.WriteString("; ") + } + } + return buf.String() +} + +func run(pass *analysis.Pass) (interface{}, error) { + e := findEnums(pass) + if len(e) != 0 { + pass.ExportPackageFact(&enumsFact{Enums: e}) + } + + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + comments := make(map[*ast.File]ast.CommentMap) // CommentMap per package file, lazily populated by reference + generated := make(map[*ast.File]bool) + + checkSwitchStatements(pass, inspect, comments, generated) + return nil, nil +} + +func enumTypeName(e *types.Named, samePkg bool) string { + if samePkg { + return e.Obj().Name() + } + return e.Obj().Pkg().Name() + "." 
+ e.Obj().Name() +} diff --git a/vendor/github.com/nishanths/exhaustive/generated.go b/vendor/github.com/nishanths/exhaustive/generated.go new file mode 100644 index 0000000..19b4fb1 --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/generated.go @@ -0,0 +1,34 @@ +package exhaustive + +import ( + "go/ast" + "strings" +) + +// Adapated from https://gotools.org/dmitri.shuralyov.com/go/generated + +func isGeneratedFile(file *ast.File) bool { + for _, c := range file.Comments { + for _, cc := range c.List { + s := cc.Text // "\n" already removed (see doc comment) + if len(s) >= 1 && s[len(s)-1] == '\r' { + s = s[:len(s)-1] // Trim "\r". + } + if containsGeneratedComment(s) { + return true + } + } + } + + return false +} + +func containsGeneratedComment(s string) bool { + return strings.HasPrefix(s, genCommentPrefix) && + strings.HasSuffix(s, genCommentSuffix) +} + +const ( + genCommentPrefix = "// Code generated " + genCommentSuffix = " DO NOT EDIT." +) diff --git a/vendor/github.com/nishanths/exhaustive/go.mod b/vendor/github.com/nishanths/exhaustive/go.mod new file mode 100644 index 0000000..9a75e51 --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/go.mod @@ -0,0 +1,5 @@ +module github.com/nishanths/exhaustive + +go 1.14 + +require golang.org/x/tools v0.0.0-20201011145850-ed2f50202694 diff --git a/vendor/github.com/nishanths/exhaustive/go.sum b/vendor/github.com/nishanths/exhaustive/go.sum new file mode 100644 index 0000000..4f00a79 --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/go.sum @@ -0,0 +1,38 @@ +github.com/yuin/goldmark v1.1.27 h1:nqDD4MMMQA0lmWq03Z2/myGPYLQoXtmi0rGVs95ntbo= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1 h1:ruQGxdhGHe7FWOJPT0mKs5+pD2Xs1Bm/kdGlHO04FmM= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/mod v0.2.0 h1:KU7oHjnv3XNWfa5COkzUifxZmxp1TyI7ImMXqFxLwvQ= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b h1:0mm1VjtFUOIlE1SbDlwjYaDxZVDP2S5ou6y0gSgXHu8= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200822124328-c89045814202 h1:VvcQYSHwXgi7W+TpUR6A9g6Up98WAHf3f/ulnJ62IyA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e h1:vcxGaoTs7kV8m5Np9uUNQin4BrLOthgV7252N8V+FwY= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208 
h1:qwRHBd0NqMbJxfbotnDhm2ByMI1Shq4Y6oRJo21SGJA= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200519015757-0d0afa43d58a h1:gILuVKC+ZPD6g/tj6zBOdnOH1ZHI0zZ86+KLMogc6/s= +golang.org/x/tools v0.0.0-20200519015757-0d0afa43d58a/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200519142718-10921354bc51 h1:GtYAC9y+dpwWCXBwbcZgxcFfiqW4SI93yvQqpF+9+P8= +golang.org/x/tools v0.0.0-20201011145850-ed2f50202694 h1:BANdcOVw3KTuUiyfDp7wrzCpkCe8UP3lowugJngxBTg= +golang.org/x/tools v0.0.0-20201011145850-ed2f50202694/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/vendor/github.com/nishanths/exhaustive/switch.go b/vendor/github.com/nishanths/exhaustive/switch.go new file mode 100644 index 0000000..2cec7f9 --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/switch.go @@ -0,0 +1,431 @@ +package exhaustive + +import ( + "bytes" + "fmt" + "go/ast" + "go/printer" + "go/token" + "go/types" + "sort" + "strconv" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/go/ast/inspector" +) + +func isDefaultCase(c *ast.CaseClause) bool { + return c.List == nil // see doc comment on field +} + +func checkSwitchStatements( + pass *analysis.Pass, + inspect *inspector.Inspector, + comments map[*ast.File]ast.CommentMap, + generated map[*ast.File]bool, +) { + inspect.WithStack([]ast.Node{&ast.SwitchStmt{}}, func(n ast.Node, push bool, stack []ast.Node) bool { + if !push { + return true + } + + file := stack[0].(*ast.File) + + // Determine if file is a generated file, based on https://golang.org/s/generatedcode. + // If generated, don't check this file. + var isGenerated bool + if gen, ok := generated[file]; ok { + isGenerated = gen + } else { + isGenerated = isGeneratedFile(file) + generated[file] = isGenerated + } + if isGenerated && !fCheckGeneratedFiles { + // don't check + return true + } + + sw := n.(*ast.SwitchStmt) + if sw.Tag == nil { + return true + } + t := pass.TypesInfo.Types[sw.Tag] + if !t.IsValue() { + return true + } + tagType, ok := t.Type.(*types.Named) + if !ok { + return true + } + + tagPkg := tagType.Obj().Pkg() + if tagPkg == nil { + // Doc comment: nil for labels and objects in the Universe scope. + // This happens for the `error` type, for example. 
+ // Continuing would mean that ImportPackageFact panics. + return true + } + + var enums enumsFact + if !pass.ImportPackageFact(tagPkg, &enums) { + // Can't do anything further. + return true + } + + em, isEnum := enums.Enums[tagType.Obj().Name()] + if !isEnum { + // Tag's type is not a known enum. + return true + } + + // Get comment map. + var allComments ast.CommentMap + if cm, ok := comments[file]; ok { + allComments = cm + } else { + allComments = ast.NewCommentMap(pass.Fset, file, file.Comments) + comments[file] = allComments + } + + specificComments := allComments.Filter(sw) + for _, group := range specificComments.Comments() { + if containsIgnoreDirective(group.List) { + return true // skip checking due to ignore directive + } + } + + samePkg := tagPkg == pass.Pkg + checkUnexported := samePkg + + hitlist := hitlistFromEnumMembers(em, checkUnexported) + if len(hitlist) == 0 { + // can happen if external package and enum consists only of + // unexported members + return true + } + + defaultCaseExists := false + for _, stmt := range sw.Body.List { + caseCl := stmt.(*ast.CaseClause) + if isDefaultCase(caseCl) { + defaultCaseExists = true + continue // nothing more to do if it's the default case + } + for _, e := range caseCl.List { + e = astutil.Unparen(e) + if samePkg { + ident, ok := e.(*ast.Ident) + if !ok { + continue + } + updateHitlist(hitlist, em, ident.Name) + } else { + selExpr, ok := e.(*ast.SelectorExpr) + if !ok { + continue + } + + // ensure X is package identifier + ident, ok := selExpr.X.(*ast.Ident) + if !ok { + continue + } + if !isPackageNameIdentifier(pass, ident) { + continue + } + + updateHitlist(hitlist, em, selExpr.Sel.Name) + } + } + } + + defaultSuffices := fDefaultSignifiesExhaustive && defaultCaseExists + shouldReport := len(hitlist) > 0 && !defaultSuffices + + if shouldReport { + reportSwitch(pass, sw, samePkg, tagType, em, hitlist, defaultCaseExists, file) + } + return true + }) +} + +func updateHitlist(hitlist map[string]struct{}, em *enumMembers, foundName string) { + constVal, ok := em.NameToValue[foundName] + if !ok { + // only delete the name alone from hitlist + delete(hitlist, foundName) + return + } + + // delete all of the same-valued names from hitlist + namesToDelete := em.ValueToNames[constVal] + for _, n := range namesToDelete { + delete(hitlist, n) + } +} + +func isPackageNameIdentifier(pass *analysis.Pass, ident *ast.Ident) bool { + obj := pass.TypesInfo.ObjectOf(ident) + if obj == nil { + return false + } + _, ok := obj.(*types.PkgName) + return ok +} + +func hitlistFromEnumMembers(em *enumMembers, checkUnexported bool) map[string]struct{} { + hitlist := make(map[string]struct{}) + for _, m := range em.OrderedNames { + if m == "_" { + // blank identifier is often used to skip entries in iota lists + continue + } + if !ast.IsExported(m) && !checkUnexported { + continue + } + hitlist[m] = struct{}{} + } + return hitlist +} + +func determineMissingOutput(missingMembers map[string]struct{}, em *enumMembers) []string { + constValMembers := make(map[string][]string) // value -> names + var otherMembers []string // non-constant value names + + for m := range missingMembers { + if constVal, ok := em.NameToValue[m]; ok { + constValMembers[constVal] = append(constValMembers[constVal], m) + } else { + otherMembers = append(otherMembers, m) + } + } + + missingOutput := make([]string, 0, len(constValMembers)+len(otherMembers)) + for _, names := range constValMembers { + sort.Strings(names) + missingOutput = append(missingOutput, strings.Join(names, 
"|")) + } + missingOutput = append(missingOutput, otherMembers...) + sort.Strings(missingOutput) + return missingOutput +} + +func reportSwitch( + pass *analysis.Pass, + sw *ast.SwitchStmt, + samePkg bool, + enumType *types.Named, + em *enumMembers, + missingMembers map[string]struct{}, + defaultCaseExists bool, + f *ast.File, +) { + missingOutput := determineMissingOutput(missingMembers, em) + + var fixes []analysis.SuggestedFix + if !defaultCaseExists { + if fix, ok := computeFix(pass, pass.Fset, f, sw, enumType, samePkg, missingMembers); ok { + fixes = append(fixes, fix) + } + } + + pass.Report(analysis.Diagnostic{ + Pos: sw.Pos(), + End: sw.End(), + Message: fmt.Sprintf("missing cases in switch of type %s: %s", enumTypeName(enumType, samePkg), strings.Join(missingOutput, ", ")), + SuggestedFixes: fixes, + }) +} + +func computeFix(pass *analysis.Pass, fset *token.FileSet, f *ast.File, sw *ast.SwitchStmt, enumType *types.Named, samePkg bool, missingMembers map[string]struct{}) (analysis.SuggestedFix, bool) { + // Function and method calls may be mutative, so we don't want to reuse the + // call expression in the about-to-be-inserted case clause body. So we just + // don't suggest a fix in such situations. + // + // However, we need to make an exception for type conversions, which are + // also call expressions in the AST. + // + // We'll need to lookup type information for this, and can't rely solely + // on the AST. + if containsFuncCall(pass, sw.Tag) { + return analysis.SuggestedFix{}, false + } + + textEdits := []analysis.TextEdit{ + missingCasesTextEdit(fset, f, samePkg, sw, enumType, missingMembers), + } + + // need to add "fmt" import if "fmt" import doesn't already exist + if !hasImportWithPath(fset, f, `"fmt"`) { + textEdits = append(textEdits, fmtImportTextEdit(fset, f)) + } + + missing := make([]string, 0, len(missingMembers)) + for m := range missingMembers { + missing = append(missing, m) + } + sort.Strings(missing) + + return analysis.SuggestedFix{ + Message: fmt.Sprintf("add case clause for: %s?", strings.Join(missing, ", ")), + TextEdits: textEdits, + }, true +} + +func containsFuncCall(pass *analysis.Pass, e ast.Expr) bool { + e = astutil.Unparen(e) + c, ok := e.(*ast.CallExpr) + if !ok { + return false + } + if _, isFunc := pass.TypesInfo.TypeOf(c.Fun).Underlying().(*types.Signature); isFunc { + return true + } + for _, a := range c.Args { + if containsFuncCall(pass, a) { + return true + } + } + return false +} + +func firstImportDecl(fset *token.FileSet, f *ast.File) *ast.GenDecl { + for _, decl := range f.Decls { + genDecl, ok := decl.(*ast.GenDecl) + if ok && genDecl.Tok == token.IMPORT { + // first IMPORT GenDecl + return genDecl + } + } + return nil +} + +// copies an GenDecl in a manner such that appending to the returned GenDecl's Specs field +// doesn't mutate the original GenDecl +func copyGenDecl(im *ast.GenDecl) *ast.GenDecl { + imCopy := *im + imCopy.Specs = make([]ast.Spec, len(im.Specs)) + for i := range im.Specs { + imCopy.Specs[i] = im.Specs[i] + } + return &imCopy +} + +func hasImportWithPath(fset *token.FileSet, f *ast.File, pathLiteral string) bool { + igroups := astutil.Imports(fset, f) + for _, igroup := range igroups { + for _, importSpec := range igroup { + if importSpec.Path.Value == pathLiteral { + return true + } + } + } + return false +} + +func fmtImportTextEdit(fset *token.FileSet, f *ast.File) analysis.TextEdit { + firstDecl := firstImportDecl(fset, f) + + if firstDecl == nil { + // file has no import declarations + // insert "fmt" import 
spec after package statement + return analysis.TextEdit{ + Pos: f.Name.End() + 1, // end of package name + 1 + End: f.Name.End() + 1, + NewText: []byte(`import ( + "fmt" + )`), + } + } + + // copy because we'll be mutating its Specs field + firstDeclCopy := copyGenDecl(firstDecl) + + // find insertion index for "fmt" import spec + var i int + for ; i < len(firstDeclCopy.Specs); i++ { + im := firstDeclCopy.Specs[i].(*ast.ImportSpec) + if v, _ := strconv.Unquote(im.Path.Value); v > "fmt" { + break + } + } + + // insert "fmt" import spec at the index + fmtSpec := &ast.ImportSpec{ + Path: &ast.BasicLit{ + // NOTE: Pos field doesn't seem to be required for our + // purposes here. + Kind: token.STRING, + Value: `"fmt"`, + }, + } + s := firstDeclCopy.Specs // local var for easier comprehension of next line + s = append(s[:i], append([]ast.Spec{fmtSpec}, s[i:]...)...) + firstDeclCopy.Specs = s + + // create the text edit + var buf bytes.Buffer + printer.Fprint(&buf, fset, firstDeclCopy) + + return analysis.TextEdit{ + Pos: firstDecl.Pos(), + End: firstDecl.End(), + NewText: buf.Bytes(), + } +} + +func missingCasesTextEdit(fset *token.FileSet, f *ast.File, samePkg bool, sw *ast.SwitchStmt, enumType *types.Named, missingMembers map[string]struct{}) analysis.TextEdit { + // ... Construct insertion text for case clause and its body ... + + var tag bytes.Buffer + printer.Fprint(&tag, fset, sw.Tag) + + // If possible and if necessary, determine the package identifier based on the AST of other `case` clauses. + var pkgIdent *ast.Ident + if !samePkg { + for _, stmt := range sw.Body.List { + caseCl := stmt.(*ast.CaseClause) + // At least one expression must exist in List at this point. + // List cannot be nil because we only arrive here if the "default" clause + // does not exist. Additionally, a syntactically valid case clause must + // have at least one expression. + if sel, ok := caseCl.List[0].(*ast.SelectorExpr); ok { + pkgIdent = sel.X.(*ast.Ident) + break + } + } + } + + missing := make([]string, 0, len(missingMembers)) + for m := range missingMembers { + if !samePkg { + if pkgIdent != nil { + // we were able to determine package identifier + missing = append(missing, pkgIdent.Name+"."+m) + } else { + // use the package name (may not be correct always) + // + // TODO: May need to also add import if the package isn't imported + // elsewhere. This (ie, a switch with zero case clauses) should + // happen rarely, so don't implement this for now. + missing = append(missing, enumType.Obj().Pkg().Name()+"."+m) + } + } else { + missing = append(missing, m) + } + } + sort.Strings(missing) + + insert := `case ` + strings.Join(missing, ", ") + `: + panic(fmt.Sprintf("unhandled value: %v",` + tag.String() + `))` + + // ... Create the text edit ... + + return analysis.TextEdit{ + Pos: sw.Body.Rbrace - 1, + End: sw.Body.Rbrace - 1, + NewText: []byte(insert), + } +} diff --git a/vendor/github.com/nishanths/predeclared/LICENSE b/vendor/github.com/nishanths/predeclared/LICENSE new file mode 100644 index 0000000..9462123 --- /dev/null +++ b/vendor/github.com/nishanths/predeclared/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2017, Nishanth Shanmugham +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. 
+ +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/nishanths/predeclared/passes/predeclared/go18.go b/vendor/github.com/nishanths/predeclared/passes/predeclared/go18.go new file mode 100644 index 0000000..4083efc --- /dev/null +++ b/vendor/github.com/nishanths/predeclared/passes/predeclared/go18.go @@ -0,0 +1,9 @@ +// +build go1.8 + +package predeclared + +import "go/doc" + +func isPredeclaredIdent(name string) bool { + return doc.IsPredeclared(name) +} diff --git a/vendor/github.com/nishanths/predeclared/passes/predeclared/pre_go18.go b/vendor/github.com/nishanths/predeclared/passes/predeclared/pre_go18.go new file mode 100644 index 0000000..5780e0b --- /dev/null +++ b/vendor/github.com/nishanths/predeclared/passes/predeclared/pre_go18.go @@ -0,0 +1,53 @@ +// +build !go1.8 + +package predeclared + +func isPredeclaredIdent(name string) bool { + return predeclaredIdents[name] +} + +// Keep in sync with https://golang.org/ref/spec#Predeclared_identifiers +var predeclaredIdents = map[string]bool{ + "bool": true, + "byte": true, + "complex64": true, + "complex128": true, + "error": true, + "float32": true, + "float64": true, + "int": true, + "int8": true, + "int16": true, + "int32": true, + "int64": true, + "rune": true, + "string": true, + "uint": true, + "uint8": true, + "uint16": true, + "uint32": true, + "uint64": true, + "uintptr": true, + + "true": true, + "false": true, + "iota": true, + + "nil": true, + + "append": true, + "cap": true, + "close": true, + "complex": true, + "copy": true, + "delete": true, + "imag": true, + "len": true, + "make": true, + "new": true, + "panic": true, + "print": true, + "println": true, + "real": true, + "recover": true, +} diff --git a/vendor/github.com/nishanths/predeclared/passes/predeclared/predeclared.go b/vendor/github.com/nishanths/predeclared/passes/predeclared/predeclared.go new file mode 100644 index 0000000..67c0e0a --- /dev/null +++ b/vendor/github.com/nishanths/predeclared/passes/predeclared/predeclared.go @@ -0,0 +1,202 @@ +// Package predeclared provides a static analysis (used by the predeclared command) +// that can detect declarations in Go code that shadow one of Go's predeclared identifiers. +package predeclared + +import ( + "fmt" + "go/ast" + "go/token" + "strings" + + "golang.org/x/tools/go/analysis" +) + +// Flag names used by the analyzer. 
They are exported for use by analyzer +// driver programs. +const ( + IgnoreFlag = "ignore" + QualifiedFlag = "q" +) + +var ( + fIgnore string + fQualified bool +) + +func init() { + Analyzer.Flags.StringVar(&fIgnore, IgnoreFlag, "", "comma-separated list of predeclared identifiers to not report on") + Analyzer.Flags.BoolVar(&fQualified, QualifiedFlag, false, "include method names and field names (i.e., qualified names) in checks") +} + +var Analyzer = &analysis.Analyzer{ + Name: "predeclared", + Doc: "find code that shadows one of Go's predeclared identifiers", + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + cfg := newConfig(fIgnore, fQualified) + for _, file := range pass.Files { + processFile(pass.Report, cfg, pass.Fset, file) + } + return nil, nil +} + +type config struct { + qualified bool + ignoredIdents map[string]struct{} +} + +func newConfig(ignore string, qualified bool) *config { + cfg := &config{ + qualified: qualified, + ignoredIdents: map[string]struct{}{}, + } + for _, s := range strings.Split(ignore, ",") { + ident := strings.TrimSpace(s) + if ident == "" { + continue + } + cfg.ignoredIdents[ident] = struct{}{} + } + return cfg +} + +type issue struct { + ident *ast.Ident + kind string + fset *token.FileSet +} + +func (i issue) String() string { + pos := i.fset.Position(i.ident.Pos()) + return fmt.Sprintf("%s: %s %s has same name as predeclared identifier", pos, i.kind, i.ident.Name) +} + +func processFile(report func(analysis.Diagnostic), cfg *config, fset *token.FileSet, file *ast.File) []issue { // nolint: gocyclo + var issues []issue + + maybeReport := func(x *ast.Ident, kind string) { + if _, isIgnored := cfg.ignoredIdents[x.Name]; !isIgnored && isPredeclaredIdent(x.Name) { + report(analysis.Diagnostic{ + Pos: x.Pos(), + End: x.End(), + Message: fmt.Sprintf("%s %s has same name as predeclared identifier", kind, x.Name), + }) + issues = append(issues, issue{x, kind, fset}) + } + } + + seenValueSpecs := make(map[*ast.ValueSpec]bool) + + // TODO: consider deduping package name issues for files in the + // same directory. + maybeReport(file.Name, "package name") + + for _, spec := range file.Imports { + if spec.Name == nil { + continue + } + maybeReport(spec.Name, "import name") + } + + // Handle declarations and fields. 
+ // https://golang.org/ref/spec#Declarations_and_scope + ast.Inspect(file, func(n ast.Node) bool { + switch x := n.(type) { + case *ast.GenDecl: + var kind string + switch x.Tok { + case token.CONST: + kind = "const" + case token.VAR: + kind = "variable" + default: + return true + } + for _, spec := range x.Specs { + if vspec, ok := spec.(*ast.ValueSpec); ok && !seenValueSpecs[vspec] { + seenValueSpecs[vspec] = true + for _, name := range vspec.Names { + maybeReport(name, kind) + } + } + } + return true + case *ast.TypeSpec: + maybeReport(x.Name, "type") + return true + case *ast.StructType: + if cfg.qualified && x.Fields != nil { + for _, field := range x.Fields.List { + for _, name := range field.Names { + maybeReport(name, "field") + } + } + } + return true + case *ast.InterfaceType: + if cfg.qualified && x.Methods != nil { + for _, meth := range x.Methods.List { + for _, name := range meth.Names { + maybeReport(name, "method") + } + } + } + return true + case *ast.FuncDecl: + if x.Recv == nil { + // it's a function + maybeReport(x.Name, "function") + } else { + // it's a method + if cfg.qualified { + maybeReport(x.Name, "method") + } + } + // add receivers idents + if x.Recv != nil { + for _, field := range x.Recv.List { + for _, name := range field.Names { + maybeReport(name, "receiver") + } + } + } + // Params and Results will be checked in the *ast.FuncType case. + return true + case *ast.FuncType: + // add params idents + for _, field := range x.Params.List { + for _, name := range field.Names { + maybeReport(name, "param") + } + } + // add returns idents + if x.Results != nil { + for _, field := range x.Results.List { + for _, name := range field.Names { + maybeReport(name, "named return") + } + } + } + return true + case *ast.LabeledStmt: + maybeReport(x.Label, "label") + return true + case *ast.AssignStmt: + // We only care about short variable declarations, which use token.DEFINE. + if x.Tok == token.DEFINE { + for _, expr := range x.Lhs { + if ident, ok := expr.(*ast.Ident); ok { + maybeReport(ident, "variable") + } + } + } + return true + default: + return true + } + }) + + return issues +} diff --git a/vendor/github.com/phayes/checkstyle/.scrutinizer.yml b/vendor/github.com/phayes/checkstyle/.scrutinizer.yml new file mode 100644 index 0000000..d9284b6 --- /dev/null +++ b/vendor/github.com/phayes/checkstyle/.scrutinizer.yml @@ -0,0 +1,15 @@ +build: + dependencies: + before: + - 'source <(curl -fsSL https://raw.githubusercontent.com/phayes/go-scrutinize/master/install-golang)' + + tests: + override: + - + command: 'cd $PROJECTPATH && go-scrutinize' + coverage: + file: 'coverage.xml' + format: 'clover' + analysis: + file: 'checkstyle_report.xml' + format: 'general-checkstyle' \ No newline at end of file diff --git a/vendor/github.com/phayes/checkstyle/LICENSE b/vendor/github.com/phayes/checkstyle/LICENSE new file mode 100644 index 0000000..6dc912f --- /dev/null +++ b/vendor/github.com/phayes/checkstyle/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2017, Patrick D Hayes +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. 
+ +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/phayes/checkstyle/README.md b/vendor/github.com/phayes/checkstyle/README.md new file mode 100644 index 0000000..358cf67 --- /dev/null +++ b/vendor/github.com/phayes/checkstyle/README.md @@ -0,0 +1,44 @@ +# checkstyle +[![GoDoc](https://godoc.org/github.com/phayes/checkstyle?status.svg)](https://godoc.org/github.com/phayes/checkstyle) +[![Go Report Card](https://goreportcard.com/badge/github.com/phayes/checkstyle)](https://goreportcard.com/report/github.com/phayes/checkstyle) +[![Build Status](https://scrutinizer-ci.com/g/phayes/checkstyle/badges/build.png?b=master)](https://scrutinizer-ci.com/g/phayes/checkstyle/build-status/master) + +Read and write checksyle_report.xml files with golang + +Checkstyle XML files are a standard file format for reporting errors in source code, and is often generated by static analysis tools. 
+ +Example usage: + +```go + +import "github.com/phayes/checkstyle" + +// Print XML into human readable format +checkSyle, err := checkstyle.ReadFile("checkstyle_report.xml") +if err != nil { + log.Fatal(err) +} +for _, file := range checkStyle.File { + fmt.Println(File.Name) + for _, codingError := range file.Error { + fmt.Println("\t", codingError.Line, codingError.Message) + } +} + +// Create a new XML file from scratch +check := checkstyle.New() + +// Ensure that a file has been added +file := check.EnsureFile("/path/to/file") + +// Create an error on line 10 +codingError := checkstyle.NewError(10, "format", "line must end with a full stop") + +// Add the error to the file +file.AddError(codingError) + +// Output XML +fmt.Print(check) +``` + +For more information on checkstyle XML see: http://checkstyle.sourceforge.net/checks.html diff --git a/vendor/github.com/phayes/checkstyle/checkstyle.go b/vendor/github.com/phayes/checkstyle/checkstyle.go new file mode 100644 index 0000000..cabbd4b --- /dev/null +++ b/vendor/github.com/phayes/checkstyle/checkstyle.go @@ -0,0 +1,112 @@ +package checkstyle + +import "encoding/xml" +import "io/ioutil" + +// DefaultCheckStyleVersion defines the default "version" attribute on "" lememnt +var DefaultCheckStyleVersion = "1.0.0" + +// Severity defines a checkstyle severity code +type Severity string + +var ( + SeverityError Severity = "error" + SeverityInfo Severity = "info" + SeverityWarning Severity = "warning" + SeverityIgnore Severity = "ignore" + SeverityNone Severity +) + +// CheckStyle represents a xml element found in a checkstyle_report.xml file. +type CheckStyle struct { + XMLName xml.Name `xml:"checkstyle"` + Version string `xml:"version,attr"` + File []*File `xml:"file"` +} + +// AddFile adds a checkstyle.File with the given filename. +func (cs *CheckStyle) AddFile(csf *File) { + cs.File = append(cs.File, csf) +} + +// GetFile gets a CheckStyleFile with the given filename. +func (cs *CheckStyle) GetFile(filename string) (csf *File, ok bool) { + for _, file := range cs.File { + if file.Name == filename { + csf = file + ok = true + return + } + } + return +} + +// EnsureFile ensures that a CheckStyleFile with the given name exists +// Returns either an exiting CheckStyleFile (if a file with that name exists) +// or a new CheckStyleFile (if a file with that name does not exists) +func (cs *CheckStyle) EnsureFile(filename string) (csf *File) { + csf, ok := cs.GetFile(filename) + if !ok { + csf = NewFile(filename) + cs.AddFile(csf) + } + return csf +} + +// String implements Stringer. Returns as xml. +func (cs *CheckStyle) String() string { + checkStyleXML, err := xml.Marshal(cs) + if err != nil { + panic(err) + } + return string(checkStyleXML) +} + +// New returns a new CheckStyle +func New() *CheckStyle { + return &CheckStyle{Version: DefaultCheckStyleVersion, File: []*File{}} +} + +// File represents a xml element. +type File struct { + XMLName xml.Name `xml:"file"` + Name string `xml:"name,attr"` + Error []*Error `xml:"error"` +} + +// AddError adds a checkstyle.Error to the file. 
+func (csf *File) AddError(cse *Error) { + csf.Error = append(csf.Error, cse) +} + +// NewFile creates a new checkstyle.File +func NewFile(filename string) *File { + return &File{Name: filename, Error: []*Error{}} +} + +// Error represents a xml element +type Error struct { + XMLName xml.Name `xml:"error"` + Line int `xml:"line,attr"` + Column int `xml:"column,attr,omitempty"` + Severity Severity `xml:"severity,attr,omitempty"` + Message string `xml:"message,attr"` + Source string `xml:"source,attr"` +} + +// NewError creates a new checkstyle.Error +// Note that line starts at 0, and column starts at 1 +func NewError(line int, column int, severity Severity, message string, source string) *Error { + return &Error{Line: line, Column: column, Severity: severity, Message: message, Source: source} +} + +// ReadFile reads a checkfile.xml file and returns a CheckStyle object. +func ReadFile(filename string) (*CheckStyle, error) { + checkStyleXML, err := ioutil.ReadFile(filename) + if err != nil { + return nil, err + } + checkStyle := New() + err = xml.Unmarshal(checkStyleXML, checkStyle) + return checkStyle, err +} diff --git a/vendor/github.com/phayes/checkstyle/godoc.go b/vendor/github.com/phayes/checkstyle/godoc.go new file mode 100644 index 0000000..c9662fe --- /dev/null +++ b/vendor/github.com/phayes/checkstyle/godoc.go @@ -0,0 +1,36 @@ +/* +Package checkstyle allows the parsing of generation of checkstyle XML files. + +Checkstyle XML files are a standard file format for reporting errors in source code, and is often generated by static analysis tools. + +Example usage: + // Print XML into human readable format + checkSyle, err := checkstyle.ReadFile("checkstyle_report.xml") + if err != nil { + log.Fatal(err) + } + for _, file := range checkStyle.File { + fmt.Println(File.Name) + for _, codingError := range file.Error { + fmt.Println("\t", codingError.Line, codingError.Message) + } + } + + // Create a new XML file from scratch + check := checkstyle.New() + + // Ensure that a file has been added + file := check.EnsureFile("/path/to/file") + + // Create an error on line 10, column 5 + codingError := checkstyle.NewError(10, 5, checkstyle.SeverityWarning, "format", "line must end with a full stop") + + // Add the error to the file + file.AddError(codingError) + + // Output XML + fmt.Print(check) + +For more information on checkstyle XML see: http://checkstyle.sourceforge.net/checks.html +*/ +package checkstyle diff --git a/vendor/github.com/polyfloyd/go-errorlint/LICENSE b/vendor/github.com/polyfloyd/go-errorlint/LICENSE new file mode 100644 index 0000000..b7f88cf --- /dev/null +++ b/vendor/github.com/polyfloyd/go-errorlint/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2019 polyfloyd + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/polyfloyd/go-errorlint/errorlint/analysis.go b/vendor/github.com/polyfloyd/go-errorlint/errorlint/analysis.go new file mode 100644 index 0000000..20d1c1e --- /dev/null +++ b/vendor/github.com/polyfloyd/go-errorlint/errorlint/analysis.go @@ -0,0 +1,44 @@ +package errorlint + +import ( + "flag" + "sort" + + "golang.org/x/tools/go/analysis" +) + +func NewAnalyzer() *analysis.Analyzer { + return &analysis.Analyzer{ + Name: "errorlint", + Doc: "Source code linter for Go software that can be used to find code that will cause problems with the error wrapping scheme introduced in Go 1.13.", + Run: run, + Flags: flagSet, + } +} + +var ( + flagSet flag.FlagSet + checkErrorf bool +) + +func init() { + flagSet.BoolVar(&checkErrorf, "errorf", false, "Check whether fmt.Errorf uses the %w verb for formatting errors. See the readme for caveats") +} + +func run(pass *analysis.Pass) (interface{}, error) { + lints := []Lint{} + if checkErrorf { + l := LintFmtErrorfCalls(pass.Fset, *pass.TypesInfo) + lints = append(lints, l...) + } + l := LintErrorComparisons(pass.Fset, *pass.TypesInfo) + lints = append(lints, l...) + l = LintErrorTypeAssertions(pass.Fset, *pass.TypesInfo) + lints = append(lints, l...) + sort.Sort(ByPosition(lints)) + + for _, l := range lints { + pass.Report(analysis.Diagnostic{Pos: l.Pos, Message: l.Message}) + } + return nil, nil +} diff --git a/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go b/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go new file mode 100644 index 0000000..e9f889c --- /dev/null +++ b/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go @@ -0,0 +1,245 @@ +package errorlint + +import ( + "fmt" + "go/ast" + "go/constant" + "go/token" + "go/types" + "regexp" +) + +type Lint struct { + Message string + Pos token.Pos +} + +type ByPosition []Lint + +func (l ByPosition) Len() int { return len(l) } +func (l ByPosition) Swap(i, j int) { l[i], l[j] = l[j], l[i] } + +func (l ByPosition) Less(i, j int) bool { + return l[i].Pos < l[j].Pos +} + +func LintFmtErrorfCalls(fset *token.FileSet, info types.Info) []Lint { + lints := []Lint{} + for expr, t := range info.Types { + // Search for error expressions that are the result of fmt.Errorf + // invocations. + if t.Type.String() != "error" { + continue + } + call, ok := isFmtErrorfCallExpr(info, expr) + if !ok { + continue + } + + // Find all % fields in the format string. + formatVerbs, ok := printfFormatStringVerbs(info, call) + if !ok { + continue + } + + // For any arguments that are errors, check whether the wrapping verb + // is used. Only one %w verb may be used in a single format string at a + // time, so we stop after finding a correct %w. + var lintArg ast.Expr + args := call.Args[1:] + for i := 0; i < len(args) && i < len(formatVerbs); i++ { + if info.Types[args[i]].Type.String() != "error" && !isErrorStringCall(info, args[i]) { + continue + } + + if formatVerbs[i] == "%w" { + lintArg = nil + break + } + + if lintArg == nil { + lintArg = args[i] + } + } + if lintArg != nil { + lints = append(lints, Lint{ + Message: "non-wrapping format verb for fmt.Errorf. 
Use `%w` to format errors", + Pos: lintArg.Pos(), + }) + } + } + return lints +} + +// isErrorStringCall tests whether the expression is a string expression that +// is the result of an `(error).Error()` method call. +func isErrorStringCall(info types.Info, expr ast.Expr) bool { + if info.Types[expr].Type.String() == "string" { + if call, ok := expr.(*ast.CallExpr); ok { + if callSel, ok := call.Fun.(*ast.SelectorExpr); ok { + fun := info.Uses[callSel.Sel].(*types.Func) + return fun.Type().String() == "func() string" && fun.Name() == "Error" + } + } + } + return false +} + +func printfFormatStringVerbs(info types.Info, call *ast.CallExpr) ([]string, bool) { + if len(call.Args) <= 1 { + return nil, false + } + strLit, ok := call.Args[0].(*ast.BasicLit) + if !ok { + // Ignore format strings that are not literals. + return nil, false + } + formatString := constant.StringVal(info.Types[strLit].Value) + + // Naive format string argument verb. This does not take modifiers such as + // padding into account... + re := regexp.MustCompile(`%[^%]`) + return re.FindAllString(formatString, -1), true +} + +func isFmtErrorfCallExpr(info types.Info, expr ast.Expr) (*ast.CallExpr, bool) { + call, ok := expr.(*ast.CallExpr) + if !ok { + return nil, false + } + fn, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + // TODO: Support fmt.Errorf variable aliases? + return nil, false + } + obj := info.Uses[fn.Sel] + + pkg := obj.Pkg() + if pkg != nil && pkg.Name() == "fmt" && obj.Name() == "Errorf" { + return call, true + } + return nil, false +} + +func LintErrorComparisons(fset *token.FileSet, info types.Info) []Lint { + lints := []Lint{} + + for expr := range info.Types { + // Find == and != operations. + binExpr, ok := expr.(*ast.BinaryExpr) + if !ok { + continue + } + if binExpr.Op != token.EQL && binExpr.Op != token.NEQ { + continue + } + // Comparing errors with nil is okay. + if isNilComparison(binExpr) { + continue + } + // Find comparisons of which one side is a of type error. + if !isErrorComparison(info, binExpr) { + continue + } + + lints = append(lints, Lint{ + Message: fmt.Sprintf("comparing with %s will fail on wrapped errors. Use errors.Is to check for a specific error", binExpr.Op), + Pos: binExpr.Pos(), + }) + } + + for scope := range info.Scopes { + // Find value switch blocks. + switchStmt, ok := scope.(*ast.SwitchStmt) + if !ok { + continue + } + // Check whether the switch operates on an error type. + if switchStmt.Tag == nil { + continue + } + tagType := info.Types[switchStmt.Tag] + if tagType.Type.String() != "error" { + continue + } + + lints = append(lints, Lint{ + Message: "switch on an error will fail on wrapped errors. Use errors.Is to check for specific errors", + Pos: switchStmt.Pos(), + }) + } + + return lints +} + +func isNilComparison(binExpr *ast.BinaryExpr) bool { + if ident, ok := binExpr.X.(*ast.Ident); ok && ident.Name == "nil" { + return true + } + if ident, ok := binExpr.Y.(*ast.Ident); ok && ident.Name == "nil" { + return true + } + return false +} + +func isErrorComparison(info types.Info, binExpr *ast.BinaryExpr) bool { + tx := info.Types[binExpr.X] + ty := info.Types[binExpr.Y] + return tx.Type.String() == "error" || ty.Type.String() == "error" +} + +func LintErrorTypeAssertions(fset *token.FileSet, info types.Info) []Lint { + lints := []Lint{} + + for expr := range info.Types { + // Find type assertions. + typeAssert, ok := expr.(*ast.TypeAssertExpr) + if !ok { + continue + } + + // Find type assertions that operate on values of type error. 
+ if !isErrorTypeAssertion(info, typeAssert) { + continue + } + + lints = append(lints, Lint{ + Message: "type assertion on error will fail on wrapped errors. Use errors.As to check for specific errors", + Pos: typeAssert.Pos(), + }) + } + + for scope := range info.Scopes { + // Find type switches. + typeSwitch, ok := scope.(*ast.TypeSwitchStmt) + if !ok { + continue + } + + // Find the type assertion in the type switch. + var typeAssert *ast.TypeAssertExpr + switch t := typeSwitch.Assign.(type) { + case *ast.ExprStmt: + typeAssert = t.X.(*ast.TypeAssertExpr) + case *ast.AssignStmt: + typeAssert = t.Rhs[0].(*ast.TypeAssertExpr) + } + + // Check whether the type switch is on a value of type error. + if !isErrorTypeAssertion(info, typeAssert) { + continue + } + + lints = append(lints, Lint{ + Message: "type switch on error will fail on wrapped errors. Use errors.As to check for specific errors", + Pos: typeAssert.Pos(), + }) + } + + return lints +} + +func isErrorTypeAssertion(info types.Info, typeAssert *ast.TypeAssertExpr) bool { + t := info.Types[typeAssert.X] + return t.Type.String() == "error" +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/LICENSE b/vendor/github.com/quasilyte/go-ruleguard/LICENSE new file mode 100644 index 0000000..f0381fb --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2019, Iskander (Alex) Sharipov / quasilyte +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/vendor/github.com/quasilyte/go-ruleguard/dslgen/dsl_sources.go b/vendor/github.com/quasilyte/go-ruleguard/dslgen/dsl_sources.go new file mode 100644 index 0000000..181e157 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/dslgen/dsl_sources.go @@ -0,0 +1,3 @@ +package dslgen + +var Fluent = []byte("package fluent\n\n// Matcher is a main API group-level entry point.\n// It's used to define and configure the group rules.\n// It also represents a map of all rule-local variables.\ntype Matcher map[string]Var\n\n// Import loads given package path into a rule group imports table.\n//\n// That table is used during the rules compilation.\n//\n// The table has the following effect on the rules:\n//\t* For type expressions, it's used to resolve the\n//\t full package paths of qualified types, like `foo.Bar`.\n//\t If Import(`a/b/foo`) is called, `foo.Bar` will match\n//\t `a/b/foo.Bar` type during the pattern execution.\nfunc (m Matcher) Import(pkgPath string) {}\n\n// Match specifies a set of patterns that match a rule being defined.\n// Pattern matching succeeds if at least 1 pattern matches.\n//\n// If none of the given patterns matched, rule execution stops.\nfunc (m Matcher) Match(pattern string, alternatives ...string) Matcher {\n\treturn m\n}\n\n// Where applies additional constraint to a match.\n// If a given cond is not satisfied, a match is rejected and\n// rule execution stops.\nfunc (m Matcher) Where(cond bool) Matcher {\n\treturn m\n}\n\n// Report prints a message if associated rule match is successful.\n//\n// A message is a string that can contain interpolated expressions.\n// For every matched variable it's possible to interpolate\n// their printed representation into the message text with $.\n// An entire match can be addressed with $$.\nfunc (m Matcher) Report(message string) Matcher {\n\treturn m\n}\n\n// Suggest assigns a quickfix suggestion for the matched code.\nfunc (m Matcher) Suggest(suggestion string) Matcher {\n\treturn m\n}\n\n// At binds the reported node to a named submatch.\n// If no explicit location is given, the outermost node ($$) is used.\nfunc (m Matcher) At(v Var) Matcher {\n\treturn m\n}\n\n// File returns the current file context.\nfunc (m Matcher) File() File { return File{} }\n\n// Var is a pattern variable that describes a named submatch.\ntype Var struct {\n\t// Pure reports whether expr matched by var is side-effect-free.\n\tPure bool\n\n\t// Const reports whether expr matched by var is a constant value.\n\tConst bool\n\n\t// Addressable reports whether the corresponding expression is addressable.\n\t// See https://golang.org/ref/spec#Address_operators.\n\tAddressable bool\n\n\t// Type is a type of a matched expr.\n\t//\n\t// For function call expressions, a type is a function result type,\n\t// but for a function expression itself it's a *types.Signature.\n\t//\n\t// Suppose we have a `a.b()` expression:\n\t//\t`$x()` m[\"x\"].Type is `a.b` function type\n\t//\t`$x` m[\"x\"].Type is `a.b()` function call result type\n\tType ExprType\n\n\t// Text is a captured node text as in the source code.\n\tText MatchedText\n}\n\n// ExprType describes a type of a matcher expr.\ntype ExprType struct {\n\t// Size represents expression type size in bytes.\n\tSize int\n}\n\n// Underlying returns expression type underlying type.\n// See https://golang.org/pkg/go/types/#Type Underlying() method documentation.\n// Read https://golang.org/ref/spec#Types section to learn more about underlying types.\nfunc (ExprType) Underlying() ExprType { return 
underlyingType }\n\n// AssignableTo reports whether a type is assign-compatible with a given type.\n// See https://golang.org/pkg/go/types/#AssignableTo.\nfunc (ExprType) AssignableTo(typ string) bool { return boolResult }\n\n// ConvertibleTo reports whether a type is conversible to a given type.\n// See https://golang.org/pkg/go/types/#ConvertibleTo.\nfunc (ExprType) ConvertibleTo(typ string) bool { return boolResult }\n\n// Implements reports whether a type implements a given interface.\n// See https://golang.org/pkg/go/types/#Implements.\nfunc (ExprType) Implements(typ string) bool { return boolResult }\n\n// Is reports whether a type is identical to a given type.\nfunc (ExprType) Is(typ string) bool { return boolResult }\n\n// MatchedText represents a source text associated with a matched node.\ntype MatchedText string\n\n// Matches reports whether the text matches the given regexp pattern.\nfunc (MatchedText) Matches(pattern string) bool { return boolResult }\n\n// String represents an arbitrary string-typed data.\ntype String string\n\n// Matches reports whether a string matches the given regexp pattern.\nfunc (String) Matches(pattern string) bool { return boolResult }\n\n// File represents the current Go source file.\ntype File struct {\n\t// Name is a file base name.\n\tName String\n}\n\n// Imports reports whether the current file imports the given path.\nfunc (File) Imports(path string) bool { return boolResult }\n\n\n\nvar boolResult bool\n\nvar underlyingType ExprType\n\n") diff --git a/vendor/github.com/quasilyte/go-ruleguard/dslgen/dslgen.go b/vendor/github.com/quasilyte/go-ruleguard/dslgen/dslgen.go new file mode 100644 index 0000000..a2269b2 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/dslgen/dslgen.go @@ -0,0 +1,53 @@ +// +build generate + +package main + +import ( + "bytes" + "fmt" + "io/ioutil" + "os" + "path/filepath" +) + +func main() { + // See #23. + + data, err := dirToBytes("../dsl/fluent") + if err != nil { + panic(err) + } + + f, err := os.Create("./dsl_sources.go") + if err != nil { + panic(err) + } + defer f.Close() + + fmt.Fprintf(f, `package dslgen + +var Fluent = []byte(%q) +`, string(data)) +} + +func dirToBytes(dir string) ([]byte, error) { + files, err := ioutil.ReadDir(dir) + if err != nil { + return nil, err + } + + var buf bytes.Buffer + for i, f := range files { + data, err := ioutil.ReadFile(filepath.Join(dir, f.Name())) + if err != nil { + return nil, err + } + if i != 0 { + newline := bytes.IndexByte(data, '\n') + data = data[newline:] + } + buf.Write(data) + buf.WriteByte('\n') + } + return buf.Bytes(), nil +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/.gitattributes b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/.gitattributes new file mode 100644 index 0000000..6f95229 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/.gitattributes @@ -0,0 +1,2 @@ +# To prevent CRLF breakages on Windows for fragile files, like testdata. +* -text diff --git a/vendor/github.com/gogo/protobuf/LICENSE b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/LICENSE similarity index 73% rename from vendor/github.com/gogo/protobuf/LICENSE rename to vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/LICENSE index f57de90..a06c5eb 100644 --- a/vendor/github.com/gogo/protobuf/LICENSE +++ b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/LICENSE @@ -1,23 +1,16 @@ -Copyright (c) 2013, The GoGo Authors. All rights reserved. 
- -Protocol Buffers for Go with Gadgets - -Go support for Protocol Buffers - Google's data interchange format - -Copyright 2010 The Go Authors. All rights reserved. -https://github.com/golang/protobuf +Copyright (c) 2017, Daniel Martí. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright + * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above + * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - * Neither the name of Google Inc. nor the names of its + * Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. @@ -32,4 +25,3 @@ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/README.md b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/README.md new file mode 100644 index 0000000..12cb0fd --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/README.md @@ -0,0 +1,55 @@ +# gogrep + + go get mvdan.cc/gogrep + +Search for Go code using syntax trees. Work in progress. + + gogrep -x 'if $x != nil { return $x, $*_ }' + +### Instructions + + usage: gogrep commands [packages] + +A command is of the form "-A pattern", where -A is one of: + + -x find all nodes matching a pattern + -g discard nodes not matching a pattern + -v discard nodes matching a pattern + -a filter nodes by certain attributes + -s substitute with a given syntax tree + -w write source back to disk or stdout + +A pattern is a piece of Go code which may include wildcards. It can be: + + a statement (many if split by semicolonss) + an expression (many if split by commas) + a type expression + a top-level declaration (var, func, const) + an entire file + +Wildcards consist of `$` and a name. All wildcards with the same name +within an expression must match the same node, excluding "_". Example: + + $x.$_ = $x // assignment of self to a field in self + +If `*` is before the name, it will match any number of nodes. Example: + + fmt.Fprintf(os.Stdout, $*_) // all Fprintfs on stdout + +`*` can also be used to match optional nodes, like: + + for $*_ { $*_ } // will match all for loops + if $*_; $b { $*_ } // will match all ifs with condition $b + +Regexes can also be used to match certain identifier names only. The +`.*` pattern can be used to match all identifiers. Example: + + fmt.$(_ /Fprint.*/)(os.Stdout, $*_) // all Fprint* on stdout + +The nodes resulting from applying the commands will be printed line by +line to standard output. 
+ +Here are two simple examples of the -a operand: + + gogrep -x '$x + $y' // will match both numerical and string "+" operations + gogrep -x '$x + $y' -a 'type(string)' // matches only string concatenations diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/kludge.go b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/kludge.go new file mode 100644 index 0000000..f366af8 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/kludge.go @@ -0,0 +1,61 @@ +package gogrep + +import ( + "go/ast" + "go/token" + "go/types" +) + +// This is an ugly way to use gogrep as a library. +// It can go away when there will be another option. + +// Parse creates a gogrep pattern out of a given string expression. +func Parse(fset *token.FileSet, expr string) (*Pattern, error) { + m := matcher{ + fset: fset, + Info: &types.Info{}, + } + node, err := m.parseExpr(expr) + if err != nil { + return nil, err + } + return &Pattern{m: &m, Expr: node}, nil +} + +// Pattern is a compiled gogrep pattern. +type Pattern struct { + Expr ast.Node + m *matcher +} + +// MatchData describes a successful pattern match. +type MatchData struct { + Node ast.Node + Values map[string]ast.Node +} + +// MatchNode calls cb if n matches a pattern. +func (p *Pattern) MatchNode(n ast.Node, cb func(MatchData)) { + p.m.values = map[string]ast.Node{} + if p.m.node(p.Expr, n) { + cb(MatchData{ + Values: p.m.values, + Node: n, + }) + } +} + +// Match calls cb for any pattern match found in n. +func (p *Pattern) Match(n ast.Node, cb func(MatchData)) { + cmd := exprCmd{name: "x", value: p.Expr} + matches := p.m.cmdRange(cmd, []submatch{{ + values: map[string]ast.Node{}, + node: n, + }}) + for _, match := range matches { + cb(MatchData{ + Values: match.values, + Node: match.node, + }) + } +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/load.go b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/load.go new file mode 100644 index 0000000..09ab3fd --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/load.go @@ -0,0 +1,72 @@ +// Copyright (c) 2017, Daniel Martí +// See LICENSE for licensing information + +package gogrep + +import ( + "fmt" + "sort" + "strings" + + "golang.org/x/tools/go/packages" +) + +func (m *matcher) load(wd string, args ...string) ([]*packages.Package, error) { + mode := packages.NeedName | packages.NeedImports | packages.NeedSyntax | + packages.NeedTypes | packages.NeedTypesInfo + if m.recursive { // need the syntax trees for the dependencies too + mode |= packages.NeedDeps + } + cfg := &packages.Config{ + Mode: mode, + Dir: wd, + Fset: m.fset, + Tests: m.tests, + } + pkgs, err := packages.Load(cfg, args...) + if err != nil { + return nil, err + } + jointErr := "" + packages.Visit(pkgs, nil, func(pkg *packages.Package) { + for _, err := range pkg.Errors { + jointErr += err.Error() + "\n" + } + }) + if jointErr != "" { + return nil, fmt.Errorf("%s", jointErr) + } + + // Make a sorted list of the packages, including transitive dependencies + // if recurse is true. 
+ byPath := make(map[string]*packages.Package) + var addDeps func(*packages.Package) + addDeps = func(pkg *packages.Package) { + if strings.HasSuffix(pkg.PkgPath, ".test") { + // don't add recursive test deps + return + } + for _, imp := range pkg.Imports { + if _, ok := byPath[imp.PkgPath]; ok { + continue // seen; avoid recursive call + } + byPath[imp.PkgPath] = imp + addDeps(imp) + } + } + for _, pkg := range pkgs { + byPath[pkg.PkgPath] = pkg + if m.recursive { + // add all dependencies once + addDeps(pkg) + } + } + pkgs = pkgs[:0] + for _, pkg := range byPath { + pkgs = append(pkgs, pkg) + } + sort.Slice(pkgs, func(i, j int) bool { + return pkgs[i].PkgPath < pkgs[j].PkgPath + }) + return pkgs, nil +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/main.go b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/main.go new file mode 100644 index 0000000..004cb32 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/main.go @@ -0,0 +1,332 @@ +// Copyright (c) 2017, Daniel Martí +// See LICENSE for licensing information + +package gogrep + +import ( + "bytes" + "flag" + "fmt" + "go/ast" + "go/build" + "go/printer" + "go/token" + "go/types" + "io" + "os" + "regexp" + "strconv" + "strings" +) + +var usage = func() { + fmt.Fprint(os.Stderr, `usage: gogrep commands [packages] + +gogrep performs a query on the given Go packages. + + -r search dependencies recursively too + -tests search test files too (and direct test deps, with -r) + +A command is one of the following: + + -x pattern find all nodes matching a pattern + -g pattern discard nodes not matching a pattern + -v pattern discard nodes matching a pattern + -a attribute discard nodes without an attribute + -s pattern substitute with a given syntax tree + -p number navigate up a number of node parents + -w write the entire source code back + +A pattern is a piece of Go code which may include dollar expressions. It can be +a number of statements, a number of expressions, a declaration, or an entire +file. + +A dollar expression consist of '$' and a name. Dollar expressions with the same +name within a query always match the same node, excluding "_". Example: + + -x '$x.$_ = $x' # assignment of self to a field in self + +If '*' is before the name, it will match any number of nodes. Example: + + -x 'fmt.Fprintf(os.Stdout, $*_)' # all Fprintfs on stdout + +By default, the resulting nodes will be printed one per line to standard output. +To update the input files, use -w. 
+`) +} + +func main() { + m := matcher{ + out: os.Stdout, + ctx: &build.Default, + } + err := m.fromArgs(".", os.Args[1:]) + if err != nil { + fmt.Fprintln(os.Stderr, err) + os.Exit(1) + } +} + +type matcher struct { + out io.Writer + ctx *build.Context + + fset *token.FileSet + + parents map[ast.Node]ast.Node + + recursive, tests bool + aggressive bool + + // information about variables (wildcards), by id (which is an + // integer starting at 0) + vars []varInfo + + // node values recorded by name, excluding "_" (used only by the + // actual matching phase) + values map[string]ast.Node + scope *types.Scope + + *types.Info + stdImporter types.Importer +} + +type varInfo struct { + name string + any bool +} + +func (m *matcher) info(id int) varInfo { + if id < 0 { + return varInfo{} + } + return m.vars[id] +} + +type exprCmd struct { + name string + src string + value interface{} +} + +type strCmdFlag struct { + name string + cmds *[]exprCmd +} + +func (o *strCmdFlag) String() string { return "" } +func (o *strCmdFlag) Set(val string) error { + *o.cmds = append(*o.cmds, exprCmd{name: o.name, src: val}) + return nil +} + +type boolCmdFlag struct { + name string + cmds *[]exprCmd +} + +func (o *boolCmdFlag) String() string { return "" } +func (o *boolCmdFlag) Set(val string) error { + if val != "true" { + return fmt.Errorf("flag can only be true") + } + *o.cmds = append(*o.cmds, exprCmd{name: o.name}) + return nil +} +func (o *boolCmdFlag) IsBoolFlag() bool { return true } + +func (m *matcher) fromArgs(wd string, args []string) error { + m.fset = token.NewFileSet() + cmds, args, err := m.parseCmds(args) + if err != nil { + return err + } + pkgs, err := m.load(wd, args...) + if err != nil { + return err + } + var all []ast.Node + for _, pkg := range pkgs { + m.Info = pkg.TypesInfo + nodes := make([]ast.Node, len(pkg.Syntax)) + for i, f := range pkg.Syntax { + nodes[i] = f + } + all = append(all, m.matches(cmds, nodes)...) 
+ } + for _, n := range all { + fpos := m.fset.Position(n.Pos()) + if strings.HasPrefix(fpos.Filename, wd) { + fpos.Filename = fpos.Filename[len(wd)+1:] + } + fmt.Fprintf(m.out, "%v: %s\n", fpos, singleLinePrint(n)) + } + return nil +} + +func (m *matcher) parseCmds(args []string) ([]exprCmd, []string, error) { + flagSet := flag.NewFlagSet("gogrep", flag.ExitOnError) + flagSet.Usage = usage + flagSet.BoolVar(&m.recursive, "r", false, "search dependencies recursively too") + flagSet.BoolVar(&m.tests, "tests", false, "search test files too (and direct test deps, with -r)") + + var cmds []exprCmd + flagSet.Var(&strCmdFlag{ + name: "x", + cmds: &cmds, + }, "x", "") + flagSet.Var(&strCmdFlag{ + name: "g", + cmds: &cmds, + }, "g", "") + flagSet.Var(&strCmdFlag{ + name: "v", + cmds: &cmds, + }, "v", "") + flagSet.Var(&strCmdFlag{ + name: "a", + cmds: &cmds, + }, "a", "") + flagSet.Var(&strCmdFlag{ + name: "s", + cmds: &cmds, + }, "s", "") + flagSet.Var(&strCmdFlag{ + name: "p", + cmds: &cmds, + }, "p", "") + flagSet.Var(&boolCmdFlag{ + name: "w", + cmds: &cmds, + }, "w", "") + flagSet.Parse(args) + paths := flagSet.Args() + + if len(cmds) < 1 { + return nil, nil, fmt.Errorf("need at least one command") + } + for i, cmd := range cmds { + switch cmd.name { + case "w": + continue // no expr + case "p": + n, err := strconv.Atoi(cmd.src) + if err != nil { + return nil, nil, err + } + cmds[i].value = n + case "a": + m, err := m.parseAttrs(cmd.src) + if err != nil { + return nil, nil, fmt.Errorf("cannot parse mods: %v", err) + } + cmds[i].value = m + default: + node, err := m.parseExpr(cmd.src) + if err != nil { + return nil, nil, err + } + cmds[i].value = node + } + } + return cmds, paths, nil +} + +type bufferJoinLines struct { + bytes.Buffer + last string +} + +var rxNeedSemicolon = regexp.MustCompile(`([])}a-zA-Z0-9"'` + "`" + `]|\+\+|--)$`) + +func (b *bufferJoinLines) Write(p []byte) (n int, err error) { + if string(p) == "\n" { + if b.last == "\n" { + return 1, nil + } + if rxNeedSemicolon.MatchString(b.last) { + b.Buffer.WriteByte(';') + } + b.Buffer.WriteByte(' ') + b.last = "\n" + return 1, nil + } + p = bytes.Trim(p, "\t") + n, err = b.Buffer.Write(p) + b.last = string(p) + return +} + +func (b *bufferJoinLines) String() string { + return strings.TrimSuffix(b.Buffer.String(), "; ") +} + +// inspect is like ast.Inspect, but it supports our extra nodeList Node +// type (only at the top level). 
+func inspect(node ast.Node, fn func(ast.Node) bool) { + // ast.Walk barfs on ast.Node types it doesn't know, so + // do the first level manually here + list, ok := node.(nodeList) + if !ok { + ast.Inspect(node, fn) + return + } + if !fn(list) { + return + } + for i := 0; i < list.len(); i++ { + ast.Inspect(list.at(i), fn) + } + fn(nil) +} + +var emptyFset = token.NewFileSet() + +func singleLinePrint(node ast.Node) string { + var buf bufferJoinLines + inspect(node, func(node ast.Node) bool { + bl, ok := node.(*ast.BasicLit) + if !ok || bl.Kind != token.STRING { + return true + } + if !strings.HasPrefix(bl.Value, "`") { + return true + } + if !strings.Contains(bl.Value, "\n") { + return true + } + bl.Value = strconv.Quote(bl.Value[1 : len(bl.Value)-1]) + return true + }) + printNode(&buf, emptyFset, node) + return buf.String() +} + +func printNode(w io.Writer, fset *token.FileSet, node ast.Node) { + switch x := node.(type) { + case exprList: + if len(x) == 0 { + return + } + printNode(w, fset, x[0]) + for _, n := range x[1:] { + fmt.Fprintf(w, ", ") + printNode(w, fset, n) + } + case stmtList: + if len(x) == 0 { + return + } + printNode(w, fset, x[0]) + for _, n := range x[1:] { + fmt.Fprintf(w, "; ") + printNode(w, fset, n) + } + default: + err := printer.Fprint(w, fset, node) + if err != nil && strings.Contains(err.Error(), "go/printer: unsupported node type") { + // Should never happen, but make it obvious when it does. + panic(fmt.Errorf("cannot print node %T: %v", node, err)) + } + } +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/match.go b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/match.go new file mode 100644 index 0000000..08b53d8 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/match.go @@ -0,0 +1,1108 @@ +// Copyright (c) 2017, Daniel Martí +// See LICENSE for licensing information + +package gogrep + +import ( + "fmt" + "go/ast" + "go/importer" + "go/token" + "go/types" + "regexp" + "strconv" +) + +func (m *matcher) matches(cmds []exprCmd, nodes []ast.Node) []ast.Node { + m.parents = make(map[ast.Node]ast.Node) + m.fillParents(nodes...) 
+ initial := make([]submatch, len(nodes)) + for i, node := range nodes { + initial[i].node = node + initial[i].values = make(map[string]ast.Node) + } + final := m.submatches(cmds, initial) + finalNodes := make([]ast.Node, len(final)) + for i := range finalNodes { + finalNodes[i] = final[i].node + } + return finalNodes +} + +func (m *matcher) fillParents(nodes ...ast.Node) { + stack := make([]ast.Node, 1, 32) + for _, node := range nodes { + inspect(node, func(node ast.Node) bool { + if node == nil { + stack = stack[:len(stack)-1] + return true + } + if _, ok := node.(nodeList); !ok { + m.parents[node] = stack[len(stack)-1] + } + stack = append(stack, node) + return true + }) + } +} + +type submatch struct { + node ast.Node + values map[string]ast.Node +} + +func valsCopy(values map[string]ast.Node) map[string]ast.Node { + v2 := make(map[string]ast.Node, len(values)) + for k, v := range values { + v2[k] = v + } + return v2 +} + +func (m *matcher) submatches(cmds []exprCmd, subs []submatch) []submatch { + if len(cmds) == 0 { + return subs + } + cmd := cmds[0] + var fn func(exprCmd, []submatch) []submatch + switch cmd.name { + case "x": + fn = m.cmdRange + case "g": + fn = m.cmdFilter(true) + case "v": + fn = m.cmdFilter(false) + case "s": + fn = m.cmdSubst + case "a": + fn = m.cmdAttr + case "p": + fn = m.cmdParents + case "w": + if len(cmds) > 1 { + panic("-w must be the last command") + } + fn = m.cmdWrite + default: + panic(fmt.Sprintf("unknown command: %q", cmd.name)) + } + return m.submatches(cmds[1:], fn(cmd, subs)) +} + +func (m *matcher) cmdRange(cmd exprCmd, subs []submatch) []submatch { + var matches []submatch + seen := map[nodePosHash]bool{} + + // The values context for each new submatch must be a new copy + // from its parent submatch. If we don't do this copy, all the + // submatches would share the same map and have side effects. 
+ var startValues map[string]ast.Node + + match := func(exprNode, node ast.Node) { + if node == nil { + return + } + m.values = valsCopy(startValues) + found := m.topNode(exprNode, node) + if found == nil { + return + } + hash := posHash(found) + if !seen[hash] { + matches = append(matches, submatch{ + node: found, + values: m.values, + }) + seen[hash] = true + } + } + for _, sub := range subs { + startValues = valsCopy(sub.values) + m.walkWithLists(cmd.value.(ast.Node), sub.node, match) + } + return matches +} + +func (m *matcher) cmdFilter(wantAny bool) func(exprCmd, []submatch) []submatch { + return func(cmd exprCmd, subs []submatch) []submatch { + var matches []submatch + any := false + match := func(exprNode, node ast.Node) { + if node == nil { + return + } + found := m.topNode(exprNode, node) + if found != nil { + any = true + } + } + for _, sub := range subs { + any = false + m.values = sub.values + m.walkWithLists(cmd.value.(ast.Node), sub.node, match) + if any == wantAny { + matches = append(matches, sub) + } + } + return matches + } +} + +func (m *matcher) cmdAttr(cmd exprCmd, subs []submatch) []submatch { + var matches []submatch + for _, sub := range subs { + m.values = sub.values + if m.attrApplies(sub.node, cmd.value.(attribute)) { + matches = append(matches, sub) + } + } + return matches +} + +func (m *matcher) cmdParents(cmd exprCmd, subs []submatch) []submatch { + for i := range subs { + sub := &subs[i] + reps := cmd.value.(int) + for j := 0; j < reps; j++ { + sub.node = m.parentOf(sub.node) + } + } + return subs +} + +func (m *matcher) attrApplies(node ast.Node, attr interface{}) bool { + if rx, ok := attr.(*regexp.Regexp); ok { + if exprStmt, ok := node.(*ast.ExprStmt); ok { + // since we prefer matching entire statements, get the + // ident from the ExprStmt + node = exprStmt.X + } + ident, ok := node.(*ast.Ident) + return ok && rx.MatchString(ident.Name) + } + expr, _ := node.(ast.Expr) + if expr == nil { + return false // only exprs have types + } + t := m.Info.TypeOf(expr) + if t == nil { + return false // an expr, but no type? 
+ } + tv := m.Info.Types[expr] + switch x := attr.(type) { + case typeCheck: + want := m.resolveType(m.scope, x.expr) + switch { + case x.op == "type" && !types.Identical(t, want): + return false + case x.op == "asgn" && !types.AssignableTo(t, want): + return false + case x.op == "conv" && !types.ConvertibleTo(t, want): + return false + } + case typProperty: + switch { + case x == "comp" && !types.Comparable(t): + return false + case x == "addr" && !tv.Addressable(): + return false + } + case typUnderlying: + u := t.Underlying() + uok := true + switch x { + case "basic": + _, uok = u.(*types.Basic) + case "array": + _, uok = u.(*types.Array) + case "slice": + _, uok = u.(*types.Slice) + case "struct": + _, uok = u.(*types.Struct) + case "interface": + _, uok = u.(*types.Interface) + case "pointer": + _, uok = u.(*types.Pointer) + case "func": + _, uok = u.(*types.Signature) + case "map": + _, uok = u.(*types.Map) + case "chan": + _, uok = u.(*types.Chan) + } + if !uok { + return false + } + } + return true +} + +func (m *matcher) walkWithLists(exprNode, node ast.Node, fn func(exprNode, node ast.Node)) { + visit := func(node ast.Node) bool { + fn(exprNode, node) + for _, list := range nodeLists(node) { + fn(exprNode, list) + if id := m.wildAnyIdent(exprNode); id != nil { + // so that "$*a" will match "a, b" + fn(exprList([]ast.Expr{id}), list) + // so that "$*a" will match "a; b" + fn(toStmtList(id), list) + } + } + return true + } + inspect(node, visit) +} + +func (m *matcher) topNode(exprNode, node ast.Node) ast.Node { + sts1, ok1 := exprNode.(stmtList) + sts2, ok2 := node.(stmtList) + if ok1 && ok2 { + // allow a partial match at the top level + return m.nodes(sts1, sts2, true) + } + if m.node(exprNode, node) { + return node + } + return nil +} + +// optNode is like node, but for those nodes that can be nil and are not +// part of a list. For example, init and post statements in a for loop. 
+func (m *matcher) optNode(expr, node ast.Node) bool { + if ident := m.wildAnyIdent(expr); ident != nil { + if m.node(toStmtList(ident), toStmtList(node)) { + return true + } + } + return m.node(expr, node) +} + +func (m *matcher) node(expr, node ast.Node) bool { + switch node.(type) { + case *ast.File, *ast.FuncType, *ast.BlockStmt, *ast.IfStmt, + *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.CaseClause, + *ast.CommClause, *ast.ForStmt, *ast.RangeStmt: + if scope := m.Info.Scopes[node]; scope != nil { + m.scope = scope + } + } + if !m.aggressive { + if expr == nil || node == nil { + return expr == node + } + } else { + if expr == nil && node == nil { + return true + } + if node == nil { + expr, node = node, expr + } + } + switch x := expr.(type) { + case nil: // only in aggressive mode + y, ok := node.(*ast.Ident) + return ok && y.Name == "_" + + case *ast.File: + y, ok := node.(*ast.File) + if !ok || !m.node(x.Name, y.Name) || len(x.Decls) != len(y.Decls) || + len(x.Imports) != len(y.Imports) { + return false + } + for i, decl := range x.Decls { + if !m.node(decl, y.Decls[i]) { + return false + } + } + for i, imp := range x.Imports { + if !m.node(imp, y.Imports[i]) { + return false + } + } + return true + + case *ast.Ident: + y, yok := node.(*ast.Ident) + if !isWildName(x.Name) { + // not a wildcard + return yok && x.Name == y.Name + } + if _, ok := node.(ast.Node); !ok { + return false // to not include our extra node types + } + id := fromWildName(x.Name) + info := m.info(id) + if info.any { + return false + } + if info.name == "_" { + // values are discarded, matches anything + return true + } + prev, ok := m.values[info.name] + if !ok { + // first occurrence, record value + m.values[info.name] = node + return true + } + // multiple uses must match + return m.node(prev, node) + + // lists (ys are generated by us while walking) + case exprList: + y, ok := node.(exprList) + return ok && m.exprs(x, y) + case stmtList: + y, ok := node.(stmtList) + return ok && m.stmts(x, y) + + // lits + case *ast.BasicLit: + y, ok := node.(*ast.BasicLit) + return ok && x.Kind == y.Kind && x.Value == y.Value + case *ast.CompositeLit: + y, ok := node.(*ast.CompositeLit) + return ok && m.node(x.Type, y.Type) && m.exprs(x.Elts, y.Elts) + case *ast.FuncLit: + y, ok := node.(*ast.FuncLit) + return ok && m.node(x.Type, y.Type) && m.node(x.Body, y.Body) + + // types + case *ast.ArrayType: + y, ok := node.(*ast.ArrayType) + return ok && m.node(x.Len, y.Len) && m.node(x.Elt, y.Elt) + case *ast.MapType: + y, ok := node.(*ast.MapType) + return ok && m.node(x.Key, y.Key) && m.node(x.Value, y.Value) + case *ast.StructType: + y, ok := node.(*ast.StructType) + return ok && m.fields(x.Fields, y.Fields) + case *ast.Field: + // TODO: tags? + y, ok := node.(*ast.Field) + if !ok { + return false + } + if len(x.Names) == 0 && x.Tag == nil && m.node(x.Type, y) { + // Allow $var to match a field. 
+ return true + } + return m.idents(x.Names, y.Names) && m.node(x.Type, y.Type) + case *ast.FuncType: + y, ok := node.(*ast.FuncType) + return ok && m.fields(x.Params, y.Params) && + m.fields(x.Results, y.Results) + case *ast.InterfaceType: + y, ok := node.(*ast.InterfaceType) + return ok && m.fields(x.Methods, y.Methods) + case *ast.ChanType: + y, ok := node.(*ast.ChanType) + return ok && x.Dir == y.Dir && m.node(x.Value, y.Value) + + // other exprs + case *ast.Ellipsis: + y, ok := node.(*ast.Ellipsis) + return ok && m.node(x.Elt, y.Elt) + case *ast.ParenExpr: + y, ok := node.(*ast.ParenExpr) + return ok && m.node(x.X, y.X) + case *ast.UnaryExpr: + y, ok := node.(*ast.UnaryExpr) + return ok && x.Op == y.Op && m.node(x.X, y.X) + case *ast.BinaryExpr: + y, ok := node.(*ast.BinaryExpr) + return ok && x.Op == y.Op && m.node(x.X, y.X) && m.node(x.Y, y.Y) + case *ast.CallExpr: + y, ok := node.(*ast.CallExpr) + return ok && m.node(x.Fun, y.Fun) && m.exprs(x.Args, y.Args) && + bothValid(x.Ellipsis, y.Ellipsis) + case *ast.KeyValueExpr: + y, ok := node.(*ast.KeyValueExpr) + return ok && m.node(x.Key, y.Key) && m.node(x.Value, y.Value) + case *ast.StarExpr: + y, ok := node.(*ast.StarExpr) + return ok && m.node(x.X, y.X) + case *ast.SelectorExpr: + y, ok := node.(*ast.SelectorExpr) + return ok && m.node(x.X, y.X) && m.node(x.Sel, y.Sel) + case *ast.IndexExpr: + y, ok := node.(*ast.IndexExpr) + return ok && m.node(x.X, y.X) && m.node(x.Index, y.Index) + case *ast.SliceExpr: + y, ok := node.(*ast.SliceExpr) + return ok && m.node(x.X, y.X) && m.node(x.Low, y.Low) && + m.node(x.High, y.High) && m.node(x.Max, y.Max) + case *ast.TypeAssertExpr: + y, ok := node.(*ast.TypeAssertExpr) + return ok && m.node(x.X, y.X) && m.node(x.Type, y.Type) + + // decls + case *ast.GenDecl: + y, ok := node.(*ast.GenDecl) + return ok && x.Tok == y.Tok && m.specs(x.Specs, y.Specs) + case *ast.FuncDecl: + y, ok := node.(*ast.FuncDecl) + return ok && m.fields(x.Recv, y.Recv) && m.node(x.Name, y.Name) && + m.node(x.Type, y.Type) && m.node(x.Body, y.Body) + + // specs + case *ast.ValueSpec: + y, ok := node.(*ast.ValueSpec) + if !ok || !m.node(x.Type, y.Type) { + return false + } + if m.aggressive && len(x.Names) == 1 { + for i := range y.Names { + if m.node(x.Names[i], y.Names[i]) && + (x.Values == nil || m.node(x.Values[i], y.Values[i])) { + return true + } + } + } + return m.idents(x.Names, y.Names) && m.exprs(x.Values, y.Values) + + // stmt bridge nodes + case *ast.ExprStmt: + if id, ok := x.X.(*ast.Ident); ok && isWildName(id.Name) { + // prefer matching $x as a statement, as it's + // the parent + return m.node(id, node) + } + y, ok := node.(*ast.ExprStmt) + return ok && m.node(x.X, y.X) + case *ast.DeclStmt: + y, ok := node.(*ast.DeclStmt) + return ok && m.node(x.Decl, y.Decl) + + // stmts + case *ast.EmptyStmt: + _, ok := node.(*ast.EmptyStmt) + return ok + case *ast.LabeledStmt: + y, ok := node.(*ast.LabeledStmt) + return ok && m.node(x.Label, y.Label) && m.node(x.Stmt, y.Stmt) + case *ast.SendStmt: + y, ok := node.(*ast.SendStmt) + return ok && m.node(x.Chan, y.Chan) && m.node(x.Value, y.Value) + case *ast.IncDecStmt: + y, ok := node.(*ast.IncDecStmt) + return ok && x.Tok == y.Tok && m.node(x.X, y.X) + case *ast.AssignStmt: + y, ok := node.(*ast.AssignStmt) + if !m.aggressive { + return ok && x.Tok == y.Tok && + m.exprs(x.Lhs, y.Lhs) && m.exprs(x.Rhs, y.Rhs) + } + if ok { + return m.exprs(x.Lhs, y.Lhs) && m.exprs(x.Rhs, y.Rhs) + } + vs, ok := node.(*ast.ValueSpec) + return ok && m.nodesMatch(exprList(x.Lhs), 
identList(vs.Names)) && + m.exprs(x.Rhs, vs.Values) + case *ast.GoStmt: + y, ok := node.(*ast.GoStmt) + return ok && m.node(x.Call, y.Call) + case *ast.DeferStmt: + y, ok := node.(*ast.DeferStmt) + return ok && m.node(x.Call, y.Call) + case *ast.ReturnStmt: + y, ok := node.(*ast.ReturnStmt) + return ok && m.exprs(x.Results, y.Results) + case *ast.BranchStmt: + y, ok := node.(*ast.BranchStmt) + return ok && x.Tok == y.Tok && m.node(maybeNilIdent(x.Label), maybeNilIdent(y.Label)) + case *ast.BlockStmt: + if m.aggressive && m.node(stmtList(x.List), node) { + return true + } + y, ok := node.(*ast.BlockStmt) + if !ok { + return false + } + if x == nil || y == nil { + return x == y + } + return m.cases(x.List, y.List) || m.stmts(x.List, y.List) + case *ast.IfStmt: + y, ok := node.(*ast.IfStmt) + if !ok { + return false + } + condAny := m.wildAnyIdent(x.Cond) + if condAny != nil && x.Init == nil { + // if $*x { ... } on the left + left := toStmtList(condAny) + return m.node(left, toStmtList(y.Init, y.Cond)) && + m.node(x.Body, y.Body) && m.optNode(x.Else, y.Else) + } + return m.optNode(x.Init, y.Init) && m.node(x.Cond, y.Cond) && + m.node(x.Body, y.Body) && m.node(x.Else, y.Else) + case *ast.CaseClause: + y, ok := node.(*ast.CaseClause) + return ok && m.exprs(x.List, y.List) && m.stmts(x.Body, y.Body) + case *ast.SwitchStmt: + y, ok := node.(*ast.SwitchStmt) + if !ok { + return false + } + tagAny := m.wildAnyIdent(x.Tag) + if tagAny != nil && x.Init == nil { + // switch $*x { ... } on the left + left := toStmtList(tagAny) + return m.node(left, toStmtList(y.Init, y.Tag)) && + m.node(x.Body, y.Body) + } + return m.optNode(x.Init, y.Init) && m.node(x.Tag, y.Tag) && m.node(x.Body, y.Body) + case *ast.TypeSwitchStmt: + y, ok := node.(*ast.TypeSwitchStmt) + return ok && m.optNode(x.Init, y.Init) && m.node(x.Assign, y.Assign) && m.node(x.Body, y.Body) + case *ast.CommClause: + y, ok := node.(*ast.CommClause) + return ok && m.node(x.Comm, y.Comm) && m.stmts(x.Body, y.Body) + case *ast.SelectStmt: + y, ok := node.(*ast.SelectStmt) + return ok && m.node(x.Body, y.Body) + case *ast.ForStmt: + condIdent := m.wildAnyIdent(x.Cond) + if condIdent != nil && x.Init == nil && x.Post == nil { + // "for $*x { ... }" on the left + left := toStmtList(condIdent) + // also accept RangeStmt on the right + switch y := node.(type) { + case *ast.ForStmt: + return m.node(left, toStmtList(y.Init, y.Cond, y.Post)) && + m.node(x.Body, y.Body) + case *ast.RangeStmt: + return m.node(left, toStmtList(y.Key, y.Value, y.X)) && + m.node(x.Body, y.Body) + default: + return false + } + } + y, ok := node.(*ast.ForStmt) + if !ok { + return false + } + return m.optNode(x.Init, y.Init) && m.node(x.Cond, y.Cond) && + m.optNode(x.Post, y.Post) && m.node(x.Body, y.Body) + case *ast.RangeStmt: + y, ok := node.(*ast.RangeStmt) + return ok && m.node(x.Key, y.Key) && m.node(x.Value, y.Value) && + m.node(x.X, y.X) && m.node(x.Body, y.Body) + + case *ast.TypeSpec: + y, ok := node.(*ast.TypeSpec) + return ok && m.node(x.Name, y.Name) && m.node(x.Type, y.Type) + + case *ast.FieldList: + // we ignore these, for now + return false + default: + panic(fmt.Sprintf("unexpected node: %T", x)) + } +} + +func (m *matcher) wildAnyIdent(node ast.Node) *ast.Ident { + switch x := node.(type) { + case *ast.ExprStmt: + return m.wildAnyIdent(x.X) + case *ast.Ident: + if !isWildName(x.Name) { + return nil + } + if !m.info(fromWildName(x.Name)).any { + return nil + } + return x + } + return nil +} + +// resolveType resolves a type expression from a given scope. 
+func (m *matcher) resolveType(scope *types.Scope, expr ast.Expr) types.Type { + switch x := expr.(type) { + case *ast.Ident: + _, obj := scope.LookupParent(x.Name, token.NoPos) + if obj == nil { + // TODO: error if all resolveType calls on a type + // expression fail? or perhaps resolve type expressions + // across the entire program? + return nil + } + return obj.Type() + case *ast.ArrayType: + elt := m.resolveType(scope, x.Elt) + if x.Len == nil { + return types.NewSlice(elt) + } + bl, ok := x.Len.(*ast.BasicLit) + if !ok || bl.Kind != token.INT { + panic(fmt.Sprintf("TODO: %T", x)) + } + len, _ := strconv.ParseInt(bl.Value, 0, 0) + return types.NewArray(elt, len) + case *ast.StarExpr: + return types.NewPointer(m.resolveType(scope, x.X)) + case *ast.ChanType: + dir := types.SendRecv + switch x.Dir { + case ast.SEND: + dir = types.SendOnly + case ast.RECV: + dir = types.RecvOnly + } + return types.NewChan(dir, m.resolveType(scope, x.Value)) + case *ast.SelectorExpr: + scope = m.findScope(scope, x.X) + return m.resolveType(scope, x.Sel) + default: + panic(fmt.Sprintf("resolveType TODO: %T", x)) + } +} + +func (m *matcher) findScope(scope *types.Scope, expr ast.Expr) *types.Scope { + switch x := expr.(type) { + case *ast.Ident: + _, obj := scope.LookupParent(x.Name, token.NoPos) + if pkg, ok := obj.(*types.PkgName); ok { + return pkg.Imported().Scope() + } + // try to fall back to std + if m.stdImporter == nil { + m.stdImporter = importer.Default() + } + path := x.Name + if longer, ok := stdImportFixes[path]; ok { + path = longer + } + pkg, err := m.stdImporter.Import(path) + if err != nil { + panic(fmt.Sprintf("findScope err: %v", err)) + } + return pkg.Scope() + default: + panic(fmt.Sprintf("findScope TODO: %T", x)) + } +} + +var stdImportFixes = map[string]string{ + // go list std | grep -vE 'vendor|internal' | grep '/' | sed -r 's@^(.*)/([^/]*)$@"\2": "\1/\2",@' | sort + // (after commenting out the less likely duplicates) + "adler32": "hash/adler32", + "aes": "crypto/aes", + "ascii85": "encoding/ascii85", + "asn1": "encoding/asn1", + "ast": "go/ast", + "atomic": "sync/atomic", + "base32": "encoding/base32", + "base64": "encoding/base64", + "big": "math/big", + "binary": "encoding/binary", + "bits": "math/bits", + "build": "go/build", + "bzip2": "compress/bzip2", + "cgi": "net/http/cgi", + "cgo": "runtime/cgo", + "cipher": "crypto/cipher", + "cmplx": "math/cmplx", + "color": "image/color", + "constant": "go/constant", + "cookiejar": "net/http/cookiejar", + "crc32": "hash/crc32", + "crc64": "hash/crc64", + "csv": "encoding/csv", + "debug": "runtime/debug", + "des": "crypto/des", + "doc": "go/doc", + "draw": "image/draw", + "driver": "database/sql/driver", + "dsa": "crypto/dsa", + "dwarf": "debug/dwarf", + "ecdsa": "crypto/ecdsa", + "elf": "debug/elf", + "elliptic": "crypto/elliptic", + "exec": "os/exec", + "fcgi": "net/http/fcgi", + "filepath": "path/filepath", + "flate": "compress/flate", + "fnv": "hash/fnv", + "format": "go/format", + "gif": "image/gif", + "gob": "encoding/gob", + "gosym": "debug/gosym", + "gzip": "compress/gzip", + "heap": "container/heap", + "hex": "encoding/hex", + "hmac": "crypto/hmac", + "http": "net/http", + "httptest": "net/http/httptest", + "httptrace": "net/http/httptrace", + "httputil": "net/http/httputil", + "importer": "go/importer", + "iotest": "testing/iotest", + "ioutil": "io/ioutil", + "jpeg": "image/jpeg", + "json": "encoding/json", + "jsonrpc": "net/rpc/jsonrpc", + "list": "container/list", + "lzw": "compress/lzw", + "macho": "debug/macho", + "mail": 
"net/mail", + "md5": "crypto/md5", + "multipart": "mime/multipart", + "palette": "image/color/palette", + "parser": "go/parser", + "parse": "text/template/parse", + "pe": "debug/pe", + "pem": "encoding/pem", + "pkix": "crypto/x509/pkix", + "plan9obj": "debug/plan9obj", + "png": "image/png", + //"pprof": "net/http/pprof", + "pprof": "runtime/pprof", + "printer": "go/printer", + "quick": "testing/quick", + "quotedprintable": "mime/quotedprintable", + "race": "runtime/race", + //"rand": "crypto/rand", + "rand": "math/rand", + "rc4": "crypto/rc4", + "ring": "container/ring", + "rpc": "net/rpc", + "rsa": "crypto/rsa", + //"scanner": "go/scanner", + "scanner": "text/scanner", + "sha1": "crypto/sha1", + "sha256": "crypto/sha256", + "sha512": "crypto/sha512", + "signal": "os/signal", + "smtp": "net/smtp", + "sql": "database/sql", + "subtle": "crypto/subtle", + "suffixarray": "index/suffixarray", + "syntax": "regexp/syntax", + "syslog": "log/syslog", + "tabwriter": "text/tabwriter", + "tar": "archive/tar", + //"template": "html/template", + "template": "text/template", + "textproto": "net/textproto", + "tls": "crypto/tls", + "token": "go/token", + "trace": "runtime/trace", + "types": "go/types", + "url": "net/url", + "user": "os/user", + "utf16": "unicode/utf16", + "utf8": "unicode/utf8", + "x509": "crypto/x509", + "xml": "encoding/xml", + "zip": "archive/zip", + "zlib": "compress/zlib", +} + +func maybeNilIdent(x *ast.Ident) ast.Node { + if x == nil { + return nil + } + return x +} + +func bothValid(p1, p2 token.Pos) bool { + return p1.IsValid() == p2.IsValid() +} + +type nodeList interface { + at(i int) ast.Node + len() int + slice(from, to int) nodeList + ast.Node +} + +// nodes matches two lists of nodes. It uses a common algorithm to match +// wildcard patterns with any number of nodes without recursion. +func (m *matcher) nodes(ns1, ns2 nodeList, partial bool) ast.Node { + ns1len, ns2len := ns1.len(), ns2.len() + if ns1len == 0 { + if ns2len == 0 { + return ns2 + } + return nil + } + partialStart, partialEnd := 0, ns2len + i1, i2 := 0, 0 + next1, next2 := 0, 0 + + // We need to keep a copy of m.values so that we can restart + // with a different "any of" match while discarding any matches + // we found while trying it. + type restart struct { + matches map[string]ast.Node + next1, next2 int + } + // We need to stack these because otherwise some edge cases + // would not match properly. Since we have various kinds of + // wildcards (nodes containing them, $_, and $*_), in some cases + // we may have to go back and do multiple restarts to get to the + // right starting position. + var stack []restart + push := func(n1, n2 int) { + if n2 > ns2len { + return // would be discarded anyway + } + stack = append(stack, restart{valsCopy(m.values), n1, n2}) + next1, next2 = n1, n2 + } + pop := func() { + i1, i2 = next1, next2 + m.values = stack[len(stack)-1].matches + stack = stack[:len(stack)-1] + next1, next2 = 0, 0 + if len(stack) > 0 { + next1 = stack[len(stack)-1].next1 + next2 = stack[len(stack)-1].next2 + } + } + wildName := "" + wildStart := 0 + + // wouldMatch returns whether the current wildcard - if any - + // matches the nodes we are currently trying it on. 
+ wouldMatch := func() bool { + switch wildName { + case "", "_": + return true + } + list := ns2.slice(wildStart, i2) + // check that it matches any nodes found elsewhere + prev, ok := m.values[wildName] + if ok && !m.node(prev, list) { + return false + } + m.values[wildName] = list + return true + } + for i1 < ns1len || i2 < ns2len { + if i1 < ns1len { + n1 := ns1.at(i1) + id := fromWildNode(n1) + info := m.info(id) + if info.any { + // keep track of where this wildcard + // started (if info.name == wildName, + // we're trying the same wildcard + // matching one more node) + if info.name != wildName { + wildStart = i2 + wildName = info.name + } + // try to match zero or more at i2, + // restarting at i2+1 if it fails + push(i1, i2+1) + i1++ + continue + } + if partial && i1 == 0 { + // let "b; c" match "a; b; c" + // (simulates a $*_ at the beginning) + partialStart = i2 + push(i1, i2+1) + } + if i2 < ns2len && wouldMatch() && m.node(n1, ns2.at(i2)) { + wildName = "" + // ordinary match + i1++ + i2++ + continue + } + } + if partial && i1 == ns1len && wildName == "" { + partialEnd = i2 + break // let "b; c" match "b; c; d" + } + // mismatch, try to restart + if 0 < next2 && next2 <= ns2len && (i1 != next1 || i2 != next2) { + pop() + continue + } + return nil + } + if !wouldMatch() { + return nil + } + return ns2.slice(partialStart, partialEnd) +} + +func (m *matcher) nodesMatch(list1, list2 nodeList) bool { + return m.nodes(list1, list2, false) != nil +} + +func (m *matcher) exprs(exprs1, exprs2 []ast.Expr) bool { + return m.nodesMatch(exprList(exprs1), exprList(exprs2)) +} + +func (m *matcher) idents(ids1, ids2 []*ast.Ident) bool { + return m.nodesMatch(identList(ids1), identList(ids2)) +} + +func toStmtList(nodes ...ast.Node) stmtList { + var stmts []ast.Stmt + for _, node := range nodes { + switch x := node.(type) { + case nil: + case ast.Stmt: + stmts = append(stmts, x) + case ast.Expr: + stmts = append(stmts, &ast.ExprStmt{X: x}) + default: + panic(fmt.Sprintf("unexpected node type: %T", x)) + } + } + return stmtList(stmts) +} + +func (m *matcher) cases(stmts1, stmts2 []ast.Stmt) bool { + for _, stmt := range stmts2 { + switch stmt.(type) { + case *ast.CaseClause, *ast.CommClause: + default: + return false + } + } + var left []*ast.Ident + for _, stmt := range stmts1 { + var expr ast.Expr + var bstmt ast.Stmt + switch x := stmt.(type) { + case *ast.CaseClause: + if len(x.List) != 1 || len(x.Body) != 1 { + return false + } + expr, bstmt = x.List[0], x.Body[0] + case *ast.CommClause: + if x.Comm == nil || len(x.Body) != 1 { + return false + } + if commExpr, ok := x.Comm.(*ast.ExprStmt); ok { + expr = commExpr.X + } + bstmt = x.Body[0] + default: + return false + } + xs, ok := bstmt.(*ast.ExprStmt) + if !ok { + return false + } + bodyIdent, ok := xs.X.(*ast.Ident) + if !ok || bodyIdent.Name != "gogrep_body" { + return false + } + id, ok := expr.(*ast.Ident) + if !ok || !isWildName(id.Name) { + return false + } + left = append(left, id) + } + return m.nodesMatch(identList(left), stmtList(stmts2)) +} + +func (m *matcher) stmts(stmts1, stmts2 []ast.Stmt) bool { + return m.nodesMatch(stmtList(stmts1), stmtList(stmts2)) +} + +func (m *matcher) specs(specs1, specs2 []ast.Spec) bool { + return m.nodesMatch(specList(specs1), specList(specs2)) +} + +func (m *matcher) fields(fields1, fields2 *ast.FieldList) bool { + if fields1 == nil || fields2 == nil { + return fields1 == fields2 + } + return m.nodesMatch(fieldList(fields1.List), fieldList(fields2.List)) +} + +func fromWildNode(node ast.Node) 
int { + switch node := node.(type) { + case *ast.Ident: + return fromWildName(node.Name) + case *ast.ExprStmt: + return fromWildNode(node.X) + case *ast.Field: + // Allow $var to represent an entire field; the lone identifier + // gets picked up as an anonymous field. + if len(node.Names) == 0 && node.Tag == nil { + return fromWildNode(node.Type) + } + } + return -1 +} + +func nodeLists(n ast.Node) []nodeList { + var lists []nodeList + addList := func(list nodeList) { + if list.len() > 0 { + lists = append(lists, list) + } + } + switch x := n.(type) { + case nodeList: + addList(x) + case *ast.CompositeLit: + addList(exprList(x.Elts)) + case *ast.CallExpr: + addList(exprList(x.Args)) + case *ast.AssignStmt: + addList(exprList(x.Lhs)) + addList(exprList(x.Rhs)) + case *ast.ReturnStmt: + addList(exprList(x.Results)) + case *ast.ValueSpec: + addList(exprList(x.Values)) + case *ast.BlockStmt: + addList(stmtList(x.List)) + case *ast.CaseClause: + addList(exprList(x.List)) + addList(stmtList(x.Body)) + case *ast.CommClause: + addList(stmtList(x.Body)) + } + return lists +} + +type exprList []ast.Expr +type identList []*ast.Ident +type stmtList []ast.Stmt +type specList []ast.Spec +type fieldList []*ast.Field + +func (l exprList) len() int { return len(l) } +func (l identList) len() int { return len(l) } +func (l stmtList) len() int { return len(l) } +func (l specList) len() int { return len(l) } +func (l fieldList) len() int { return len(l) } + +func (l exprList) at(i int) ast.Node { return l[i] } +func (l identList) at(i int) ast.Node { return l[i] } +func (l stmtList) at(i int) ast.Node { return l[i] } +func (l specList) at(i int) ast.Node { return l[i] } +func (l fieldList) at(i int) ast.Node { return l[i] } + +func (l exprList) slice(i, j int) nodeList { return l[i:j] } +func (l identList) slice(i, j int) nodeList { return l[i:j] } +func (l stmtList) slice(i, j int) nodeList { return l[i:j] } +func (l specList) slice(i, j int) nodeList { return l[i:j] } +func (l fieldList) slice(i, j int) nodeList { return l[i:j] } + +func (l exprList) Pos() token.Pos { return l[0].Pos() } +func (l identList) Pos() token.Pos { return l[0].Pos() } +func (l stmtList) Pos() token.Pos { return l[0].Pos() } +func (l specList) Pos() token.Pos { return l[0].Pos() } +func (l fieldList) Pos() token.Pos { return l[0].Pos() } + +func (l exprList) End() token.Pos { return l[len(l)-1].End() } +func (l identList) End() token.Pos { return l[len(l)-1].End() } +func (l stmtList) End() token.Pos { return l[len(l)-1].End() } +func (l specList) End() token.Pos { return l[len(l)-1].End() } +func (l fieldList) End() token.Pos { return l[len(l)-1].End() } diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/parse.go b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/parse.go new file mode 100644 index 0000000..b46e643 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/parse.go @@ -0,0 +1,452 @@ +// Copyright (c) 2017, Daniel Martí +// See LICENSE for licensing information + +package gogrep + +import ( + "bytes" + "fmt" + "go/ast" + "go/parser" + "go/scanner" + "go/token" + "regexp" + "strconv" + "strings" + "text/template" +) + +func (m *matcher) transformSource(expr string) (string, []posOffset, error) { + toks, err := m.tokenize([]byte(expr)) + if err != nil { + return "", nil, fmt.Errorf("cannot tokenize expr: %v", err) + } + var offs []posOffset + lbuf := lineColBuffer{line: 1, col: 1} + addOffset := func(length int) { + lbuf.offs -= length + offs = 
append(offs, posOffset{ + atLine: lbuf.line, + atCol: lbuf.col, + offset: length, + }) + } + if len(toks) > 0 && toks[0].tok == tokAggressive { + toks = toks[1:] + m.aggressive = true + } + lastLit := false + for _, t := range toks { + if lbuf.offs >= t.pos.Offset && lastLit && t.lit != "" { + lbuf.WriteString(" ") + } + for lbuf.offs < t.pos.Offset { + lbuf.WriteString(" ") + } + if t.lit == "" { + lbuf.WriteString(t.tok.String()) + lastLit = false + continue + } + if isWildName(t.lit) { + // to correct the position offsets for the extra + // info attached to ident name strings + addOffset(len(wildPrefix) - 1) + } + lbuf.WriteString(t.lit) + lastLit = strings.TrimSpace(t.lit) != "" + } + // trailing newlines can cause issues with commas + return strings.TrimSpace(lbuf.String()), offs, nil +} + +func (m *matcher) parseExpr(expr string) (ast.Node, error) { + exprStr, offs, err := m.transformSource(expr) + if err != nil { + return nil, err + } + node, _, err := parseDetectingNode(m.fset, exprStr) + if err != nil { + err = subPosOffsets(err, offs...) + return nil, fmt.Errorf("cannot parse expr: %v", err) + } + return node, nil +} + +type lineColBuffer struct { + bytes.Buffer + line, col, offs int +} + +func (l *lineColBuffer) WriteString(s string) (n int, err error) { + for _, r := range s { + if r == '\n' { + l.line++ + l.col = 1 + } else { + l.col++ + } + l.offs++ + } + return l.Buffer.WriteString(s) +} + +var tmplDecl = template.Must(template.New("").Parse(`` + + `package p; {{ . }}`)) + +var tmplExprs = template.Must(template.New("").Parse(`` + + `package p; var _ = []interface{}{ {{ . }}, }`)) + +var tmplStmts = template.Must(template.New("").Parse(`` + + `package p; func _() { {{ . }} }`)) + +var tmplType = template.Must(template.New("").Parse(`` + + `package p; var _ {{ . }}`)) + +var tmplValSpec = template.Must(template.New("").Parse(`` + + `package p; var {{ . }}`)) + +func execTmpl(tmpl *template.Template, src string) string { + var buf bytes.Buffer + if err := tmpl.Execute(&buf, src); err != nil { + panic(err) + } + return buf.String() +} + +func noBadNodes(node ast.Node) bool { + any := false + ast.Inspect(node, func(n ast.Node) bool { + if any { + return false + } + switch n.(type) { + case *ast.BadExpr, *ast.BadDecl: + any = true + } + return true + }) + return !any +} + +func parseType(fset *token.FileSet, src string) (ast.Expr, *ast.File, error) { + asType := execTmpl(tmplType, src) + f, err := parser.ParseFile(fset, "", asType, 0) + if err != nil { + err = subPosOffsets(err, posOffset{1, 1, 17}) + return nil, nil, err + } + vs := f.Decls[0].(*ast.GenDecl).Specs[0].(*ast.ValueSpec) + return vs.Type, f, nil +} + +// parseDetectingNode tries its best to parse the ast.Node contained in src, as +// one of: *ast.File, ast.Decl, ast.Expr, ast.Stmt, *ast.ValueSpec. +// It also returns the *ast.File used for the parsing, so that the returned node +// can be easily type-checked. 
+func parseDetectingNode(fset *token.FileSet, src string) (ast.Node, *ast.File, error) { + file := fset.AddFile("", fset.Base(), len(src)) + scan := scanner.Scanner{} + scan.Init(file, []byte(src), nil, 0) + if _, tok, _ := scan.Scan(); tok == token.EOF { + return nil, nil, fmt.Errorf("empty source code") + } + var mainErr error + + // first try as a whole file + if f, err := parser.ParseFile(fset, "", src, 0); err == nil && noBadNodes(f) { + return f, f, nil + } + + // then as a single declaration, or many + asDecl := execTmpl(tmplDecl, src) + if f, err := parser.ParseFile(fset, "", asDecl, 0); err == nil && noBadNodes(f) { + if len(f.Decls) == 1 { + return f.Decls[0], f, nil + } + return f, f, nil + } + + // then as value expressions + asExprs := execTmpl(tmplExprs, src) + if f, err := parser.ParseFile(fset, "", asExprs, 0); err == nil && noBadNodes(f) { + vs := f.Decls[0].(*ast.GenDecl).Specs[0].(*ast.ValueSpec) + cl := vs.Values[0].(*ast.CompositeLit) + if len(cl.Elts) == 1 { + return cl.Elts[0], f, nil + } + return exprList(cl.Elts), f, nil + } + + // then try as statements + asStmts := execTmpl(tmplStmts, src) + if f, err := parser.ParseFile(fset, "", asStmts, 0); err == nil && noBadNodes(f) { + bl := f.Decls[0].(*ast.FuncDecl).Body + if len(bl.List) == 1 { + return bl.List[0], f, nil + } + return stmtList(bl.List), f, nil + } else { + // Statements is what covers most cases, so it will give + // the best overall error message. Show positions + // relative to where the user's code is put in the + // template. + mainErr = subPosOffsets(err, posOffset{1, 1, 22}) + } + + // type expressions not yet picked up, for e.g. chans and interfaces + if typ, f, err := parseType(fset, src); err == nil && noBadNodes(f) { + return typ, f, nil + } + + // value specs + asValSpec := execTmpl(tmplValSpec, src) + if f, err := parser.ParseFile(fset, "", asValSpec, 0); err == nil && noBadNodes(f) { + vs := f.Decls[0].(*ast.GenDecl).Specs[0].(*ast.ValueSpec) + return vs, f, nil + } + return nil, nil, mainErr +} + +type posOffset struct { + atLine, atCol int + offset int +} + +func subPosOffsets(err error, offs ...posOffset) error { + list, ok := err.(scanner.ErrorList) + if !ok { + return err + } + for i, err := range list { + for _, off := range offs { + if err.Pos.Line != off.atLine { + continue + } + if err.Pos.Column < off.atCol { + continue + } + err.Pos.Column -= off.offset + } + list[i] = err + } + return list +} + +const ( + _ token.Token = -iota + tokAggressive +) + +type fullToken struct { + pos token.Position + tok token.Token + lit string +} + +type caseStatus uint + +const ( + caseNone caseStatus = iota + caseNeedBlock + caseHere +) + +func (m *matcher) tokenize(src []byte) ([]fullToken, error) { + var s scanner.Scanner + fset := token.NewFileSet() + file := fset.AddFile("", fset.Base(), len(src)) + + var err error + onError := func(pos token.Position, msg string) { + switch msg { // allow certain extra chars + case `illegal character U+0024 '$'`: + case `illegal character U+007E '~'`: + default: + err = fmt.Errorf("%v: %s", pos, msg) + } + } + + // we will modify the input source under the scanner's nose to + // enable some features such as regexes. 
+ s.Init(file, src, onError, scanner.ScanComments) + + next := func() fullToken { + pos, tok, lit := s.Scan() + return fullToken{fset.Position(pos), tok, lit} + } + + caseStat := caseNone + + var toks []fullToken + for t := next(); t.tok != token.EOF; t = next() { + switch t.lit { + case "$": // continues below + case "~": + toks = append(toks, fullToken{t.pos, tokAggressive, ""}) + continue + case "switch", "select", "case": + if t.lit == "case" { + caseStat = caseNone + } else { + caseStat = caseNeedBlock + } + fallthrough + default: // regular Go code + if t.tok == token.LBRACE && caseStat == caseNeedBlock { + caseStat = caseHere + } + toks = append(toks, t) + continue + } + wt, err := m.wildcard(t.pos, next) + if err != nil { + return nil, err + } + if caseStat == caseHere { + toks = append(toks, fullToken{wt.pos, token.IDENT, "case"}) + } + toks = append(toks, wt) + if caseStat == caseHere { + toks = append(toks, fullToken{wt.pos, token.COLON, ""}) + toks = append(toks, fullToken{wt.pos, token.IDENT, "gogrep_body"}) + } + } + return toks, err +} + +func (m *matcher) wildcard(pos token.Position, next func() fullToken) (fullToken, error) { + wt := fullToken{pos, token.IDENT, wildPrefix} + t := next() + var info varInfo + if t.tok == token.MUL { + t = next() + info.any = true + } + if t.tok != token.IDENT { + return wt, fmt.Errorf("%v: $ must be followed by ident, got %v", + t.pos, t.tok) + } + id := len(m.vars) + wt.lit += strconv.Itoa(id) + info.name = t.lit + m.vars = append(m.vars, info) + return wt, nil +} + +type typeCheck struct { + op string // "type", "asgn", "conv" + expr ast.Expr +} + +type attribute interface{} + +type typProperty string + +type typUnderlying string + +func (m *matcher) parseAttrs(src string) (attribute, error) { + toks, err := m.tokenize([]byte(src)) + if err != nil { + return nil, err + } + i := -1 + var t fullToken + next := func() fullToken { + if i++; i < len(toks) { + return toks[i] + } + return fullToken{tok: token.EOF, pos: t.pos} + } + t = next() + op := t.lit + switch op { // the ones that don't take args + case "comp", "addr": + if t = next(); t.tok != token.SEMICOLON { + return nil, fmt.Errorf("%v: wanted EOF, got %v", t.pos, t.tok) + } + return typProperty(op), nil + } + opPos := t.pos + if t = next(); t.tok != token.LPAREN { + return nil, fmt.Errorf("%v: wanted (", t.pos) + } + var attr attribute + switch op { + case "rx": + t = next() + rxStr, err := strconv.Unquote(t.lit) + if err != nil { + return nil, fmt.Errorf("%v: %v", t.pos, err) + } + if !strings.HasPrefix(rxStr, "^") { + rxStr = "^" + rxStr + } + if !strings.HasSuffix(rxStr, "$") { + rxStr = rxStr + "$" + } + rx, err := regexp.Compile(rxStr) + if err != nil { + return nil, fmt.Errorf("%v: %v", t.pos, err) + } + attr = rx + case "type", "asgn", "conv": + t = next() + start := t.pos.Offset + for open := 1; open > 0; t = next() { + switch t.tok { + case token.LPAREN: + open++ + case token.RPAREN: + open-- + case token.EOF: + return nil, fmt.Errorf("%v: expected ) to close (", t.pos) + } + } + end := t.pos.Offset - 1 + typeStr := strings.TrimSpace(string(src[start:end])) + fset := token.NewFileSet() + typeExpr, _, err := parseType(fset, typeStr) + if err != nil { + return nil, err + } + attr = typeCheck{op, typeExpr} + i -= 2 // since we went past RPAREN above + case "is": + switch t = next(); t.lit { + case "basic", "array", "slice", "struct", "interface", + "pointer", "func", "map", "chan": + default: + return nil, fmt.Errorf("%v: unknown type: %q", t.pos, + t.lit) + } + attr = 
typUnderlying(t.lit) + default: + return nil, fmt.Errorf("%v: unknown op %q", opPos, op) + } + if t = next(); t.tok != token.RPAREN { + return nil, fmt.Errorf("%v: wanted ), got %v", t.pos, t.tok) + } + if t = next(); t.tok != token.SEMICOLON { + return nil, fmt.Errorf("%v: wanted EOF, got %v", t.pos, t.tok) + } + return attr, nil +} + +// using a prefix is good enough for now +const wildPrefix = "gogrep_" + +func isWildName(name string) bool { + return strings.HasPrefix(name, wildPrefix) +} + +func fromWildName(s string) int { + if !isWildName(s) { + return -1 + } + n, err := strconv.Atoi(s[len(wildPrefix):]) + if err != nil { + return -1 + } + return n +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/subst.go b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/subst.go new file mode 100644 index 0000000..8870858 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/subst.go @@ -0,0 +1,261 @@ +// Copyright (c) 2018, Daniel Martí +// See LICENSE for licensing information + +package gogrep + +import ( + "fmt" + "go/ast" + "go/token" + "reflect" +) + +func (m *matcher) cmdSubst(cmd exprCmd, subs []submatch) []submatch { + for i := range subs { + sub := &subs[i] + nodeCopy, _ := m.parseExpr(cmd.src) + // since we'll want to set positions within the file's + // FileSet + scrubPositions(nodeCopy) + + m.fillParents(nodeCopy) + nodeCopy = m.fillValues(nodeCopy, sub.values) + m.substNode(sub.node, nodeCopy) + sub.node = nodeCopy + } + return subs +} + +type topNode struct { + Node ast.Node +} + +func (t topNode) Pos() token.Pos { return t.Node.Pos() } +func (t topNode) End() token.Pos { return t.Node.End() } + +func (m *matcher) fillValues(node ast.Node, values map[string]ast.Node) ast.Node { + // node might not have a parent, in which case we need to set an + // artificial one. Its pointer interface is a copy, so we must also + // return it. + top := &topNode{node} + m.setParentOf(node, top) + + inspect(node, func(node ast.Node) bool { + id := fromWildNode(node) + info := m.info(id) + if info.name == "" { + return true + } + prev := values[info.name] + switch prev.(type) { + case exprList: + node = exprList([]ast.Expr{ + node.(*ast.Ident), + }) + case stmtList: + if ident, ok := node.(*ast.Ident); ok { + node = &ast.ExprStmt{X: ident} + } + node = stmtList([]ast.Stmt{ + node.(*ast.ExprStmt), + }) + } + m.substNode(node, prev) + return true + }) + m.setParentOf(node, nil) + return top.Node +} + +func (m *matcher) substNode(oldNode, newNode ast.Node) { + parent := m.parentOf(oldNode) + m.setParentOf(newNode, parent) + + ptr := m.nodePtr(oldNode) + switch x := ptr.(type) { + case **ast.Ident: + *x = newNode.(*ast.Ident) + case *ast.Node: + *x = newNode + case *ast.Expr: + *x = newNode.(ast.Expr) + case *ast.Stmt: + switch y := newNode.(type) { + case ast.Expr: + stmt := &ast.ExprStmt{X: y} + m.setParentOf(stmt, parent) + *x = stmt + case ast.Stmt: + *x = y + default: + panic(fmt.Sprintf("cannot replace stmt with %T", y)) + } + case *[]ast.Expr: + oldList := oldNode.(exprList) + var first, last []ast.Expr + for i, expr := range *x { + if expr == oldList[0] { + first = (*x)[:i] + last = (*x)[i+len(oldList):] + break + } + } + switch y := newNode.(type) { + case ast.Expr: + *x = append(first, y) + case exprList: + *x = append(first, y...) + default: + panic(fmt.Sprintf("cannot replace exprs with %T", y)) + } + *x = append(*x, last...) 
+ case *[]ast.Stmt: + oldList := oldNode.(stmtList) + var first, last []ast.Stmt + for i, stmt := range *x { + if stmt == oldList[0] { + first = (*x)[:i] + last = (*x)[i+len(oldList):] + break + } + } + switch y := newNode.(type) { + case ast.Expr: + stmt := &ast.ExprStmt{X: y} + m.setParentOf(stmt, parent) + *x = append(first, stmt) + case ast.Stmt: + *x = append(first, y) + case stmtList: + *x = append(first, y...) + default: + panic(fmt.Sprintf("cannot replace stmts with %T", y)) + } + *x = append(*x, last...) + case nil: + return + default: + panic(fmt.Sprintf("unsupported substitution: %T", x)) + } + // the new nodes have scrubbed positions, so try our best to use + // sensible ones + fixPositions(parent) +} + +func (m *matcher) parentOf(node ast.Node) ast.Node { + list, ok := node.(nodeList) + if ok { + node = list.at(0) + } + return m.parents[node] +} + +func (m *matcher) setParentOf(node, parent ast.Node) { + list, ok := node.(nodeList) + if ok { + if list.len() == 0 { + return + } + node = list.at(0) + } + m.parents[node] = parent +} + +func (m *matcher) nodePtr(node ast.Node) interface{} { + list, wantSlice := node.(nodeList) + if wantSlice { + node = list.at(0) + } + parent := m.parentOf(node) + if parent == nil { + return nil + } + v := reflect.ValueOf(parent).Elem() + for i := 0; i < v.NumField(); i++ { + fld := v.Field(i) + switch fld.Type().Kind() { + case reflect.Slice: + for i := 0; i < fld.Len(); i++ { + ifld := fld.Index(i) + if ifld.Interface() != node { + continue + } + if wantSlice { + return fld.Addr().Interface() + } + return ifld.Addr().Interface() + } + case reflect.Interface: + if fld.Interface() == node { + return fld.Addr().Interface() + } + } + } + return nil +} + +// nodePosHash is an ast.Node that can always be used as a key in maps, +// even for nodes that are slices like nodeList. +type nodePosHash struct { + pos, end token.Pos +} + +func (n nodePosHash) Pos() token.Pos { return n.pos } +func (n nodePosHash) End() token.Pos { return n.end } + +func posHash(node ast.Node) nodePosHash { + return nodePosHash{pos: node.Pos(), end: node.End()} +} + +var posType = reflect.TypeOf(token.NoPos) + +func scrubPositions(node ast.Node) { + inspect(node, func(node ast.Node) bool { + v := reflect.ValueOf(node) + if v.Kind() != reflect.Ptr { + return true + } + v = v.Elem() + if v.Kind() != reflect.Struct { + return true + } + for i := 0; i < v.NumField(); i++ { + fld := v.Field(i) + if fld.Type() == posType { + fld.SetInt(0) + } + } + return true + }) +} + +// fixPositions tries to fix common syntax errors caused from syntax rewrites. +func fixPositions(node ast.Node) { + if top, ok := node.(*topNode); ok { + node = top.Node + } + // fallback sets pos to the 'to' position if not valid. + fallback := func(pos *token.Pos, to token.Pos) { + if !pos.IsValid() { + *pos = to + } + } + ast.Inspect(node, func(node ast.Node) bool { + // TODO: many more node types + switch x := node.(type) { + case *ast.GoStmt: + fallback(&x.Go, x.Call.Pos()) + case *ast.ReturnStmt: + if len(x.Results) == 0 { + break + } + // Ensure that there's no newline before the returned + // values, as otherwise we have a naked return. See + // https://github.com/golang/go/issues/32854. 
+ if pos := x.Results[0].Pos(); pos > x.Return { + x.Return = pos + } + } + return true + }) +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/write.go b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/write.go new file mode 100644 index 0000000..b4796a8 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/write.go @@ -0,0 +1,63 @@ +// Copyright (c) 2018, Daniel Martí +// See LICENSE for licensing information + +package gogrep + +import ( + "go/ast" + "go/printer" + "os" +) + +func (m *matcher) cmdWrite(cmd exprCmd, subs []submatch) []submatch { + seenRoot := make(map[nodePosHash]bool) + filePaths := make(map[*ast.File]string) + var next []submatch + for _, sub := range subs { + root := m.nodeRoot(sub.node) + hash := posHash(root) + if seenRoot[hash] { + continue // avoid dups + } + seenRoot[hash] = true + file, ok := root.(*ast.File) + if ok { + path := m.fset.Position(file.Package).Filename + if path != "" { + // write to disk + filePaths[file] = path + continue + } + } + // pass it on, to print to stdout + next = append(next, submatch{node: root}) + } + for file, path := range filePaths { + f, err := os.OpenFile(path, os.O_WRONLY|os.O_TRUNC, 0) + if err != nil { + // TODO: return errors instead + panic(err) + } + if err := printConfig.Fprint(f, m.fset, file); err != nil { + // TODO: return errors instead + panic(err) + } + } + return next +} + +var printConfig = printer.Config{ + Mode: printer.UseSpaces | printer.TabIndent, + Tabwidth: 8, +} + +func (m *matcher) nodeRoot(node ast.Node) ast.Node { + parent := m.parentOf(node) + if parent == nil { + return node + } + if _, ok := parent.(nodeList); ok { + return parent + } + return m.nodeRoot(parent) +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/bool3.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/bool3.go new file mode 100644 index 0000000..2ae3ff8 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/bool3.go @@ -0,0 +1,11 @@ +package ruleguard + +type bool3 int + +const ( + bool3unset bool3 = iota + bool3false + bool3true +) + +const _ = bool3unset diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/dsl_importer.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/dsl_importer.go new file mode 100644 index 0000000..c566578 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/dsl_importer.go @@ -0,0 +1,40 @@ +package ruleguard + +import ( + "go/ast" + "go/importer" + "go/parser" + "go/token" + "go/types" + + "github.com/quasilyte/go-ruleguard/dslgen" +) + +type dslImporter struct { + fallback types.Importer +} + +func newDSLImporter() *dslImporter { + return &dslImporter{fallback: importer.Default()} +} + +func (i *dslImporter) Import(path string) (*types.Package, error) { + switch path { + case "github.com/quasilyte/go-ruleguard/dsl/fluent": + return i.importDSL(path, dslgen.Fluent) + + default: + return i.fallback.Import(path) + } +} + +func (i *dslImporter) importDSL(path string, src []byte) (*types.Package, error) { + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, "dsl.go", src, 0) + if err != nil { + return nil, err + } + var typecheker types.Config + var info types.Info + return typecheker.Check(path, fset, []*ast.File{f}, &info) +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go new file mode 100644 index 0000000..e3ad120 --- /dev/null +++ 
b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go @@ -0,0 +1,44 @@ +package ruleguard + +import ( + "go/types" + + "github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep" +) + +type scopedGoRuleSet struct { + uncategorized []goRule + categorizedNum int + rulesByCategory [nodeCategoriesCount][]goRule +} + +type goRule struct { + group string + filename string + line int + severity string + pat *gogrep.Pattern + msg string + location string + suggestion string + filter matchFilter +} + +type matchFilter struct { + fileImports []string + filenamePred func(string) bool + sub map[string]submatchFilter +} + +type submatchFilter struct { + typePred func(typeQuery) bool + textPred func(string) bool + pure bool3 + constant bool3 + addressable bool3 +} + +type typeQuery struct { + x types.Type + ctx *Context +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/merge.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/merge.go new file mode 100644 index 0000000..e494930 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/merge.go @@ -0,0 +1,24 @@ +package ruleguard + +func mergeRuleSets(toMerge []*GoRuleSet) *GoRuleSet { + out := &GoRuleSet{ + local: &scopedGoRuleSet{}, + universal: &scopedGoRuleSet{}, + } + + for _, x := range toMerge { + out.local = appendScopedRuleSet(out.local, x.local) + out.universal = appendScopedRuleSet(out.universal, x.universal) + } + + return out +} + +func appendScopedRuleSet(dst, src *scopedGoRuleSet) *scopedGoRuleSet { + dst.uncategorized = append(dst.uncategorized, src.uncategorized...) + for cat, rules := range src.rulesByCategory { + dst.rulesByCategory[cat] = append(dst.rulesByCategory[cat], rules...) + dst.categorizedNum += len(rules) + } + return dst +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/node_category.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/node_category.go new file mode 100644 index 0000000..859ed39 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/node_category.go @@ -0,0 +1,159 @@ +package ruleguard + +import ( + "go/ast" +) + +type nodeCategory int + +const ( + nodeUnknown nodeCategory = iota + + nodeArrayType + nodeAssignStmt + nodeBasicLit + nodeBinaryExpr + nodeBlockStmt + nodeBranchStmt + nodeCallExpr + nodeCaseClause + nodeChanType + nodeCommClause + nodeCompositeLit + nodeDeclStmt + nodeDeferStmt + nodeEllipsis + nodeEmptyStmt + nodeExprStmt + nodeForStmt + nodeFuncDecl + nodeFuncLit + nodeFuncType + nodeGenDecl + nodeGoStmt + nodeIdent + nodeIfStmt + nodeImportSpec + nodeIncDecStmt + nodeIndexExpr + nodeInterfaceType + nodeKeyValueExpr + nodeLabeledStmt + nodeMapType + nodeParenExpr + nodeRangeStmt + nodeReturnStmt + nodeSelectStmt + nodeSelectorExpr + nodeSendStmt + nodeSliceExpr + nodeStarExpr + nodeStructType + nodeSwitchStmt + nodeTypeAssertExpr + nodeTypeSpec + nodeTypeSwitchStmt + nodeUnaryExpr + nodeValueSpec + + nodeCategoriesCount +) + +func categorizeNode(n ast.Node) nodeCategory { + switch n.(type) { + case *ast.ArrayType: + return nodeArrayType + case *ast.AssignStmt: + return nodeAssignStmt + case *ast.BasicLit: + return nodeBasicLit + case *ast.BinaryExpr: + return nodeBinaryExpr + case *ast.BlockStmt: + return nodeBlockStmt + case *ast.BranchStmt: + return nodeBranchStmt + case *ast.CallExpr: + return nodeCallExpr + case *ast.CaseClause: + return nodeCaseClause + case *ast.ChanType: + return nodeChanType + case *ast.CommClause: + return nodeCommClause + case *ast.CompositeLit: + return nodeCompositeLit + case 
*ast.DeclStmt: + return nodeDeclStmt + case *ast.DeferStmt: + return nodeDeferStmt + case *ast.Ellipsis: + return nodeEllipsis + case *ast.EmptyStmt: + return nodeEmptyStmt + case *ast.ExprStmt: + return nodeExprStmt + case *ast.ForStmt: + return nodeForStmt + case *ast.FuncDecl: + return nodeFuncDecl + case *ast.FuncLit: + return nodeFuncLit + case *ast.FuncType: + return nodeFuncType + case *ast.GenDecl: + return nodeGenDecl + case *ast.GoStmt: + return nodeGoStmt + case *ast.Ident: + return nodeIdent + case *ast.IfStmt: + return nodeIfStmt + case *ast.ImportSpec: + return nodeImportSpec + case *ast.IncDecStmt: + return nodeIncDecStmt + case *ast.IndexExpr: + return nodeIndexExpr + case *ast.InterfaceType: + return nodeInterfaceType + case *ast.KeyValueExpr: + return nodeKeyValueExpr + case *ast.LabeledStmt: + return nodeLabeledStmt + case *ast.MapType: + return nodeMapType + case *ast.ParenExpr: + return nodeParenExpr + case *ast.RangeStmt: + return nodeRangeStmt + case *ast.ReturnStmt: + return nodeReturnStmt + case *ast.SelectStmt: + return nodeSelectStmt + case *ast.SelectorExpr: + return nodeSelectorExpr + case *ast.SendStmt: + return nodeSendStmt + case *ast.SliceExpr: + return nodeSliceExpr + case *ast.StarExpr: + return nodeStarExpr + case *ast.StructType: + return nodeStructType + case *ast.SwitchStmt: + return nodeSwitchStmt + case *ast.TypeAssertExpr: + return nodeTypeAssertExpr + case *ast.TypeSpec: + return nodeTypeSpec + case *ast.TypeSwitchStmt: + return nodeTypeSwitchStmt + case *ast.UnaryExpr: + return nodeUnaryExpr + case *ast.ValueSpec: + return nodeValueSpec + default: + return nodeUnknown + } +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/parser.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/parser.go new file mode 100644 index 0000000..fc9ed5b --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/parser.go @@ -0,0 +1,722 @@ +package ruleguard + +import ( + "fmt" + "go/ast" + "go/constant" + "go/importer" + "go/parser" + "go/token" + "go/types" + "io" + "path" + "path/filepath" + "regexp" + "strconv" + + "github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep" + "github.com/quasilyte/go-ruleguard/ruleguard/typematch" +) + +type rulesParser struct { + filename string + group string + fset *token.FileSet + res *GoRuleSet + types *types.Info + + itab *typematch.ImportsTab + dslImporter types.Importer + stdImporter types.Importer // TODO(quasilyte): share importer with gogrep? 
+ srcImporter types.Importer +} + +func newRulesParser() *rulesParser { + var stdlib = map[string]string{ + "adler32": "hash/adler32", + "aes": "crypto/aes", + "ascii85": "encoding/ascii85", + "asn1": "encoding/asn1", + "ast": "go/ast", + "atomic": "sync/atomic", + "base32": "encoding/base32", + "base64": "encoding/base64", + "big": "math/big", + "binary": "encoding/binary", + "bits": "math/bits", + "bufio": "bufio", + "build": "go/build", + "bytes": "bytes", + "bzip2": "compress/bzip2", + "cgi": "net/http/cgi", + "cgo": "runtime/cgo", + "cipher": "crypto/cipher", + "cmplx": "math/cmplx", + "color": "image/color", + "constant": "go/constant", + "context": "context", + "cookiejar": "net/http/cookiejar", + "crc32": "hash/crc32", + "crc64": "hash/crc64", + "crypto": "crypto", + "csv": "encoding/csv", + "debug": "runtime/debug", + "des": "crypto/des", + "doc": "go/doc", + "draw": "image/draw", + "driver": "database/sql/driver", + "dsa": "crypto/dsa", + "dwarf": "debug/dwarf", + "ecdsa": "crypto/ecdsa", + "ed25519": "crypto/ed25519", + "elf": "debug/elf", + "elliptic": "crypto/elliptic", + "encoding": "encoding", + "errors": "errors", + "exec": "os/exec", + "expvar": "expvar", + "fcgi": "net/http/fcgi", + "filepath": "path/filepath", + "flag": "flag", + "flate": "compress/flate", + "fmt": "fmt", + "fnv": "hash/fnv", + "format": "go/format", + "gif": "image/gif", + "gob": "encoding/gob", + "gosym": "debug/gosym", + "gzip": "compress/gzip", + "hash": "hash", + "heap": "container/heap", + "hex": "encoding/hex", + "hmac": "crypto/hmac", + "html": "html", + "http": "net/http", + "httptest": "net/http/httptest", + "httptrace": "net/http/httptrace", + "httputil": "net/http/httputil", + "image": "image", + "importer": "go/importer", + "io": "io", + "iotest": "testing/iotest", + "ioutil": "io/ioutil", + "jpeg": "image/jpeg", + "json": "encoding/json", + "jsonrpc": "net/rpc/jsonrpc", + "list": "container/list", + "log": "log", + "lzw": "compress/lzw", + "macho": "debug/macho", + "mail": "net/mail", + "math": "math", + "md5": "crypto/md5", + "mime": "mime", + "multipart": "mime/multipart", + "net": "net", + "os": "os", + "palette": "image/color/palette", + "parse": "text/template/parse", + "parser": "go/parser", + "path": "path", + "pe": "debug/pe", + "pem": "encoding/pem", + "pkix": "crypto/x509/pkix", + "plan9obj": "debug/plan9obj", + "plugin": "plugin", + "png": "image/png", + "pprof": "runtime/pprof", + "printer": "go/printer", + "quick": "testing/quick", + "quotedprintable": "mime/quotedprintable", + "race": "runtime/race", + "rand": "math/rand", + "rc4": "crypto/rc4", + "reflect": "reflect", + "regexp": "regexp", + "ring": "container/ring", + "rpc": "net/rpc", + "rsa": "crypto/rsa", + "runtime": "runtime", + "scanner": "text/scanner", + "sha1": "crypto/sha1", + "sha256": "crypto/sha256", + "sha512": "crypto/sha512", + "signal": "os/signal", + "smtp": "net/smtp", + "sort": "sort", + "sql": "database/sql", + "strconv": "strconv", + "strings": "strings", + "subtle": "crypto/subtle", + "suffixarray": "index/suffixarray", + "sync": "sync", + "syntax": "regexp/syntax", + "syscall": "syscall", + "syslog": "log/syslog", + "tabwriter": "text/tabwriter", + "tar": "archive/tar", + "template": "text/template", + "testing": "testing", + "textproto": "net/textproto", + "time": "time", + "tls": "crypto/tls", + "token": "go/token", + "trace": "runtime/trace", + "types": "go/types", + "unicode": "unicode", + "unsafe": "unsafe", + "url": "net/url", + "user": "os/user", + "utf16": "unicode/utf16", + "utf8": 
"unicode/utf8", + "x509": "crypto/x509", + "xml": "encoding/xml", + "zip": "archive/zip", + "zlib": "compress/zlib", + } + + // TODO(quasilyte): do we need to pass the fileset here? + fset := token.NewFileSet() + return &rulesParser{ + itab: typematch.NewImportsTab(stdlib), + stdImporter: importer.Default(), + srcImporter: importer.ForCompiler(fset, "source", nil), + dslImporter: newDSLImporter(), + } +} + +func (p *rulesParser) ParseFile(filename string, fset *token.FileSet, r io.Reader) (*GoRuleSet, error) { + p.filename = filename + p.fset = fset + p.res = &GoRuleSet{ + local: &scopedGoRuleSet{}, + universal: &scopedGoRuleSet{}, + } + + parserFlags := parser.Mode(0) + f, err := parser.ParseFile(fset, filename, r, parserFlags) + if err != nil { + return nil, fmt.Errorf("parser error: %v", err) + } + + if f.Name.Name != "gorules" { + return nil, fmt.Errorf("expected a gorules package name, found %s", f.Name.Name) + } + + typechecker := types.Config{Importer: p.dslImporter} + p.types = &types.Info{Types: map[ast.Expr]types.TypeAndValue{}} + _, err = typechecker.Check("gorules", fset, []*ast.File{f}, p.types) + if err != nil { + return nil, fmt.Errorf("typechecker error: %v", err) + } + + for _, decl := range f.Decls { + decl, ok := decl.(*ast.FuncDecl) + if !ok { + continue + } + if err := p.parseRuleGroup(decl); err != nil { + return nil, err + } + } + + return p.res, nil +} + +func (p *rulesParser) parseRuleGroup(f *ast.FuncDecl) error { + if f.Body == nil { + return p.errorf(f, "unexpected empty function body") + } + if f.Type.Results != nil { + return p.errorf(f.Type.Results, "rule group function should not return anything") + } + params := f.Type.Params.List + if len(params) != 1 || len(params[0].Names) != 1 { + return p.errorf(f.Type.Params, "rule group function should accept exactly 1 Matcher param") + } + // TODO(quasilyte): do an actual matcher param type check? 
+ matcher := params[0].Names[0].Name + + p.group = f.Name.Name + + p.itab.EnterScope() + defer p.itab.LeaveScope() + + for _, stmt := range f.Body.List { + if _, ok := stmt.(*ast.DeclStmt); ok { + continue + } + stmtExpr, ok := stmt.(*ast.ExprStmt) + if !ok { + return p.errorf(stmt, "expected a %s method call, found %s", matcher, sprintNode(p.fset, stmt)) + } + call, ok := stmtExpr.X.(*ast.CallExpr) + if !ok { + return p.errorf(stmt, "expected a %s method call, found %s", matcher, sprintNode(p.fset, stmt)) + } + if err := p.parseCall(matcher, call); err != nil { + return err + } + + } + + return nil +} + +func (p *rulesParser) parseCall(matcher string, call *ast.CallExpr) error { + f := call.Fun.(*ast.SelectorExpr) + x, ok := f.X.(*ast.Ident) + if ok && x.Name == matcher { + return p.parseStmt(f.Sel, call.Args) + } + + return p.parseRule(matcher, call) +} + +func (p *rulesParser) parseStmt(fn *ast.Ident, args []ast.Expr) error { + switch fn.Name { + case "Import": + pkgPath, ok := p.toStringValue(args[0]) + if !ok { + return p.errorf(args[0], "expected a string literal argument") + } + pkgName := path.Base(pkgPath) + p.itab.Load(pkgName, pkgPath) + return nil + default: + return p.errorf(fn, "unexpected %s method", fn.Name) + } +} + +func (p *rulesParser) parseRule(matcher string, call *ast.CallExpr) error { + origCall := call + var ( + matchArgs *[]ast.Expr + whereArgs *[]ast.Expr + suggestArgs *[]ast.Expr + reportArgs *[]ast.Expr + atArgs *[]ast.Expr + ) + for { + chain, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + break + } + switch chain.Sel.Name { + case "Match": + if matchArgs != nil { + return p.errorf(chain.Sel, "Match() can't be repeated") + } + matchArgs = &call.Args + case "Where": + if whereArgs != nil { + return p.errorf(chain.Sel, "Where() can't be repeated") + } + whereArgs = &call.Args + case "Suggest": + if suggestArgs != nil { + return p.errorf(chain.Sel, "Suggest() can't be repeated") + } + suggestArgs = &call.Args + case "Report": + if reportArgs != nil { + return p.errorf(chain.Sel, "Report() can't be repeated") + } + reportArgs = &call.Args + case "At": + if atArgs != nil { + return p.errorf(chain.Sel, "At() can't be repeated") + } + atArgs = &call.Args + default: + return p.errorf(chain.Sel, "unexpected %s method", chain.Sel.Name) + } + call, ok = chain.X.(*ast.CallExpr) + if !ok { + break + } + } + + dst := p.res.universal + proto := goRule{ + filename: p.filename, + line: p.fset.Position(origCall.Pos()).Line, + group: p.group, + filter: matchFilter{ + sub: map[string]submatchFilter{}, + }, + } + var alternatives []string + + if matchArgs == nil { + return p.errorf(origCall, "missing Match() call") + } + for _, arg := range *matchArgs { + alt, ok := p.toStringValue(arg) + if !ok { + return p.errorf(arg, "expected a string literal argument") + } + alternatives = append(alternatives, alt) + } + + if whereArgs != nil { + if err := p.walkFilter(&proto.filter, (*whereArgs)[0], false); err != nil { + return err + } + } + + if suggestArgs != nil { + s, ok := p.toStringValue((*suggestArgs)[0]) + if !ok { + return p.errorf((*suggestArgs)[0], "expected string literal argument") + } + proto.suggestion = s + } + + if reportArgs == nil { + if suggestArgs == nil { + return p.errorf(origCall, "missing Report() or Suggest() call") + } + proto.msg = "suggestion: " + proto.suggestion + } else { + message, ok := p.toStringValue((*reportArgs)[0]) + if !ok { + return p.errorf((*reportArgs)[0], "expected string literal argument") + } + proto.msg = message + } + + if atArgs != nil { + 
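+		// At() expects an index expression like matcher[`varname`]; the string index
+		// selects the submatch whose node becomes the report/suggest location.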
index, ok := (*atArgs)[0].(*ast.IndexExpr) + if !ok { + return p.errorf((*atArgs)[0], "expected %s[`varname`] expression", matcher) + } + arg, ok := p.toStringValue(index.Index) + if !ok { + return p.errorf(index.Index, "expected a string literal index") + } + proto.location = arg + } + + for i, alt := range alternatives { + rule := proto + pat, err := gogrep.Parse(p.fset, alt) + if err != nil { + return p.errorf((*matchArgs)[i], "gogrep parse: %v", err) + } + rule.pat = pat + cat := categorizeNode(pat.Expr) + if cat == nodeUnknown { + dst.uncategorized = append(dst.uncategorized, rule) + } else { + dst.categorizedNum++ + dst.rulesByCategory[cat] = append(dst.rulesByCategory[cat], rule) + } + } + + return nil +} + +func (p *rulesParser) walkFilter(dst *matchFilter, e ast.Expr, negate bool) error { + typeAnd := func(x, y func(typeQuery) bool) func(typeQuery) bool { + if x == nil { + return y + } + return func(q typeQuery) bool { + return x(q) && y(q) + } + } + textAnd := func(x, y func(string) bool) func(string) bool { + if x == nil { + return y + } + return func(s string) bool { + return x(s) && y(s) + } + } + switch e := e.(type) { + case *ast.UnaryExpr: + if e.Op == token.NOT { + return p.walkFilter(dst, e.X, !negate) + } + case *ast.BinaryExpr: + switch e.Op { + case token.LAND: + err := p.walkFilter(dst, e.X, negate) + if err != nil { + return err + } + return p.walkFilter(dst, e.Y, negate) + case token.GEQ, token.LEQ, token.LSS, token.GTR, token.EQL, token.NEQ: + operand := p.toFilterOperand(e.X) + y := p.types.Types[e.Y].Value + expectedResult := !negate + if operand.path == "Type.Size" && y != nil { + filter := dst.sub[operand.varName] + filter.typePred = typeAnd(filter.typePred, func(q typeQuery) bool { + x := constant.MakeInt64(q.ctx.Sizes.Sizeof(q.x)) + return expectedResult == constant.Compare(x, e.Op, y) + }) + dst.sub[operand.varName] = filter + return nil + } + if operand.path == "Text" && y != nil { + filter := dst.sub[operand.varName] + filter.textPred = textAnd(filter.textPred, func(s string) bool { + x := constant.MakeString(s) + return expectedResult == constant.Compare(x, e.Op, y) + }) + dst.sub[operand.varName] = filter + return nil + } + } + case *ast.ParenExpr: + return p.walkFilter(dst, e.X, negate) + } + + // TODO(quasilyte): refactor and extend. 
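+	// The remaining filters are method-style calls (Pure, Const, Addressable,
+	// Text.Matches, Type.Is and friends), resolved through toFilterOperand below.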
+ operand := p.toFilterOperand(e) + args := operand.args + filter := dst.sub[operand.varName] + underlying := false + switch operand.path { + default: + return p.errorf(e, "%s is not a valid filter expression", sprintNode(p.fset, e)) + case "File.Imports": + pkgPath, ok := p.toStringValue(args[0]) + if !ok { + return p.errorf(args[0], "expected a string literal argument") + } + dst.fileImports = append(dst.fileImports, pkgPath) + return nil + case "File.Name.Matches": + patternString, ok := p.toStringValue(args[0]) + if !ok { + return p.errorf(args[0], "expected a string literal argument") + } + re, err := regexp.Compile(patternString) + if err != nil { + return p.errorf(args[0], "parse regexp: %v", err) + } + wantMatched := !negate + dst.filenamePred = func(filename string) bool { + return wantMatched == re.MatchString(filepath.Base(filename)) + } + return nil + + case "Pure": + if negate { + filter.pure = bool3false + } else { + filter.pure = bool3true + } + dst.sub[operand.varName] = filter + case "Const": + if negate { + filter.constant = bool3false + } else { + filter.constant = bool3true + } + dst.sub[operand.varName] = filter + case "Addressable": + if negate { + filter.addressable = bool3false + } else { + filter.addressable = bool3true + } + dst.sub[operand.varName] = filter + case "Text.Matches": + patternString, ok := p.toStringValue(args[0]) + if !ok { + return p.errorf(args[0], "expected a string literal argument") + } + re, err := regexp.Compile(patternString) + if err != nil { + return p.errorf(args[0], "parse regexp: %v", err) + } + wantMatched := !negate + filter.textPred = textAnd(filter.textPred, func(s string) bool { + return wantMatched == re.MatchString(s) + }) + dst.sub[operand.varName] = filter + case "Type.Underlying.Is": + underlying = true + fallthrough + case "Type.Is": + typeString, ok := p.toStringValue(args[0]) + if !ok { + return p.errorf(args[0], "expected a string literal argument") + } + ctx := typematch.Context{Itab: p.itab} + pat, err := typematch.Parse(&ctx, typeString) + if err != nil { + return p.errorf(args[0], "parse type expr: %v", err) + } + wantIdentical := !negate + if underlying { + filter.typePred = typeAnd(filter.typePred, func(q typeQuery) bool { + return wantIdentical == pat.MatchIdentical(q.x.Underlying()) + }) + } else { + filter.typePred = typeAnd(filter.typePred, func(q typeQuery) bool { + return wantIdentical == pat.MatchIdentical(q.x) + }) + } + dst.sub[operand.varName] = filter + case "Type.ConvertibleTo": + typeString, ok := p.toStringValue(args[0]) + if !ok { + return p.errorf(args[0], "expected a string literal argument") + } + y, err := typeFromString(typeString) + if err != nil { + return p.errorf(args[0], "parse type expr: %v", err) + } + if y == nil { + return p.errorf(args[0], "can't convert %s into a type constraint yet", typeString) + } + wantConvertible := !negate + filter.typePred = typeAnd(filter.typePred, func(q typeQuery) bool { + return wantConvertible == types.ConvertibleTo(q.x, y) + }) + dst.sub[operand.varName] = filter + case "Type.AssignableTo": + typeString, ok := p.toStringValue(args[0]) + if !ok { + return p.errorf(args[0], "expected a string literal argument") + } + y, err := typeFromString(typeString) + if err != nil { + return p.errorf(args[0], "parse type expr: %v", err) + } + if y == nil { + return p.errorf(args[0], "can't convert %s into a type constraint yet", typeString) + } + wantAssignable := !negate + filter.typePred = typeAnd(filter.typePred, func(q typeQuery) bool { + return wantAssignable == 
types.AssignableTo(q.x, y) + }) + dst.sub[operand.varName] = filter + case "Type.Implements": + typeString, ok := p.toStringValue(args[0]) + if !ok { + return p.errorf(args[0], "expected a string literal argument") + } + n, err := parser.ParseExpr(typeString) + if err != nil { + return p.errorf(args[0], "parse type expr: %v", err) + } + e, ok := n.(*ast.SelectorExpr) + if !ok { + return p.errorf(args[0], "only qualified names are supported") + } + pkgName, ok := e.X.(*ast.Ident) + if !ok { + return p.errorf(e.X, "invalid package name") + } + pkgPath, ok := p.itab.Lookup(pkgName.Name) + if !ok { + return p.errorf(e.X, "package %s is not imported", pkgName.Name) + } + pkg, err := p.stdImporter.Import(pkgPath) + if err != nil { + pkg, err = p.srcImporter.Import(pkgPath) + if err != nil { + return p.errorf(e, "can't load %s: %v", pkgPath, err) + } + } + obj := pkg.Scope().Lookup(e.Sel.Name) + if obj == nil { + return p.errorf(e, "%s is not found in %s", e.Sel.Name, pkgPath) + } + iface, ok := obj.Type().Underlying().(*types.Interface) + if !ok { + return p.errorf(e, "%s is not an interface type", e.Sel.Name) + } + wantImplemented := !negate + filter.typePred = typeAnd(filter.typePred, func(q typeQuery) bool { + return wantImplemented == types.Implements(q.x, iface) + }) + dst.sub[operand.varName] = filter + } + + return nil +} + +func (p *rulesParser) toStringValue(x ast.Node) (string, bool) { + switch x := x.(type) { + case *ast.BasicLit: + if x.Kind != token.STRING { + return "", false + } + s, err := strconv.Unquote(x.Value) + if err != nil { + return "", false + } + return s, true + case ast.Expr: + typ, ok := p.types.Types[x] + if !ok || typ.Type.String() != "string" { + return "", false + } + str := typ.Value.ExactString() + str = str[1 : len(str)-1] // remove quotes + return str, true + } + return "", false +} + +func (p *rulesParser) toFilterOperand(e ast.Expr) filterOperand { + var o filterOperand + + if call, ok := e.(*ast.CallExpr); ok { + o.args = call.Args + e = call.Fun + } + var path string + for { + if call, ok := e.(*ast.CallExpr); ok { + e = call.Fun + continue + } + selector, ok := e.(*ast.SelectorExpr) + if !ok { + break + } + if path == "" { + path = selector.Sel.Name + } else { + path = selector.Sel.Name + "." 
+ path + } + e = selector.X + } + + o.path = path + + indexing, ok := e.(*ast.IndexExpr) + if !ok { + return o + } + mapIdent, ok := indexing.X.(*ast.Ident) + if !ok { + return o + } + o.mapName = mapIdent.Name + indexString, _ := p.toStringValue(indexing.Index) + o.varName = indexString + + return o +} + +func (p *rulesParser) errorf(n ast.Node, format string, args ...interface{}) error { + loc := p.fset.Position(n.Pos()) + return fmt.Errorf("%s:%d: %s", + loc.Filename, loc.Line, fmt.Sprintf(format, args...)) +} + +type filterOperand struct { + mapName string + varName string + path string + args []ast.Expr +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go new file mode 100644 index 0000000..841bc27 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go @@ -0,0 +1,54 @@ +package ruleguard + +import ( + "go/ast" + "go/token" + "go/types" + "io" +) + +type Context struct { + Debug string + DebugPrint func(string) + + Types *types.Info + Sizes types.Sizes + Fset *token.FileSet + Report func(rule GoRuleInfo, n ast.Node, msg string, s *Suggestion) + Pkg *types.Package +} + +type Suggestion struct { + From token.Pos + To token.Pos + Replacement []byte +} + +func ParseRules(filename string, fset *token.FileSet, r io.Reader) (*GoRuleSet, error) { + p := newRulesParser() + return p.ParseFile(filename, fset, r) +} + +func RunRules(ctx *Context, f *ast.File, rules *GoRuleSet) error { + return newRulesRunner(ctx, rules).run(f) +} + +type GoRuleInfo struct { + // Filename is a file that defined this rule. + Filename string + + // Line is a line inside a file that defined this rule. + Line int + + // Group is a function name that contained this rule. + Group string +} + +type GoRuleSet struct { + universal *scopedGoRuleSet + local *scopedGoRuleSet +} + +func MergeRuleSets(toMerge []*GoRuleSet) *GoRuleSet { + return mergeRuleSets(toMerge) +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go new file mode 100644 index 0000000..e5b353b --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go @@ -0,0 +1,274 @@ +package ruleguard + +import ( + "bytes" + "fmt" + "go/ast" + "go/printer" + "io/ioutil" + "path/filepath" + "strconv" + "strings" + + "github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep" +) + +type rulesRunner struct { + ctx *Context + rules *GoRuleSet + + filename string + imports map[string]struct{} + src []byte +} + +func newRulesRunner(ctx *Context, rules *GoRuleSet) *rulesRunner { + return &rulesRunner{ + ctx: ctx, + rules: rules, + } +} + +func (rr *rulesRunner) nodeText(n ast.Node) []byte { + from := rr.ctx.Fset.Position(n.Pos()).Offset + to := rr.ctx.Fset.Position(n.End()).Offset + src := rr.fileBytes() + if (from >= 0 && int(from) < len(src)) && (to >= 0 && int(to) < len(src)) { + return src[from:to] + } + // Fallback to the printer. + var buf bytes.Buffer + if err := printer.Fprint(&buf, rr.ctx.Fset, n); err != nil { + panic(err) + } + return buf.Bytes() +} + +func (rr *rulesRunner) fileBytes() []byte { + if rr.src != nil { + return rr.src + } + + // TODO(quasilyte): re-use src slice? + src, err := ioutil.ReadFile(rr.filename) + if err != nil || src == nil { + // Assign a zero-length slice so rr.src + // is never nil during the second fileBytes call. 
+ rr.src = make([]byte, 0) + } else { + rr.src = src + } + return rr.src +} + +func (rr *rulesRunner) run(f *ast.File) error { + // TODO(quasilyte): run local rules as well. + + rr.filename = rr.ctx.Fset.Position(f.Pos()).Filename + rr.collectImports(f) + + for _, rule := range rr.rules.universal.uncategorized { + rule.pat.Match(f, func(m gogrep.MatchData) { + rr.handleMatch(rule, m) + }) + } + + if rr.rules.universal.categorizedNum != 0 { + ast.Inspect(f, func(n ast.Node) bool { + cat := categorizeNode(n) + for _, rule := range rr.rules.universal.rulesByCategory[cat] { + matched := false + rule.pat.MatchNode(n, func(m gogrep.MatchData) { + matched = rr.handleMatch(rule, m) + }) + if matched { + break + } + } + return true + }) + } + + return nil +} + +func (rr *rulesRunner) reject(rule goRule, reason, sub string, m gogrep.MatchData) { + // Note: we accept reason and sub args instead of formatted or + // concatenated string so it's cheaper for us to call this + // function is debugging is not enabled. + + if rule.group != rr.ctx.Debug { + return // This rule is not being debugged + } + + pos := rr.ctx.Fset.Position(m.Node.Pos()) + if sub != "" { + reason = "$" + sub + " " + reason + } + rr.ctx.DebugPrint(fmt.Sprintf("%s:%d: rejected by %s:%d (%s)", + pos.Filename, pos.Line, filepath.Base(rule.filename), rule.line, reason)) + for name, node := range m.Values { + var expr ast.Expr + switch node := node.(type) { + case ast.Expr: + expr = node + case *ast.ExprStmt: + expr = node.X + default: + continue + } + + typ := rr.ctx.Types.TypeOf(expr) + s := strings.ReplaceAll(sprintNode(rr.ctx.Fset, expr), "\n", `\n`) + rr.ctx.DebugPrint(fmt.Sprintf(" $%s %s: %s", name, typ, s)) + } +} + +func (rr *rulesRunner) handleMatch(rule goRule, m gogrep.MatchData) bool { + for _, neededImport := range rule.filter.fileImports { + if _, ok := rr.imports[neededImport]; !ok { + rr.reject(rule, "file imports filter", "", m) + return false + } + } + + // TODO(quasilyte): do not run filename check for every match. + // Exclude rules for the file that will never match due to the + // file-scoped filters. Same goes for the fileImports filter + // and ideas proposed in #78. Most rules do not have file-scoped + // filters, so we don't loose much here, but we can optimize + // this file filters in the future. 
+ if rule.filter.filenamePred != nil && !rule.filter.filenamePred(rr.filename) { + rr.reject(rule, "file name filter", "", m) + return false + } + + for name, node := range m.Values { + var expr ast.Expr + switch node := node.(type) { + case ast.Expr: + expr = node + case *ast.ExprStmt: + expr = node.X + default: + continue + } + + filter, ok := rule.filter.sub[name] + if !ok { + continue + } + if filter.typePred != nil { + typ := rr.ctx.Types.TypeOf(expr) + q := typeQuery{x: typ, ctx: rr.ctx} + if !filter.typePred(q) { + rr.reject(rule, "type filter", name, m) + return false + } + } + if filter.textPred != nil { + if !filter.textPred(string(rr.nodeText(expr))) { + rr.reject(rule, "text filter", name, m) + return false + } + } + switch filter.addressable { + case bool3true: + if !isAddressable(rr.ctx.Types, expr) { + rr.reject(rule, "is not addressable", name, m) + return false + } + case bool3false: + if isAddressable(rr.ctx.Types, expr) { + rr.reject(rule, "is addressable", name, m) + return false + } + } + switch filter.pure { + case bool3true: + if !isPure(rr.ctx.Types, expr) { + rr.reject(rule, "is not pure", name, m) + return false + } + case bool3false: + if isPure(rr.ctx.Types, expr) { + rr.reject(rule, "is pure", name, m) + return false + } + } + switch filter.constant { + case bool3true: + if !isConstant(rr.ctx.Types, expr) { + rr.reject(rule, "is not const", name, m) + return false + } + case bool3false: + if isConstant(rr.ctx.Types, expr) { + rr.reject(rule, "is const", name, m) + return false + } + } + } + + prefix := "" + if rule.severity != "" { + prefix = rule.severity + ": " + } + message := prefix + rr.renderMessage(rule.msg, m.Node, m.Values, true) + node := m.Node + if rule.location != "" { + node = m.Values[rule.location] + } + var suggestion *Suggestion + if rule.suggestion != "" { + suggestion = &Suggestion{ + Replacement: []byte(rr.renderMessage(rule.suggestion, m.Node, m.Values, false)), + From: node.Pos(), + To: node.End(), + } + } + info := GoRuleInfo{ + Filename: rule.filename, + } + rr.ctx.Report(info, node, message, suggestion) + return true +} + +func (rr *rulesRunner) collectImports(f *ast.File) { + rr.imports = make(map[string]struct{}, len(f.Imports)) + for _, spec := range f.Imports { + s, err := strconv.Unquote(spec.Path.Value) + if err != nil { + continue + } + rr.imports[s] = struct{}{} + } +} + +func (rr *rulesRunner) renderMessage(msg string, n ast.Node, nodes map[string]ast.Node, truncate bool) string { + var buf strings.Builder + if strings.Contains(msg, "$$") { + buf.Write(rr.nodeText(n)) + msg = strings.ReplaceAll(msg, "$$", buf.String()) + } + if len(nodes) == 0 { + return msg + } + for name, n := range nodes { + key := "$" + name + if !strings.Contains(msg, key) { + continue + } + buf.Reset() + buf.Write(rr.nodeText(n)) + // Don't interpolate strings that are too long. 
+ var replacement string + if truncate && buf.Len() > 60 { + replacement = key + } else { + replacement = buf.String() + } + msg = strings.ReplaceAll(msg, key, replacement) + } + return msg +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go new file mode 100644 index 0000000..2a13567 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go @@ -0,0 +1,519 @@ +package typematch + +import ( + "fmt" + "go/ast" + "go/parser" + "go/token" + "go/types" + "strconv" + "strings" +) + +type patternOp int + +const ( + opBuiltinType patternOp = iota + opPointer + opVar + opVarSeq + opSlice + opArray + opMap + opChan + opFunc + opStructNoSeq + opStruct + opNamed +) + +type Pattern struct { + typeMatches map[string]types.Type + int64Matches map[string]int64 + + root *pattern +} + +type pattern struct { + value interface{} + op patternOp + subs []*pattern +} + +type ImportsTab struct { + imports []map[string]string +} + +func NewImportsTab(initial map[string]string) *ImportsTab { + return &ImportsTab{imports: []map[string]string{initial}} +} + +func (itab *ImportsTab) Lookup(pkgName string) (string, bool) { + for i := len(itab.imports) - 1; i >= 0; i-- { + pkgPath, ok := itab.imports[i][pkgName] + if ok { + return pkgPath, true + } + } + return "", false +} + +func (itab *ImportsTab) Load(pkgName, pkgPath string) { + itab.imports[len(itab.imports)-1][pkgName] = pkgPath +} + +func (itab *ImportsTab) EnterScope() { + itab.imports = append(itab.imports, map[string]string{}) +} + +func (itab *ImportsTab) LeaveScope() { + itab.imports = itab.imports[:len(itab.imports)-1] +} + +type Context struct { + Itab *ImportsTab +} + +const ( + varPrefix = `ᐸvarᐳ` + varSeqPrefix = `ᐸvar_seqᐳ` +) + +func Parse(ctx *Context, s string) (*Pattern, error) { + noDollars := strings.ReplaceAll(s, "$*", varSeqPrefix) + noDollars = strings.ReplaceAll(noDollars, "$", varPrefix) + n, err := parser.ParseExpr(noDollars) + if err != nil { + return nil, err + } + root := parseExpr(ctx, n) + if root == nil { + return nil, fmt.Errorf("can't convert %s type expression (%T)", s, n) + } + p := &Pattern{ + typeMatches: map[string]types.Type{}, + int64Matches: map[string]int64{}, + root: root, + } + return p, nil +} + +var ( + builtinTypeByName = map[string]types.Type{ + "bool": types.Typ[types.Bool], + "int": types.Typ[types.Int], + "int8": types.Typ[types.Int8], + "int16": types.Typ[types.Int16], + "int32": types.Typ[types.Int32], + "int64": types.Typ[types.Int64], + "uint": types.Typ[types.Uint], + "uint8": types.Typ[types.Uint8], + "uint16": types.Typ[types.Uint16], + "uint32": types.Typ[types.Uint32], + "uint64": types.Typ[types.Uint64], + "uintptr": types.Typ[types.Uintptr], + "float32": types.Typ[types.Float32], + "float64": types.Typ[types.Float64], + "complex64": types.Typ[types.Complex64], + "complex128": types.Typ[types.Complex128], + "string": types.Typ[types.String], + + "error": types.Universe.Lookup("error").Type(), + + // Aliases. 
+ "byte": types.Typ[types.Uint8], + "rune": types.Typ[types.Int32], + } + + efaceType = types.NewInterfaceType(nil, nil) +) + +func parseExpr(ctx *Context, e ast.Expr) *pattern { + switch e := e.(type) { + case *ast.Ident: + basic, ok := builtinTypeByName[e.Name] + if ok { + return &pattern{op: opBuiltinType, value: basic} + } + if strings.HasPrefix(e.Name, varPrefix) { + name := strings.TrimPrefix(e.Name, varPrefix) + return &pattern{op: opVar, value: name} + } + if strings.HasPrefix(e.Name, varSeqPrefix) { + name := strings.TrimPrefix(e.Name, varSeqPrefix) + // Only unnamed seq are supported right now. + if name == "_" { + return &pattern{op: opVarSeq, value: name} + } + } + + case *ast.SelectorExpr: + pkg, ok := e.X.(*ast.Ident) + if !ok { + return nil + } + pkgPath, ok := ctx.Itab.Lookup(pkg.Name) + if !ok { + return nil + } + return &pattern{op: opNamed, value: [2]string{pkgPath, e.Sel.Name}} + + case *ast.StarExpr: + elem := parseExpr(ctx, e.X) + if elem == nil { + return nil + } + return &pattern{op: opPointer, subs: []*pattern{elem}} + + case *ast.ArrayType: + elem := parseExpr(ctx, e.Elt) + if elem == nil { + return nil + } + if e.Len == nil { + return &pattern{ + op: opSlice, + subs: []*pattern{elem}, + } + } + if id, ok := e.Len.(*ast.Ident); ok && strings.HasPrefix(id.Name, varPrefix) { + name := strings.TrimPrefix(id.Name, varPrefix) + return &pattern{ + op: opArray, + value: name, + subs: []*pattern{elem}, + } + } + lit, ok := e.Len.(*ast.BasicLit) + if !ok || lit.Kind != token.INT { + return nil + } + length, err := strconv.ParseInt(lit.Value, 10, 64) + if err != nil { + return nil + } + return &pattern{ + op: opArray, + value: length, + subs: []*pattern{elem}, + } + + case *ast.MapType: + keyType := parseExpr(ctx, e.Key) + if keyType == nil { + return nil + } + valType := parseExpr(ctx, e.Value) + if valType == nil { + return nil + } + return &pattern{ + op: opMap, + subs: []*pattern{keyType, valType}, + } + + case *ast.ChanType: + valType := parseExpr(ctx, e.Value) + if valType == nil { + return nil + } + var dir types.ChanDir + switch { + case e.Dir&ast.SEND != 0 && e.Dir&ast.RECV != 0: + dir = types.SendRecv + case e.Dir&ast.SEND != 0: + dir = types.SendOnly + case e.Dir&ast.RECV != 0: + dir = types.RecvOnly + default: + return nil + } + return &pattern{ + op: opChan, + value: dir, + subs: []*pattern{valType}, + } + + case *ast.ParenExpr: + return parseExpr(ctx, e.X) + + case *ast.FuncType: + var params []*pattern + var results []*pattern + if e.Params != nil { + for _, field := range e.Params.List { + p := parseExpr(ctx, field.Type) + if p == nil { + return nil + } + if len(field.Names) != 0 { + return nil + } + params = append(params, p) + } + } + if e.Results != nil { + for _, field := range e.Results.List { + p := parseExpr(ctx, field.Type) + if p == nil { + return nil + } + if len(field.Names) != 0 { + return nil + } + results = append(results, p) + } + } + return &pattern{ + op: opFunc, + value: len(params), + subs: append(params, results...), + } + + case *ast.StructType: + hasSeq := false + members := make([]*pattern, 0, len(e.Fields.List)) + for _, field := range e.Fields.List { + p := parseExpr(ctx, field.Type) + if p == nil { + return nil + } + if len(field.Names) != 0 { + return nil + } + if p.op == opVarSeq { + hasSeq = true + } + members = append(members, p) + } + op := opStructNoSeq + if hasSeq { + op = opStruct + } + return &pattern{ + op: op, + subs: members, + } + + case *ast.InterfaceType: + if len(e.Methods.List) == 0 { + return &pattern{op: 
opBuiltinType, value: efaceType} + } + } + + return nil +} + +func (p *Pattern) MatchIdentical(typ types.Type) bool { + p.reset() + return p.matchIdentical(p.root, typ) +} + +func (p *Pattern) reset() { + if len(p.int64Matches) != 0 { + p.int64Matches = map[string]int64{} + } + if len(p.typeMatches) != 0 { + p.typeMatches = map[string]types.Type{} + } +} + +func (p *Pattern) matchIdenticalFielder(subs []*pattern, f fielder) bool { + // TODO: do backtracking. + + numFields := f.NumFields() + fieldsMatched := 0 + + if len(subs) == 0 && numFields != 0 { + return false + } + + matchAny := false + + i := 0 + for i < len(subs) { + pat := subs[i] + + if pat.op == opVarSeq { + matchAny = true + } + + fieldsLeft := numFields - fieldsMatched + if matchAny { + switch { + // "Nothing left to match" stop condition. + case fieldsLeft == 0: + matchAny = false + i++ + // Lookahead for non-greedy matching. + case i+1 < len(subs) && p.matchIdentical(subs[i+1], f.Field(fieldsMatched).Type()): + matchAny = false + i += 2 + fieldsMatched++ + default: + fieldsMatched++ + } + continue + } + + if fieldsLeft == 0 || !p.matchIdentical(pat, f.Field(fieldsMatched).Type()) { + return false + } + i++ + fieldsMatched++ + } + + return numFields == fieldsMatched +} + +func (p *Pattern) matchIdentical(sub *pattern, typ types.Type) bool { + switch sub.op { + case opVar: + name := sub.value.(string) + if name == "_" { + return true + } + y, ok := p.typeMatches[name] + if !ok { + p.typeMatches[name] = typ + return true + } + if y == nil { + return typ == nil + } + return types.Identical(typ, y) + + case opBuiltinType: + return types.Identical(typ, sub.value.(types.Type)) + + case opPointer: + typ, ok := typ.(*types.Pointer) + if !ok { + return false + } + return p.matchIdentical(sub.subs[0], typ.Elem()) + + case opSlice: + typ, ok := typ.(*types.Slice) + if !ok { + return false + } + return p.matchIdentical(sub.subs[0], typ.Elem()) + + case opArray: + typ, ok := typ.(*types.Array) + if !ok { + return false + } + var wantLen int64 + switch v := sub.value.(type) { + case string: + if v == "_" { + wantLen = typ.Len() + break + } + length, ok := p.int64Matches[v] + if ok { + wantLen = length + } else { + p.int64Matches[v] = typ.Len() + wantLen = typ.Len() + } + case int64: + wantLen = v + } + return wantLen == typ.Len() && p.matchIdentical(sub.subs[0], typ.Elem()) + + case opMap: + typ, ok := typ.(*types.Map) + if !ok { + return false + } + return p.matchIdentical(sub.subs[0], typ.Key()) && + p.matchIdentical(sub.subs[1], typ.Elem()) + + case opChan: + typ, ok := typ.(*types.Chan) + if !ok { + return false + } + dir := sub.value.(types.ChanDir) + return dir == typ.Dir() && p.matchIdentical(sub.subs[0], typ.Elem()) + + case opNamed: + typ, ok := typ.(*types.Named) + if !ok { + return false + } + obj := typ.Obj() + pkg := obj.Pkg() + // pkg can be nil for builtin named types. + // There is no point in checking anything else as we never + // generate the opNamed for such types. 
+ if pkg == nil { + return false + } + pkgPath := sub.value.([2]string)[0] + typeName := sub.value.([2]string)[1] + return obj.Pkg().Path() == pkgPath && typeName == obj.Name() + + case opFunc: + typ, ok := typ.(*types.Signature) + if !ok { + return false + } + numParams := sub.value.(int) + params := sub.subs[:numParams] + results := sub.subs[numParams:] + if typ.Params().Len() != len(params) { + return false + } + if typ.Results().Len() != len(results) { + return false + } + for i := 0; i < typ.Params().Len(); i++ { + if !p.matchIdentical(params[i], typ.Params().At(i).Type()) { + return false + } + } + for i := 0; i < typ.Results().Len(); i++ { + if !p.matchIdentical(results[i], typ.Results().At(i).Type()) { + return false + } + } + return true + + case opStructNoSeq: + typ, ok := typ.(*types.Struct) + if !ok { + return false + } + if typ.NumFields() != len(sub.subs) { + return false + } + for i, member := range sub.subs { + if !p.matchIdentical(member, typ.Field(i).Type()) { + return false + } + } + return true + + case opStruct: + typ, ok := typ.(*types.Struct) + if !ok { + return false + } + if !p.matchIdenticalFielder(sub.subs, typ) { + return false + } + return true + + default: + return false + } +} + +type fielder interface { + Field(i int) *types.Var + NumFields() int +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go new file mode 100644 index 0000000..71a6cc0 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go @@ -0,0 +1,201 @@ +package ruleguard + +import ( + "go/ast" + "go/parser" + "go/printer" + "go/token" + "go/types" + "strconv" + "strings" +) + +func sprintNode(fset *token.FileSet, n ast.Node) string { + if fset == nil { + fset = token.NewFileSet() + } + var buf strings.Builder + if err := printer.Fprint(&buf, fset, n); err != nil { + return "" + } + return buf.String() +} + +var basicTypeByName = map[string]types.Type{ + "bool": types.Typ[types.Bool], + "int": types.Typ[types.Int], + "int8": types.Typ[types.Int8], + "int16": types.Typ[types.Int16], + "int32": types.Typ[types.Int32], + "int64": types.Typ[types.Int64], + "uint": types.Typ[types.Uint], + "uint8": types.Typ[types.Uint8], + "uint16": types.Typ[types.Uint16], + "uint32": types.Typ[types.Uint32], + "uint64": types.Typ[types.Uint64], + "uintptr": types.Typ[types.Uintptr], + "float32": types.Typ[types.Float32], + "float64": types.Typ[types.Float64], + "complex64": types.Typ[types.Complex64], + "complex128": types.Typ[types.Complex128], + "string": types.Typ[types.String], +} + +func typeFromString(s string) (types.Type, error) { + s = strings.ReplaceAll(s, "?", "__any") + + n, err := parser.ParseExpr(s) + if err != nil { + return nil, err + } + return typeFromNode(n), nil +} + +func typeFromNode(e ast.Expr) types.Type { + switch e := e.(type) { + case *ast.Ident: + basic, ok := basicTypeByName[e.Name] + if ok { + return basic + } + + case *ast.ArrayType: + elem := typeFromNode(e.Elt) + if elem == nil { + return nil + } + if e.Len == nil { + return types.NewSlice(elem) + } + lit, ok := e.Len.(*ast.BasicLit) + if !ok || lit.Kind != token.INT { + return nil + } + length, err := strconv.Atoi(lit.Value) + if err != nil { + return nil + } + types.NewArray(elem, int64(length)) + + case *ast.MapType: + keyType := typeFromNode(e.Key) + if keyType == nil { + return nil + } + valType := typeFromNode(e.Value) + if valType == nil { + return nil + } + return types.NewMap(keyType, valType) + + case *ast.StarExpr: 
+ typ := typeFromNode(e.X) + if typ != nil { + return types.NewPointer(typ) + } + + case *ast.ParenExpr: + return typeFromNode(e.X) + + case *ast.InterfaceType: + if len(e.Methods.List) == 0 { + return types.NewInterfaceType(nil, nil) + } + } + + return nil +} + +// isPure reports whether expr is a softly safe expression and contains +// no significant side-effects. As opposed to strictly safe expressions, +// soft safe expressions permit some forms of side-effects, like +// panic possibility during indexing or nil pointer dereference. +// +// Uses types info to determine type conversion expressions that +// are the only permitted kinds of call expressions. +// Note that is does not check whether called function really +// has any side effects. The analysis is very conservative. +func isPure(info *types.Info, expr ast.Expr) bool { + // This list switch is not comprehensive and uses + // whitelist to be on the conservative side. + // Can be extended as needed. + + switch expr := expr.(type) { + case *ast.StarExpr: + return isPure(info, expr.X) + case *ast.BinaryExpr: + return isPure(info, expr.X) && + isPure(info, expr.Y) + case *ast.UnaryExpr: + return expr.Op != token.ARROW && + isPure(info, expr.X) + case *ast.BasicLit, *ast.Ident: + return true + case *ast.IndexExpr: + return isPure(info, expr.X) && + isPure(info, expr.Index) + case *ast.SelectorExpr: + return isPure(info, expr.X) + case *ast.ParenExpr: + return isPure(info, expr.X) + case *ast.CompositeLit: + return isPureList(info, expr.Elts) + case *ast.CallExpr: + return isTypeExpr(info, expr.Fun) && isPureList(info, expr.Args) + + default: + return false + } +} + +// isPureList reports whether every expr in list is safe. +// +// See isPure. +func isPureList(info *types.Info, list []ast.Expr) bool { + for _, expr := range list { + if !isPure(info, expr) { + return false + } + } + return true +} + +func isAddressable(info *types.Info, expr ast.Expr) bool { + tv, ok := info.Types[expr] + return ok && tv.Addressable() +} + +func isConstant(info *types.Info, expr ast.Expr) bool { + tv, ok := info.Types[expr] + return ok && tv.Value != nil +} + +// isTypeExpr reports whether x represents a type expression. +// +// Type expression does not evaluate to any run time value, +// but rather describes a type that is used inside Go expression. +// +// For example, (*T)(v) is a CallExpr that "calls" (*T). +// (*T) is a type expression that tells Go compiler type v should be converted to. +func isTypeExpr(info *types.Info, x ast.Expr) bool { + switch x := x.(type) { + case *ast.StarExpr: + return isTypeExpr(info, x.X) + case *ast.ParenExpr: + return isTypeExpr(info, x.X) + case *ast.SelectorExpr: + return isTypeExpr(info, x.Sel) + + case *ast.Ident: + // Identifier may be a type expression if object + // it reffers to is a type name. 
+ _, ok := info.ObjectOf(x).(*types.TypeName) + return ok + + case *ast.FuncType, *ast.StructType, *ast.InterfaceType, *ast.ArrayType, *ast.MapType, *ast.ChanType: + return true + + default: + return false + } +} diff --git a/vendor/github.com/quasilyte/regex/syntax/LICENSE b/vendor/github.com/quasilyte/regex/syntax/LICENSE new file mode 100644 index 0000000..f0c8128 --- /dev/null +++ b/vendor/github.com/quasilyte/regex/syntax/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Iskander (Alex) Sharipov / quasilyte + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/quasilyte/regex/syntax/README.md b/vendor/github.com/quasilyte/regex/syntax/README.md new file mode 100644 index 0000000..13064ec --- /dev/null +++ b/vendor/github.com/quasilyte/regex/syntax/README.md @@ -0,0 +1,26 @@ +# Package `regex/syntax` + +Package `syntax` provides regular expressions parser as well as AST definitions. + +## Rationale + +There are several problems with the stdlib [regexp/syntax](https://golang.org/pkg/regexp/syntax/) package: + +1. It does several transformations during the parsing that make it + hard to do any kind of syntax analysis afterward. + +2. The AST used there is optimized for the compilation and + execution inside the [regexp](https://golang.org/pkg/regexp) package. + It's somewhat complicated, especially in a way character ranges are encoded. + +3. It only supports [re2](https://github.com/google/re2/wiki/Syntax) syntax. + This parser recognizes most PCRE operations. + +4. It's easier to extend this package than something from the standard library. + +This package does almost no assumptions about how generated AST is going to be used +so it preserves as much syntax information as possible. + +It's easy to write another intermediate representation on top of it. The main +function of this package is to convert a textual regexp pattern into a more +structured form that can be processed more easily. 
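+
+## Example
+
+A minimal usage sketch: it parses a pattern and prints the resulting AST using the
+`NewParser`, `Parse` and `FormatSyntax` functions exported by this package.
+
+```go
+package main
+
+import (
+	"fmt"
+
+	"github.com/quasilyte/regex/syntax"
+)
+
+func main() {
+	// nil options keep the defaults (consecutive chars are merged into OpLiteral).
+	p := syntax.NewParser(nil)
+	re, err := p.Parse(`(foo|bar)+`)
+	if err != nil {
+		panic(err)
+	}
+	// FormatSyntax renders the parsed AST in a compact s-expression-like form.
+	fmt.Println(syntax.FormatSyntax(re))
+}
+```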
diff --git a/vendor/github.com/quasilyte/regex/syntax/ast.go b/vendor/github.com/quasilyte/regex/syntax/ast.go new file mode 100644 index 0000000..44b7b61 --- /dev/null +++ b/vendor/github.com/quasilyte/regex/syntax/ast.go @@ -0,0 +1,147 @@ +package syntax + +import ( + "fmt" + "strings" +) + +type Regexp struct { + Pattern string + Expr Expr +} + +type RegexpPCRE struct { + Pattern string + Expr Expr + + Source string + Modifiers string + Delim [2]byte +} + +func (re *RegexpPCRE) HasModifier(mod byte) bool { + return strings.IndexByte(re.Modifiers, mod) >= 0 +} + +type Expr struct { + // The operations that this expression performs. See `operation.go`. + Op Operation + + Form Form + + _ [2]byte // Reserved + + // Pos describes a source location inside regexp pattern. + Pos Position + + // Args is a list of sub-expressions of this expression. + // + // See Operation constants documentation to learn how to + // interpret the particular expression args. + Args []Expr + + // Value holds expression textual value. + // + // Usually, that value is identical to src[Begin():End()], + // but this is not true for programmatically generated objects. + Value string +} + +// Begin returns expression leftmost offset. +func (e Expr) Begin() uint16 { return e.Pos.Begin } + +// End returns expression rightmost offset. +func (e Expr) End() uint16 { return e.Pos.End } + +// LastArg returns expression last argument. +// +// Should not be called on expressions that may have 0 arguments. +func (e Expr) LastArg() Expr { + return e.Args[len(e.Args)-1] +} + +type Operation byte + +type Form byte + +func FormatSyntax(re *Regexp) string { + return formatExprSyntax(re, re.Expr) +} + +func formatExprSyntax(re *Regexp, e Expr) string { + switch e.Op { + case OpChar, OpLiteral: + switch e.Value { + case "{": + return "'{'" + case "}": + return "'}'" + default: + return e.Value + } + case OpString, OpEscapeChar, OpEscapeMeta, OpEscapeOctal, OpEscapeUni, OpEscapeHex, OpPosixClass: + return e.Value + case OpRepeat: + return fmt.Sprintf("(repeat %s %s)", formatExprSyntax(re, e.Args[0]), e.Args[1].Value) + case OpCaret: + return "^" + case OpDollar: + return "$" + case OpDot: + return "." + case OpQuote: + return fmt.Sprintf("(q %s)", e.Value) + case OpCharRange: + return fmt.Sprintf("%s-%s", formatExprSyntax(re, e.Args[0]), formatExprSyntax(re, e.Args[1])) + case OpCharClass: + return fmt.Sprintf("[%s]", formatArgsSyntax(re, e.Args)) + case OpNegCharClass: + return fmt.Sprintf("[^%s]", formatArgsSyntax(re, e.Args)) + case OpConcat: + return fmt.Sprintf("{%s}", formatArgsSyntax(re, e.Args)) + case OpAlt: + return fmt.Sprintf("(or %s)", formatArgsSyntax(re, e.Args)) + case OpCapture: + return fmt.Sprintf("(capture %s)", formatExprSyntax(re, e.Args[0])) + case OpNamedCapture: + return fmt.Sprintf("(capture %s %s)", formatExprSyntax(re, e.Args[0]), e.Args[1].Value) + case OpGroup: + return fmt.Sprintf("(group %s)", formatExprSyntax(re, e.Args[0])) + case OpAtomicGroup: + return fmt.Sprintf("(atomic %s)", formatExprSyntax(re, e.Args[0])) + case OpGroupWithFlags: + return fmt.Sprintf("(group %s ?%s)", formatExprSyntax(re, e.Args[0]), e.Args[1].Value) + case OpFlagOnlyGroup: + return fmt.Sprintf("(flags ?%s)", formatExprSyntax(re, e.Args[0])) + case OpPositiveLookahead: + return fmt.Sprintf("(?= %s)", formatExprSyntax(re, e.Args[0])) + case OpNegativeLookahead: + return fmt.Sprintf("(?! 
%s)", formatExprSyntax(re, e.Args[0])) + case OpPositiveLookbehind: + return fmt.Sprintf("(?<= %s)", formatExprSyntax(re, e.Args[0])) + case OpNegativeLookbehind: + return fmt.Sprintf("(?", e.Op) + } +} + +func formatArgsSyntax(re *Regexp, args []Expr) string { + parts := make([]string, len(args)) + for i, e := range args { + parts[i] = formatExprSyntax(re, e) + } + return strings.Join(parts, " ") +} diff --git a/vendor/github.com/quasilyte/regex/syntax/errors.go b/vendor/github.com/quasilyte/regex/syntax/errors.go new file mode 100644 index 0000000..cfafc1d --- /dev/null +++ b/vendor/github.com/quasilyte/regex/syntax/errors.go @@ -0,0 +1,27 @@ +package syntax + +import ( + "fmt" +) + +type ParseError struct { + Pos Position + Message string +} + +func (e ParseError) Error() string { return e.Message } + +func throwfPos(pos Position, format string, args ...interface{}) { + panic(ParseError{ + Pos: pos, + Message: fmt.Sprintf(format, args...), + }) +} + +func throwErrorf(posBegin, posEnd int, format string, args ...interface{}) { + pos := Position{ + Begin: uint16(posBegin), + End: uint16(posEnd), + } + throwfPos(pos, format, args...) +} diff --git a/vendor/github.com/quasilyte/regex/syntax/go.mod b/vendor/github.com/quasilyte/regex/syntax/go.mod new file mode 100644 index 0000000..2a4e1f3 --- /dev/null +++ b/vendor/github.com/quasilyte/regex/syntax/go.mod @@ -0,0 +1,3 @@ +module github.com/quasilyte/regex/syntax + +go 1.14 diff --git a/vendor/github.com/quasilyte/regex/syntax/lexer.go b/vendor/github.com/quasilyte/regex/syntax/lexer.go new file mode 100644 index 0000000..e92b038 --- /dev/null +++ b/vendor/github.com/quasilyte/regex/syntax/lexer.go @@ -0,0 +1,455 @@ +package syntax + +import ( + "strings" + "unicode" + "unicode/utf8" +) + +type token struct { + kind tokenKind + pos Position +} + +func (tok token) String() string { + return tok.kind.String() +} + +type tokenKind byte + +//go:generate stringer -type=tokenKind -trimprefix=tok -linecomment=true +const ( + tokNone tokenKind = iota + + tokChar + tokGroupFlags + tokPosixClass + tokConcat + tokRepeat + tokEscapeChar + tokEscapeMeta + tokEscapeOctal + tokEscapeUni + tokEscapeUniFull + tokEscapeHex + tokEscapeHexFull + tokComment + + tokQ // \Q + tokMinus // - + tokLbracket // [ + tokLbracketCaret // [^ + tokRbracket // ] + tokDollar // $ + tokCaret // ^ + tokQuestion // ? + tokDot // . + tokPlus // + + tokStar // * + tokPipe // | + tokLparen // ( + tokLparenName // (?P + tokLparenNameAngle // (? + tokLparenNameQuote // (?'name' + tokLparenFlags // (?flags + tokLparenAtomic // (?> + tokLparenPositiveLookahead // (?= + tokLparenPositiveLookbehind // (?<= + tokLparenNegativeLookahead // (?! + tokLparenNegativeLookbehind // (? unicode.MaxASCII { + _, size := utf8.DecodeRuneInString(l.input[l.pos:]) + l.pushTok(tokChar, size) + l.maybeInsertConcat() + continue + } + switch ch { + case '\\': + l.scanEscape(false) + case '.': + l.pushTok(tokDot, 1) + case '+': + l.pushTok(tokPlus, 1) + case '*': + l.pushTok(tokStar, 1) + case '^': + l.pushTok(tokCaret, 1) + case '$': + l.pushTok(tokDollar, 1) + case '?': + l.pushTok(tokQuestion, 1) + case ')': + l.pushTok(tokRparen, 1) + case '|': + l.pushTok(tokPipe, 1) + case '[': + if l.byteAt(l.pos+1) == '^' { + l.pushTok(tokLbracketCaret, 2) + } else { + l.pushTok(tokLbracket, 1) + } + l.scanCharClass() + case '(': + if l.byteAt(l.pos+1) == '?' 
{ + switch { + case l.byteAt(l.pos+2) == '>': + l.pushTok(tokLparenAtomic, len("(?>")) + case l.byteAt(l.pos+2) == '=': + l.pushTok(tokLparenPositiveLookahead, len("(?=")) + case l.byteAt(l.pos+2) == '!': + l.pushTok(tokLparenNegativeLookahead, len("(?!")) + case l.byteAt(l.pos+2) == '<' && l.byteAt(l.pos+3) == '=': + l.pushTok(tokLparenPositiveLookbehind, len("(?<=")) + case l.byteAt(l.pos+2) == '<' && l.byteAt(l.pos+3) == '!': + l.pushTok(tokLparenNegativeLookbehind, len("(?= 0 { + l.pushTok(tokRepeat, len("{")+j) + } else { + l.pushTok(tokChar, 1) + } + default: + l.pushTok(tokChar, 1) + } + l.maybeInsertConcat() + } +} + +func (l *lexer) scanCharClass() { + l.maybeInsertConcat() + + // We need to handle first `]` in a special way. See #3. + if l.byteAt(l.pos) == ']' { + l.pushTok(tokChar, 1) + } + + for l.pos < len(l.input) { + ch := l.input[l.pos] + if ch > unicode.MaxASCII { + _, size := utf8.DecodeRuneInString(l.input[l.pos:]) + l.pushTok(tokChar, size) + continue + } + switch ch { + case '\\': + l.scanEscape(true) + case '[': + isPosixClass := false + if l.byteAt(l.pos+1) == ':' { + j := l.stringIndex(l.pos+2, ":]") + if j >= 0 { + isPosixClass = true + l.pushTok(tokPosixClass, j+len("[::]")) + } + } + if !isPosixClass { + l.pushTok(tokChar, 1) + } + case '-': + l.pushTok(tokMinus, 1) + case ']': + l.pushTok(tokRbracket, 1) + return // Stop scanning in the char context + default: + l.pushTok(tokChar, 1) + } + } +} + +func (l *lexer) scanEscape(insideCharClass bool) { + s := l.input + if l.pos+1 >= len(s) { + throwErrorf(l.pos, l.pos+1, `unexpected end of pattern: trailing '\'`) + } + switch { + case s[l.pos+1] == 'p' || s[l.pos+1] == 'P': + if l.pos+2 >= len(s) { + throwErrorf(l.pos, l.pos+2, "unexpected end of pattern: expected uni-class-short or '{'") + } + if s[l.pos+2] == '{' { + j := strings.IndexByte(s[l.pos+2:], '}') + if j < 0 { + throwErrorf(l.pos, l.pos+2, "can't find closing '}'") + } + l.pushTok(tokEscapeUniFull, len(`\p{`)+j) + } else { + l.pushTok(tokEscapeUni, len(`\pL`)) + } + case s[l.pos+1] == 'x': + if l.pos+2 >= len(s) { + throwErrorf(l.pos, l.pos+2, "unexpected end of pattern: expected hex-digit or '{'") + } + if s[l.pos+2] == '{' { + j := strings.IndexByte(s[l.pos+2:], '}') + if j < 0 { + throwErrorf(l.pos, l.pos+2, "can't find closing '}'") + } + l.pushTok(tokEscapeHexFull, len(`\x{`)+j) + } else { + if isHexDigit(l.byteAt(l.pos + 3)) { + l.pushTok(tokEscapeHex, len(`\xFF`)) + } else { + l.pushTok(tokEscapeHex, len(`\xF`)) + } + } + case isOctalDigit(s[l.pos+1]): + digits := 1 + if isOctalDigit(l.byteAt(l.pos + 2)) { + if isOctalDigit(l.byteAt(l.pos + 3)) { + digits = 3 + } else { + digits = 2 + } + } + l.pushTok(tokEscapeOctal, len(`\`)+digits) + case s[l.pos+1] == 'Q': + size := len(s) - l.pos // Until the pattern ends + j := l.stringIndex(l.pos+2, `\E`) + if j >= 0 { + size = j + len(`\Q\E`) + } + l.pushTok(tokQ, size) + + default: + ch := l.byteAt(l.pos + 1) + if ch > unicode.MaxASCII { + _, size := utf8.DecodeRuneInString(l.input[l.pos+1:]) + l.pushTok(tokEscapeChar, len(`\`)+size) + return + } + kind := tokEscapeChar + if insideCharClass { + if charClassMetachar[ch] { + kind = tokEscapeMeta + } + } else { + if reMetachar[ch] { + kind = tokEscapeMeta + } + } + l.pushTok(kind, 2) + } +} + +func (l *lexer) maybeInsertConcat() { + if l.isConcatPos() { + last := len(l.tokens) - 1 + tok := l.tokens[last] + l.tokens[last].kind = tokConcat + l.tokens = append(l.tokens, tok) + } +} + +func (l *lexer) Init(s string) { + l.pos = 0 + l.tokens = l.tokens[:0] + 
l.input = s + + l.scan() + + l.pos = 0 +} + +func (l *lexer) tryScanGroupName(pos int) bool { + tok := tokLparenName + endCh := byte('>') + offset := 1 + switch l.byteAt(pos) { + case '\'': + endCh = '\'' + tok = tokLparenNameQuote + case '<': + tok = tokLparenNameAngle + case 'P': + offset = 2 + default: + return false + } + if pos+offset >= len(l.input) { + return false + } + end := strings.IndexByte(l.input[pos+offset:], endCh) + if end < 0 { + return false + } + l.pushTok(tok, len("(?")+offset+end+1) + return true +} + +func (l *lexer) tryScanGroupFlags(pos int) bool { + colonPos := strings.IndexByte(l.input[pos:], ':') + parenPos := strings.IndexByte(l.input[pos:], ')') + if parenPos < 0 { + return false + } + end := parenPos + if colonPos >= 0 && colonPos < parenPos { + end = colonPos + len(":") + } + l.pushTok(tokLparenFlags, len("(?")+end) + return true +} + +func (l *lexer) tryScanComment(pos int) bool { + if l.byteAt(pos) != '#' { + return false + } + parenPos := strings.IndexByte(l.input[pos:], ')') + if parenPos < 0 { + return false + } + l.pushTok(tokComment, len("(?")+parenPos+len(")")) + return true +} + +func (l *lexer) repeatWidth(pos int) int { + j := pos + for isDigit(l.byteAt(j)) { + j++ + } + if j == pos { + return -1 + } + if l.byteAt(j) == '}' { + return (j + len("}")) - pos // {min} + } + if l.byteAt(j) != ',' { + return -1 + } + j += len(",") + for isDigit(l.byteAt(j)) { + j++ + } + if l.byteAt(j) == '}' { + return (j + len("}")) - pos // {min,} or {min,max} + } + return -1 +} + +func (l *lexer) stringIndex(offset int, s string) int { + if offset < len(l.input) { + return strings.Index(l.input[offset:], s) + } + return -1 +} + +func (l *lexer) byteAt(pos int) byte { + if pos >= 0 && pos < len(l.input) { + return l.input[pos] + } + return 0 +} + +func (l *lexer) pushTok(kind tokenKind, size int) { + l.tokens = append(l.tokens, token{ + kind: kind, + pos: Position{Begin: uint16(l.pos), End: uint16(l.pos + size)}, + }) + l.pos += size +} + +func (l *lexer) isConcatPos() bool { + if len(l.tokens) < 2 { + return false + } + x := l.tokens[len(l.tokens)-2].kind + if concatTable[x]&concatX != 0 { + return false + } + y := l.tokens[len(l.tokens)-1].kind + return concatTable[y]&concatY == 0 +} + +const ( + concatX byte = 1 << iota + concatY +) + +var concatTable = [256]byte{ + tokPipe: concatX | concatY, + + tokLparen: concatX, + tokLparenFlags: concatX, + tokLparenName: concatX, + tokLparenNameAngle: concatX, + tokLparenNameQuote: concatX, + tokLparenAtomic: concatX, + tokLbracket: concatX, + tokLbracketCaret: concatX, + tokLparenPositiveLookahead: concatX, + tokLparenPositiveLookbehind: concatX, + tokLparenNegativeLookahead: concatX, + tokLparenNegativeLookbehind: concatX, + + tokRparen: concatY, + tokRbracket: concatY, + tokPlus: concatY, + tokStar: concatY, + tokQuestion: concatY, + tokRepeat: concatY, +} diff --git a/vendor/github.com/quasilyte/regex/syntax/operation.go b/vendor/github.com/quasilyte/regex/syntax/operation.go new file mode 100644 index 0000000..284e5dc --- /dev/null +++ b/vendor/github.com/quasilyte/regex/syntax/operation.go @@ -0,0 +1,189 @@ +package syntax + +//go:generate stringer -type=Operation -trimprefix=Op +const ( + OpNone Operation = iota + + // OpConcat is a concatenation of ops. + // Examples: `xy` `abc\d` `` + // Args - concatenated ops + // + // As a special case, OpConcat with 0 Args is used for "empty" + // set of operations. + OpConcat + + // OpDot is a '.' wildcard. + OpDot + + // OpAlt is x|y alternation of ops. 
+ // Examples: `a|bc` `x(.*?)|y(.*?)` + // Args - union-connected regexp branches + OpAlt + + // OpStar is a shorthand for {0,} repetition. + // Examples: `x*` + // Args[0] - repeated expression + OpStar + + // OpPlus is a shorthand for {1,} repetition. + // Examples: `x+` + // Args[0] - repeated expression + OpPlus + + // OpQuestion is a shorthand for {0,1} repetition. + // Examples: `x?` + // Args[0] - repeated expression + OpQuestion + + // OpNonGreedy makes its operand quantifier non-greedy. + // Examples: `x??` `x*?` `x+?` + // Args[0] - quantified expression + OpNonGreedy + + // OpPossessive makes its operand quantifier possessive. + // Examples: `x?+` `x*+` `x++` + // Args[0] - quantified expression + OpPossessive + + // OpCaret is ^ anchor. + OpCaret + + // OpDollar is $ anchor. + OpDollar + + // OpLiteral is a collection of consecutive chars. + // Examples: `ab` `10x` + // Args - enclosed characters (OpChar) + OpLiteral + + // OpChar is a single literal pattern character. + // Examples: `a` `6` `ф` + OpChar + + // OpString is an artificial element that is used in other expressions. + OpString + + // OpQuote is a \Q...\E enclosed literal. + // Examples: `\Q.?\E` `\Q?q[]=1` + // + // Note that closing \E is not mandatory. + OpQuote + + // OpEscapeChar is a single char escape. + // Examples: `\d` `\a` `\n` + OpEscapeChar + + // OpEscapeMeta is an escaped meta char. + // Examples: `\(` `\[` `\+` + OpEscapeMeta + + // OpEscapeOctal is an octal char code escape (up to 3 digits). + // Examples: `\123` `\12` + OpEscapeOctal + + // OpEscapeHex is a hex char code escape. + // Examples: `\x7F` `\xF7` + // FormEscapeHexFull examples: `\x{10FFFF}` `\x{F}`. + OpEscapeHex + + // OpEscapeUni is a Unicode char class escape. + // Examples: `\pS` `\pL` `\PL` + // FormEscapeUniFull examples: `\p{Greek}` `\p{Symbol}` `\p{^L}` + OpEscapeUni + + // OpCharClass is a char class enclosed in []. + // Examples: `[abc]` `[a-z0-9\]]` + // Args - char class elements (can include OpCharRange and OpPosixClass). + OpCharClass + + // OpNegCharClass is a negated char class enclosed in []. + // Examples: `[^abc]` `[^a-z0-9\]]` + // Args - char class elements (can include OpCharRange and OpPosixClass). + OpNegCharClass + + // OpCharRange is an inclusive char range inside a char class. + // Examples: `0-9` `A-Z` + // Args[0] - range lower bound (OpChar or OpEscape). + // Args[1] - range upper bound (OpChar or OpEscape). + OpCharRange + + // OpPosixClass is a named ASCII char set inside a char class. + // Examples: `[:alpha:]` `[:blank:]` + OpPosixClass + + // OpRepeat is a {min,max} repetition quantifier. + // Examples: `x{5}` `x{min,max}` `x{min,}` + // Args[0] - repeated expression + // Args[1] - repeat count (OpString) + OpRepeat + + // OpCapture is `(re)` capturing group. + // Examples: `(abc)` `(x|y)` + // Args[0] - enclosed expression + OpCapture + + // OpNamedCapture is `(?Pre)` capturing group. + // Examples: `(?Pabc)` `(?Px|y)` + // FormNamedCaptureAngle examples: `(?abc)` `(?x|y)` + // FormNamedCaptureQuote examples: `(?'foo'abc)` `(?'name'x|y)` + // Args[0] - enclosed expression (OpConcat with 0 args for empty group) + // Args[1] - group name (OpString) + OpNamedCapture + + // OpGroup is `(?:re)` non-capturing group. + // Examples: `(?:abc)` `(?:x|y)` + // Args[0] - enclosed expression (OpConcat with 0 args for empty group) + OpGroup + + // OpGroupWithFlags is `(?flags:re)` non-capturing group. 
+ // Examples: `(?i:abc)` `(?i:x|y)` + // Args[0] - enclosed expression (OpConcat with 0 args for empty group) + // Args[1] - flags (OpString) + OpGroupWithFlags + + // OpAtomicGroup is `(?>re)` non-capturing group without backtracking. + // Examples: `(?>foo)` `(?>)` + // Args[0] - enclosed expression (OpConcat with 0 args for empty group) + OpAtomicGroup + + // OpPositiveLookahead is `(?=re)` asserts that following text matches re. + // Examples: `(?=foo)` + // Args[0] - enclosed expression (OpConcat with 0 args for empty group) + OpPositiveLookahead + + // OpNegativeLookahead is `(?!re)` asserts that following text doesn't match re. + // Examples: `(?!foo)` + // Args[0] - enclosed expression (OpConcat with 0 args for empty group) + OpNegativeLookahead + + // OpPositiveLookbehind is `(?<=re)` asserts that preceding text matches re. + // Examples: `(?<=foo)` + // Args[0] - enclosed expression (OpConcat with 0 args for empty group) + OpPositiveLookbehind + + // OpNegativeLookbehind is `(?=re)` asserts that preceding text doesn't match re. + // Examples: `(?= Operation(len(_Operation_index)-1) { + return "Operation(" + strconv.FormatInt(int64(i), 10) + ")" + } + return _Operation_name[_Operation_index[i]:_Operation_index[i+1]] +} diff --git a/vendor/github.com/quasilyte/regex/syntax/parser.go b/vendor/github.com/quasilyte/regex/syntax/parser.go new file mode 100644 index 0000000..faf0f8b --- /dev/null +++ b/vendor/github.com/quasilyte/regex/syntax/parser.go @@ -0,0 +1,471 @@ +package syntax + +import ( + "errors" + "fmt" + "strings" +) + +type ParserOptions struct { + // NoLiterals disables OpChar merging into OpLiteral. + NoLiterals bool +} + +func NewParser(opts *ParserOptions) *Parser { + return newParser(opts) +} + +type Parser struct { + out Regexp + lexer lexer + exprPool []Expr + + prefixParselets [256]prefixParselet + infixParselets [256]infixParselet + + charClass []Expr + allocated uint + + opts ParserOptions +} + +// ParsePCRE parses PHP-style pattern with delimiters. +// An example of such pattern is `/foo/i`. 
+func (p *Parser) ParsePCRE(pattern string) (*RegexpPCRE, error) { + pcre, err := p.newPCRE(pattern) + if err != nil { + return nil, err + } + if pcre.HasModifier('x') { + return nil, errors.New("'x' modifier is not supported") + } + re, err := p.Parse(pcre.Pattern) + if re != nil { + pcre.Expr = re.Expr + } + return pcre, err +} + +func (p *Parser) Parse(pattern string) (result *Regexp, err error) { + defer func() { + r := recover() + if r == nil { + return + } + if err2, ok := r.(ParseError); ok { + err = err2 + return + } + panic(r) + }() + + p.lexer.Init(pattern) + p.allocated = 0 + p.out.Pattern = pattern + if pattern == "" { + p.out.Expr = *p.newExpr(OpConcat, Position{}) + } else { + p.out.Expr = *p.parseExpr(0) + } + + if !p.opts.NoLiterals { + p.mergeChars(&p.out.Expr) + } + p.setValues(&p.out.Expr) + + return &p.out, nil +} + +type prefixParselet func(token) *Expr + +type infixParselet func(*Expr, token) *Expr + +func newParser(opts *ParserOptions) *Parser { + var p Parser + + if opts != nil { + p.opts = *opts + } + p.exprPool = make([]Expr, 256) + + for tok, op := range tok2op { + if op != 0 { + p.prefixParselets[tokenKind(tok)] = p.parsePrefixElementary + } + } + + p.prefixParselets[tokEscapeHexFull] = func(tok token) *Expr { + return p.newExprForm(OpEscapeHex, FormEscapeHexFull, tok.pos) + } + p.prefixParselets[tokEscapeUniFull] = func(tok token) *Expr { + return p.newExprForm(OpEscapeUni, FormEscapeUniFull, tok.pos) + } + + p.prefixParselets[tokLparen] = func(tok token) *Expr { return p.parseGroup(OpCapture, tok) } + p.prefixParselets[tokLparenAtomic] = func(tok token) *Expr { return p.parseGroup(OpAtomicGroup, tok) } + p.prefixParselets[tokLparenPositiveLookahead] = func(tok token) *Expr { return p.parseGroup(OpPositiveLookahead, tok) } + p.prefixParselets[tokLparenNegativeLookahead] = func(tok token) *Expr { return p.parseGroup(OpNegativeLookahead, tok) } + p.prefixParselets[tokLparenPositiveLookbehind] = func(tok token) *Expr { return p.parseGroup(OpPositiveLookbehind, tok) } + p.prefixParselets[tokLparenNegativeLookbehind] = func(tok token) *Expr { return p.parseGroup(OpNegativeLookbehind, tok) } + + p.prefixParselets[tokLparenName] = func(tok token) *Expr { + return p.parseNamedCapture(FormDefault, tok) + } + p.prefixParselets[tokLparenNameAngle] = func(tok token) *Expr { + return p.parseNamedCapture(FormNamedCaptureAngle, tok) + } + p.prefixParselets[tokLparenNameQuote] = func(tok token) *Expr { + return p.parseNamedCapture(FormNamedCaptureQuote, tok) + } + + p.prefixParselets[tokLparenFlags] = p.parseGroupWithFlags + + p.prefixParselets[tokPipe] = func(tok token) *Expr { + // We need prefix pipe parselet to handle `(|x)` syntax. 
+ right := p.parseExpr(1) + return p.newExpr(OpAlt, tok.pos, p.newEmpty(tok.pos), right) + } + p.prefixParselets[tokLbracket] = func(tok token) *Expr { + return p.parseCharClass(OpCharClass, tok) + } + p.prefixParselets[tokLbracketCaret] = func(tok token) *Expr { + return p.parseCharClass(OpNegCharClass, tok) + } + + p.infixParselets[tokRepeat] = func(left *Expr, tok token) *Expr { + repeatLit := p.newExpr(OpString, tok.pos) + return p.newExpr(OpRepeat, combinePos(left.Pos, tok.pos), left, repeatLit) + } + p.infixParselets[tokStar] = func(left *Expr, tok token) *Expr { + return p.newExpr(OpStar, combinePos(left.Pos, tok.pos), left) + } + p.infixParselets[tokConcat] = func(left *Expr, tok token) *Expr { + right := p.parseExpr(2) + if left.Op == OpConcat { + left.Args = append(left.Args, *right) + left.Pos.End = right.End() + return left + } + return p.newExpr(OpConcat, combinePos(left.Pos, right.Pos), left, right) + } + p.infixParselets[tokPipe] = p.parseAlt + p.infixParselets[tokMinus] = p.parseMinus + p.infixParselets[tokPlus] = p.parsePlus + p.infixParselets[tokQuestion] = p.parseQuestion + + return &p +} + +func (p *Parser) setValues(e *Expr) { + for i := range e.Args { + p.setValues(&e.Args[i]) + } + e.Value = p.exprValue(e) +} + +func (p *Parser) exprValue(e *Expr) string { + return p.out.Pattern[e.Begin():e.End()] +} + +func (p *Parser) mergeChars(e *Expr) { + for i := range e.Args { + p.mergeChars(&e.Args[i]) + } + if e.Op != OpConcat || len(e.Args) < 2 { + return + } + + args := e.Args[:0] + i := 0 + for i < len(e.Args) { + first := i + chars := 0 + for j := i; j < len(e.Args) && e.Args[j].Op == OpChar; j++ { + chars++ + } + if chars > 1 { + c1 := e.Args[first] + c2 := e.Args[first+chars-1] + lit := p.newExpr(OpLiteral, combinePos(c1.Pos, c2.Pos)) + for j := 0; j < chars; j++ { + lit.Args = append(lit.Args, e.Args[first+j]) + } + args = append(args, *lit) + i += chars + } else { + args = append(args, e.Args[i]) + i++ + } + } + if len(args) == 1 { + *e = args[0] // Turn OpConcat into OpLiteral + } else { + e.Args = args + } +} + +func (p *Parser) newEmpty(pos Position) *Expr { + return p.newExpr(OpConcat, pos) +} + +func (p *Parser) newExprForm(op Operation, form Form, pos Position, args ...*Expr) *Expr { + e := p.newExpr(op, pos, args...) 
+ e.Form = form + return e +} + +func (p *Parser) newExpr(op Operation, pos Position, args ...*Expr) *Expr { + e := p.allocExpr() + *e = Expr{ + Op: op, + Pos: pos, + Args: e.Args[:0], + } + for _, arg := range args { + e.Args = append(e.Args, *arg) + } + return e +} + +func (p *Parser) allocExpr() *Expr { + i := p.allocated + if i < uint(len(p.exprPool)) { + p.allocated++ + return &p.exprPool[i] + } + return &Expr{} +} + +func (p *Parser) expect(kind tokenKind) Position { + tok := p.lexer.NextToken() + if tok.kind != kind { + throwErrorf(int(tok.pos.Begin), int(tok.pos.End), "expected '%s', found '%s'", kind, tok.kind) + } + return tok.pos +} + +func (p *Parser) parseExpr(precedence int) *Expr { + tok := p.lexer.NextToken() + prefix := p.prefixParselets[tok.kind] + if prefix == nil { + throwfPos(tok.pos, "unexpected token: %v", tok) + } + left := prefix(tok) + + for precedence < p.precedenceOf(p.lexer.Peek()) { + tok := p.lexer.NextToken() + infix := p.infixParselets[tok.kind] + left = infix(left, tok) + } + + return left +} + +func (p *Parser) parsePrefixElementary(tok token) *Expr { + return p.newExpr(tok2op[tok.kind], tok.pos) +} + +func (p *Parser) parseCharClass(op Operation, tok token) *Expr { + var endPos Position + p.charClass = p.charClass[:0] + for { + p.charClass = append(p.charClass, *p.parseExpr(0)) + next := p.lexer.Peek() + if next.kind == tokRbracket { + endPos = next.pos + p.lexer.NextToken() + break + } + if next.kind == tokNone { + throwfPos(tok.pos, "unterminated '['") + } + } + + result := p.newExpr(op, combinePos(tok.pos, endPos)) + result.Args = append(result.Args, p.charClass...) + return result +} + +func (p *Parser) parseMinus(left *Expr, tok token) *Expr { + if p.isValidCharRangeOperand(left) { + if p.lexer.Peek().kind != tokRbracket { + right := p.parseExpr(2) + return p.newExpr(OpCharRange, combinePos(left.Pos, right.Pos), left, right) + } + } + p.charClass = append(p.charClass, *left) + return p.newExpr(OpChar, tok.pos) +} + +func (p *Parser) isValidCharRangeOperand(e *Expr) bool { + switch e.Op { + case OpEscapeHex, OpEscapeOctal, OpEscapeMeta, OpChar: + return true + case OpEscapeChar: + switch p.exprValue(e) { + case `\\`, `\|`, `\*`, `\+`, `\?`, `\.`, `\[`, `\^`, `\$`, `\(`, `\)`: + return true + } + } + return false +} + +func (p *Parser) parsePlus(left *Expr, tok token) *Expr { + op := OpPlus + switch left.Op { + case OpPlus, OpStar, OpQuestion, OpRepeat: + op = OpPossessive + } + return p.newExpr(op, combinePos(left.Pos, tok.pos), left) +} + +func (p *Parser) parseQuestion(left *Expr, tok token) *Expr { + op := OpQuestion + switch left.Op { + case OpPlus, OpStar, OpQuestion, OpRepeat: + op = OpNonGreedy + } + return p.newExpr(op, combinePos(left.Pos, tok.pos), left) +} + +func (p *Parser) parseAlt(left *Expr, tok token) *Expr { + var right *Expr + switch p.lexer.Peek().kind { + case tokRparen, tokNone: + // This is needed to handle `(x|)` syntax. 
+ right = p.newEmpty(tok.pos) + default: + right = p.parseExpr(1) + } + if left.Op == OpAlt { + left.Args = append(left.Args, *right) + left.Pos.End = right.End() + return left + } + return p.newExpr(OpAlt, combinePos(left.Pos, right.Pos), left, right) +} + +func (p *Parser) parseGroupItem(tok token) *Expr { + if p.lexer.Peek().kind == tokRparen { + // This is needed to handle `() syntax.` + return p.newEmpty(tok.pos) + } + return p.parseExpr(0) +} + +func (p *Parser) parseGroup(op Operation, tok token) *Expr { + x := p.parseGroupItem(tok) + result := p.newExpr(op, tok.pos, x) + result.Pos.End = p.expect(tokRparen).End + return result +} + +func (p *Parser) parseNamedCapture(form Form, tok token) *Expr { + prefixLen := len("(?<") + if form == FormDefault { + prefixLen = len("(?P<") + } + name := p.newExpr(OpString, Position{ + Begin: tok.pos.Begin + uint16(prefixLen), + End: tok.pos.End - uint16(len(">")), + }) + x := p.parseGroupItem(tok) + result := p.newExprForm(OpNamedCapture, form, tok.pos, x, name) + result.Pos.End = p.expect(tokRparen).End + return result +} + +func (p *Parser) parseGroupWithFlags(tok token) *Expr { + var result *Expr + val := p.out.Pattern[tok.pos.Begin+1 : tok.pos.End] + switch { + case !strings.HasSuffix(val, ":"): + flags := p.newExpr(OpString, Position{ + Begin: tok.pos.Begin + uint16(len("(?")), + End: tok.pos.End, + }) + result = p.newExpr(OpFlagOnlyGroup, tok.pos, flags) + case val == "?:": + x := p.parseGroupItem(tok) + result = p.newExpr(OpGroup, tok.pos, x) + default: + flags := p.newExpr(OpString, Position{ + Begin: tok.pos.Begin + uint16(len("(?")), + End: tok.pos.End - uint16(len(":")), + }) + x := p.parseGroupItem(tok) + result = p.newExpr(OpGroupWithFlags, tok.pos, x, flags) + } + result.Pos.End = p.expect(tokRparen).End + return result +} + +func (p *Parser) precedenceOf(tok token) int { + switch tok.kind { + case tokPipe: + return 1 + case tokConcat, tokMinus: + return 2 + case tokPlus, tokStar, tokQuestion, tokRepeat: + return 3 + default: + return 0 + } +} + +func (p *Parser) newPCRE(source string) (*RegexpPCRE, error) { + if source == "" { + return nil, errors.New("empty pattern: can't find delimiters") + } + + delim := source[0] + endDelim := delim + switch delim { + case '(': + endDelim = ')' + case '{': + endDelim = '}' + case '[': + endDelim = ']' + case '<': + endDelim = '>' + case '\\': + return nil, errors.New("'\\' is not a valid delimiter") + default: + if isSpace(delim) { + return nil, errors.New("whitespace is not a valid delimiter") + } + if isAlphanumeric(delim) { + return nil, fmt.Errorf("'%c' is not a valid delimiter", delim) + } + } + + j := strings.LastIndexByte(source, endDelim) + if j == -1 { + return nil, fmt.Errorf("can't find '%c' ending delimiter", endDelim) + } + + pcre := &RegexpPCRE{ + Pattern: source[1:j], + Source: source, + Delim: [2]byte{delim, endDelim}, + Modifiers: source[j+1:], + } + return pcre, nil +} + +var tok2op = [256]Operation{ + tokDollar: OpDollar, + tokCaret: OpCaret, + tokDot: OpDot, + tokChar: OpChar, + tokMinus: OpChar, + tokEscapeChar: OpEscapeChar, + tokEscapeMeta: OpEscapeMeta, + tokEscapeHex: OpEscapeHex, + tokEscapeOctal: OpEscapeOctal, + tokEscapeUni: OpEscapeUni, + tokPosixClass: OpPosixClass, + tokQ: OpQuote, + tokComment: OpComment, +} diff --git a/vendor/github.com/quasilyte/regex/syntax/pos.go b/vendor/github.com/quasilyte/regex/syntax/pos.go new file mode 100644 index 0000000..51bdbf8 --- /dev/null +++ b/vendor/github.com/quasilyte/regex/syntax/pos.go @@ -0,0 +1,10 @@ +package syntax + 
+type Position struct { + Begin uint16 + End uint16 +} + +func combinePos(begin, end Position) Position { + return Position{Begin: begin.Begin, End: end.End} +} diff --git a/vendor/github.com/quasilyte/regex/syntax/tokenkind_string.go b/vendor/github.com/quasilyte/regex/syntax/tokenkind_string.go new file mode 100644 index 0000000..8800436 --- /dev/null +++ b/vendor/github.com/quasilyte/regex/syntax/tokenkind_string.go @@ -0,0 +1,59 @@ +// Code generated by "stringer -type=tokenKind -trimprefix=tok -linecomment=true"; DO NOT EDIT. + +package syntax + +import "strconv" + +func _() { + // An "invalid array index" compiler error signifies that the constant values have changed. + // Re-run the stringer command to generate them again. + var x [1]struct{} + _ = x[tokNone-0] + _ = x[tokChar-1] + _ = x[tokGroupFlags-2] + _ = x[tokPosixClass-3] + _ = x[tokConcat-4] + _ = x[tokRepeat-5] + _ = x[tokEscapeChar-6] + _ = x[tokEscapeMeta-7] + _ = x[tokEscapeOctal-8] + _ = x[tokEscapeUni-9] + _ = x[tokEscapeUniFull-10] + _ = x[tokEscapeHex-11] + _ = x[tokEscapeHexFull-12] + _ = x[tokComment-13] + _ = x[tokQ-14] + _ = x[tokMinus-15] + _ = x[tokLbracket-16] + _ = x[tokLbracketCaret-17] + _ = x[tokRbracket-18] + _ = x[tokDollar-19] + _ = x[tokCaret-20] + _ = x[tokQuestion-21] + _ = x[tokDot-22] + _ = x[tokPlus-23] + _ = x[tokStar-24] + _ = x[tokPipe-25] + _ = x[tokLparen-26] + _ = x[tokLparenName-27] + _ = x[tokLparenNameAngle-28] + _ = x[tokLparenNameQuote-29] + _ = x[tokLparenFlags-30] + _ = x[tokLparenAtomic-31] + _ = x[tokLparenPositiveLookahead-32] + _ = x[tokLparenPositiveLookbehind-33] + _ = x[tokLparenNegativeLookahead-34] + _ = x[tokLparenNegativeLookbehind-35] + _ = x[tokRparen-36] +} + +const _tokenKind_name = "NoneCharGroupFlagsPosixClassConcatRepeatEscapeCharEscapeMetaEscapeOctalEscapeUniEscapeUniFullEscapeHexEscapeHexFullComment\\Q-[[^]$^?.+*|((?P(?(?'name'(?flags(?>(?=(?<=(?!(?= tokenKind(len(_tokenKind_index)-1) { + return "tokenKind(" + strconv.FormatInt(int64(i), 10) + ")" + } + return _tokenKind_name[_tokenKind_index[i]:_tokenKind_index[i+1]] +} diff --git a/vendor/github.com/quasilyte/regex/syntax/utils.go b/vendor/github.com/quasilyte/regex/syntax/utils.go new file mode 100644 index 0000000..934680c --- /dev/null +++ b/vendor/github.com/quasilyte/regex/syntax/utils.go @@ -0,0 +1,30 @@ +package syntax + +func isSpace(ch byte) bool { + switch ch { + case '\r', '\n', '\t', '\f', '\v': + return true + default: + return false + } +} + +func isAlphanumeric(ch byte) bool { + return (ch >= 'a' && ch <= 'z') || + (ch >= 'A' && ch <= 'Z') || + (ch >= '0' && ch <= '9') +} + +func isDigit(ch byte) bool { + return ch >= '0' && ch <= '9' +} + +func isOctalDigit(ch byte) bool { + return ch >= '0' && ch <= '7' +} + +func isHexDigit(ch byte) bool { + return (ch >= '0' && ch <= '9') || + (ch >= 'a' && ch <= 'f') || + (ch >= 'A' && ch <= 'F') +} diff --git a/vendor/github.com/ryancurrah/gomodguard/.gitignore b/vendor/github.com/ryancurrah/gomodguard/.gitignore index 5131b46..030056d 100644 --- a/vendor/github.com/ryancurrah/gomodguard/.gitignore +++ b/vendor/github.com/ryancurrah/gomodguard/.gitignore @@ -19,3 +19,5 @@ *.xml dist/ + +coverage.* \ No newline at end of file diff --git a/vendor/github.com/ryancurrah/gomodguard/.golangci.yml b/vendor/github.com/ryancurrah/gomodguard/.golangci.yml new file mode 100644 index 0000000..9c19e63 --- /dev/null +++ b/vendor/github.com/ryancurrah/gomodguard/.golangci.yml @@ -0,0 +1,6 @@ +linters: + enable-all: true + disable: + - funlen + - gochecknoglobals + - lll 
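For orientation, the vendored `github.com/quasilyte/regex/syntax` package added above exposes a small parsing API: `NewParser` plus `Parse`, which return an `Expr` tree whose nodes carry an `Operation`, a source `Value`, and child `Args`. The sketch below shows how a caller might walk a parsed pattern; it uses only identifiers visible in this diff, while the concrete pattern and the `walk` helper are illustrative assumptions rather than code from this change.

```go
package main

import (
	"fmt"

	"github.com/quasilyte/regex/syntax"
)

func main() {
	// NewParser accepts nil options; setting NoLiterals would keep
	// consecutive OpChar nodes from being merged into OpLiteral.
	p := syntax.NewParser(nil)

	re, err := p.Parse(`(?P<word>\w+)-\d{2,}`)
	if err != nil {
		panic(err)
	}

	// Each Expr records its Operation, the matched source text (Value),
	// and child expressions (Args), so a recursive walk prints the AST.
	var walk func(e syntax.Expr, depth int)
	walk = func(e syntax.Expr, depth int) {
		fmt.Printf("%*s%s %q\n", depth*2, "", e.Op, e.Value)
		for _, arg := range e.Args {
			walk(arg, depth+1)
		}
	}
	walk(re.Expr, 0)
}
```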
diff --git a/vendor/github.com/ryancurrah/gomodguard/.gomodguard.yaml b/vendor/github.com/ryancurrah/gomodguard/.gomodguard.yaml deleted file mode 100644 index c0f061f..0000000 --- a/vendor/github.com/ryancurrah/gomodguard/.gomodguard.yaml +++ /dev/null @@ -1,18 +0,0 @@ -allowed: - modules: # List of allowed modules - - gopkg.in/yaml.v2 - - github.com/go-xmlfmt/xmlfmt - - github.com/phayes/checkstyle - domains: # List of allowed module domains - - golang.org - -blocked: - modules: # List of blocked modules - - github.com/uudashr/go-module: # Blocked module - recommendations: # Recommended modules that should be used instead (Optional) - - golang.org/x/mod - reason: "`mod` is the official go.mod parser library." # Reason why the recommended module should be used (Optional) - - github.com/mitchellh/go-homedir: - recommendations: - - github.com/ryancurrah/gomodguard - reason: "testing if the linted module is not blocked when it is recommended" diff --git a/vendor/github.com/ryancurrah/gomodguard/Makefile b/vendor/github.com/ryancurrah/gomodguard/Makefile index d765f52..9af2f76 100644 --- a/vendor/github.com/ryancurrah/gomodguard/Makefile +++ b/vendor/github.com/ryancurrah/gomodguard/Makefile @@ -3,7 +3,7 @@ version = $(shell printf '%s' $$(cat VERSION)) .PHONEY: lint lint: - golangci-lint run -v --enable-all --disable funlen,gochecknoglobals,lll ./... + golangci-lint run ./... .PHONEY: build build: @@ -17,6 +17,14 @@ dockerbuild: run: build ./gomodguard +.PHONEY: test +test: + go test -v -coverprofile coverage.out + +.PHONEY: cover +cover: + gocover-cobertura < coverage.out > coverage.xml + .PHONEY: dockerrun dockerrun: dockerbuild docker run -v "${current_dir}/.gomodguard.yaml:/.gomodguard.yaml" ryancurrah/gomodguard:latest @@ -30,8 +38,12 @@ release: .PHONEY: clean clean: rm -rf dist/ - rm -f gomodguard + rm -f gomodguard coverage.xml coverage.out .PHONEY: install-tools-mac install-tools-mac: brew install goreleaser/tap/goreleaser + +.PHONEY: install-go-tools +install-go-tools: + go get github.com/t-yuki/gocover-cobertura diff --git a/vendor/github.com/ryancurrah/gomodguard/README.md b/vendor/github.com/ryancurrah/gomodguard/README.md index 89a2398..8e2e416 100644 --- a/vendor/github.com/ryancurrah/gomodguard/README.md +++ b/vendor/github.com/ryancurrah/gomodguard/README.md @@ -1,4 +1,10 @@ # gomodguard +[![License](https://img.shields.io/github/license/ryancurrah/gomodguard?style=flat-square)](/LICENSE) +[![Codecov](https://img.shields.io/codecov/c/gh/ryancurrah/gomodguard?style=flat-square)](https://codecov.io/gh/ryancurrah/gomodguard) +[![GitHub Workflow Status](https://img.shields.io/github/workflow/status/ryancurrah/gomodguard/Go?logo=Go&style=flat-square)](https://github.com/ryancurrah/gomodguard/actions?query=workflow%3AGo) +[![GitHub release (latest SemVer)](https://img.shields.io/github/v/release/ryancurrah/gomodguard?style=flat-square)](https://github.com/ryancurrah/gomodguard/releases/latest) +[![Docker](https://img.shields.io/docker/pulls/ryancurrah/gomodguard?style=flat-square)](https://hub.docker.com/r/ryancurrah/gomodguard) +[![Github Releases Stats of golangci-lint](https://img.shields.io/github/downloads/ryancurrah/gomodguard/total.svg?logo=github&style=flat-square)](https://somsubhra.com/github-release-stats/?username=ryancurrah&repository=gomodguard) @@ -6,7 +12,7 @@ Allow and block list linter for direct Go module dependencies. This is useful fo ## Description -Allowed and blocked modules are defined in a `.gomodguard.yaml` or `~/.gomodguard.yaml` file. 
+Allowed and blocked modules are defined in a `./.gomodguard.yaml` or `~/.gomodguard.yaml` file. Modules can be allowed by module or domain name. When allowed modules are specified any modules not in the allowed configuration are blocked. @@ -18,6 +24,8 @@ Alternative modules can be optionally recommended in the blocked modules list. If the linted module imports a blocked module but the linted module is in the recommended modules list the blocked module is ignored. Usually, this means the linted module wraps that blocked module for use by other modules, therefore the import of the blocked module should not be blocked. +Version constraints can be specified for modules as well which lets you block new or old versions of modules or specific versions. + Results are printed to `stdout`. Logging statements are printed to `stderr`. @@ -42,6 +50,10 @@ blocked: recommendations: # Recommended modules that should be used instead (Optional) - golang.org/x/mod reason: "`mod` is the official go.mod parser library." # Reason why the recommended module should be used (Optional) + versions: # List of blocked module version constraints. + - github.com/mitchellh/go-homedir: # Blocked module with version constraint. + version: "<= 1.1.0" # Version constraint, see https://github.com/Masterminds/semver#basic-comparisons. + reason: "testing if blocked version constraint works." # Reason why the version constraint exists. ``` ## Usage @@ -52,17 +64,22 @@ Usage: gomodguard [files...] Also supports package syntax but will use it in relative path, i.e. ./pkg/... Flags: -f string - Report results to the specified file. A report type must also be specified + Report results to the specified file. A report type must also be specified -file string - -h Show this help text + -h Show this help text -help - -n Don't lint test files + -i int + Exit code when issues were found (default 2) + -issues-exit-code int + (default 2) + + -n Don't lint test files -no-test -r string - Report results to one of the following formats: checkstyle. A report file destination must also be specified + Report results to one of the following formats: checkstyle. A report file destination must also be specified -report string ``` diff --git a/vendor/github.com/ryancurrah/gomodguard/VERSION b/vendor/github.com/ryancurrah/gomodguard/VERSION index 3e7bcf0..795460f 100644 --- a/vendor/github.com/ryancurrah/gomodguard/VERSION +++ b/vendor/github.com/ryancurrah/gomodguard/VERSION @@ -1 +1 @@ -v1.0.4 +v1.1.0 diff --git a/vendor/github.com/ryancurrah/gomodguard/cmd.go b/vendor/github.com/ryancurrah/gomodguard/cmd.go new file mode 100644 index 0000000..89a22ae --- /dev/null +++ b/vendor/github.com/ryancurrah/gomodguard/cmd.go @@ -0,0 +1,244 @@ +package gomodguard + +import ( + "flag" + "fmt" + "io/ioutil" + "log" + "os" + "path/filepath" + "strings" + + "github.com/go-xmlfmt/xmlfmt" + "github.com/mitchellh/go-homedir" + "github.com/phayes/checkstyle" + "gopkg.in/yaml.v2" +) + +const ( + errFindingHomedir = "unable to find home directory, %w" + errReadingConfigFile = "could not read config file: %w" + errParsingConfigFile = "could not parse config file: %w" +) + +var ( + configFile = ".gomodguard.yaml" + logger = log.New(os.Stderr, "", 0) + errFindingConfigFile = fmt.Errorf("could not find config file") +) + +// Run the gomodguard linter. Returns the exit code to use. 
+func Run() int { + var ( + args []string + help bool + noTest bool + report string + reportFile string + issuesExitCode int + cwd, _ = os.Getwd() + ) + + flag.BoolVar(&help, "h", false, "Show this help text") + flag.BoolVar(&help, "help", false, "") + flag.BoolVar(&noTest, "n", false, "Don't lint test files") + flag.BoolVar(&noTest, "no-test", false, "") + flag.StringVar(&report, "r", "", "Report results to one of the following formats: checkstyle. A report file destination must also be specified") + flag.StringVar(&report, "report", "", "") + flag.StringVar(&reportFile, "f", "", "Report results to the specified file. A report type must also be specified") + flag.StringVar(&reportFile, "file", "", "") + flag.IntVar(&issuesExitCode, "i", 2, "Exit code when issues were found") + flag.IntVar(&issuesExitCode, "issues-exit-code", 2, "") + flag.Parse() + + report = strings.TrimSpace(strings.ToLower(report)) + + if help { + showHelp() + return 0 + } + + if report != "" && report != "checkstyle" { + logger.Fatalf("error: invalid report type '%s'", report) + } + + if report != "" && reportFile == "" { + logger.Fatalf("error: a report file must be specified when a report is enabled") + } + + if report == "" && reportFile != "" { + logger.Fatalf("error: a report type must be specified when a report file is enabled") + } + + args = flag.Args() + if len(args) == 0 { + args = []string{"./..."} + } + + config, err := GetConfig(configFile) + if err != nil { + logger.Fatalf("error: %s", err) + } + + filteredFiles := GetFilteredFiles(cwd, noTest, args) + + processor, err := NewProcessor(config) + if err != nil { + logger.Fatalf("error: %s", err) + } + + logger.Printf("info: allowed modules, %+v", config.Allowed.Modules) + logger.Printf("info: allowed module domains, %+v", config.Allowed.Domains) + logger.Printf("info: blocked modules, %+v", config.Blocked.Modules.Get()) + logger.Printf("info: blocked modules with version constraints, %+v", config.Blocked.Versions.Get()) + + results := processor.ProcessFiles(filteredFiles) + + if report == "checkstyle" { + err := WriteCheckstyle(reportFile, results) + if err != nil { + logger.Fatalf("error: %s", err) + } + } + + for _, r := range results { + fmt.Println(r.String()) + } + + if len(results) > 0 { + return issuesExitCode + } + + return 0 +} + +// GetConfig from YAML file. +func GetConfig(configFile string) (*Configuration, error) { + config := Configuration{} + + home, err := homedir.Dir() + if err != nil { + return nil, fmt.Errorf(errFindingHomedir, err) + } + + cfgFile := "" + homeDirCfgFile := filepath.Join(home, configFile) + + switch { + case fileExists(configFile): + cfgFile = configFile + case fileExists(homeDirCfgFile): + cfgFile = homeDirCfgFile + default: + return nil, fmt.Errorf("%w: %s %s", errFindingConfigFile, configFile, homeDirCfgFile) + } + + data, err := ioutil.ReadFile(cfgFile) + if err != nil { + return nil, fmt.Errorf(errReadingConfigFile, err) + } + + err = yaml.Unmarshal(data, &config) + if err != nil { + return nil, fmt.Errorf(errParsingConfigFile, err) + } + + return &config, nil +} + +// GetFilteredFiles returns files based on search string arguments and filters. +func GetFilteredFiles(cwd string, skipTests bool, args []string) []string { + var ( + foundFiles = []string{} + filteredFiles = []string{} + ) + + for _, f := range args { + if strings.HasSuffix(f, "/...") { + dir, _ := filepath.Split(f) + + foundFiles = append(foundFiles, expandGoWildcard(dir)...) 
+ + continue + } + + if _, err := os.Stat(f); err == nil { + foundFiles = append(foundFiles, f) + } + } + + // Use relative path to print shorter names, sort out test foundFiles if chosen. + for _, f := range foundFiles { + if skipTests { + if strings.HasSuffix(f, "_test.go") { + continue + } + } + + if relativePath, err := filepath.Rel(cwd, f); err == nil { + filteredFiles = append(filteredFiles, relativePath) + + continue + } + + filteredFiles = append(filteredFiles, f) + } + + return filteredFiles +} + +// showHelp text for command line. +func showHelp() { + helpText := `Usage: gomodguard [files...] +Also supports package syntax but will use it in relative path, i.e. ./pkg/... +Flags:` + fmt.Println(helpText) + flag.PrintDefaults() +} + +// WriteCheckstyle takes the results and writes them to a checkstyle formated file. +func WriteCheckstyle(checkstyleFilePath string, results []Result) error { + check := checkstyle.New() + + for i := range results { + file := check.EnsureFile(results[i].FileName) + file.AddError(checkstyle.NewError(results[i].LineNumber, 1, checkstyle.SeverityError, results[i].Reason, "gomodguard")) + } + + checkstyleXML := fmt.Sprintf("\n%s", check.String()) + + err := ioutil.WriteFile(checkstyleFilePath, []byte(xmlfmt.FormatXML(checkstyleXML, "", " ")), 0644) // nolint:gosec + if err != nil { + return err + } + + return nil +} + +// fileExists returns true if the file path provided exists. +func fileExists(filename string) bool { + info, err := os.Stat(filename) + if os.IsNotExist(err) { + return false + } + + return !info.IsDir() +} + +// expandGoWildcard path provided. +func expandGoWildcard(root string) []string { + foundFiles := []string{} + + _ = filepath.Walk(root, func(path string, info os.FileInfo, err error) error { + // Only append go foundFiles. 
+ if !strings.HasSuffix(info.Name(), ".go") { + return nil + } + + foundFiles = append(foundFiles, path) + + return nil + }) + + return foundFiles +} diff --git a/vendor/github.com/ryancurrah/gomodguard/go.mod b/vendor/github.com/ryancurrah/gomodguard/go.mod index 0f0e92e..1f17483 100644 --- a/vendor/github.com/ryancurrah/gomodguard/go.mod +++ b/vendor/github.com/ryancurrah/gomodguard/go.mod @@ -3,9 +3,10 @@ module github.com/ryancurrah/gomodguard go 1.14 require ( + github.com/Masterminds/semver v1.5.0 github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b github.com/mitchellh/go-homedir v1.1.0 github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d - golang.org/x/mod v0.2.0 - gopkg.in/yaml.v2 v2.2.8 + golang.org/x/mod v0.4.0 + gopkg.in/yaml.v2 v2.4.0 ) diff --git a/vendor/github.com/ryancurrah/gomodguard/go.sum b/vendor/github.com/ryancurrah/gomodguard/go.sum index 0f4bf32..ccbc5e0 100644 --- a/vendor/github.com/ryancurrah/gomodguard/go.sum +++ b/vendor/github.com/ryancurrah/gomodguard/go.sum @@ -1,3 +1,5 @@ +github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= +github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b h1:khEcpUM4yFcxg4/FHQWkvVRmgijNXRfzkIDHh23ggEo= github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= @@ -6,8 +8,8 @@ github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d h1:CdDQnGF8Nq9oc github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d/go.mod h1:3OzsM7FXDQlpCiw2j81fOmAwQLnZnLGXVKUzeKQXIAw= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/mod v0.2.0 h1:KU7oHjnv3XNWfa5COkzUifxZmxp1TyI7ImMXqFxLwvQ= -golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0 h1:8pl+sMODzuvGJkmj2W4kZihvVb5mKm8pB/X44PIQHv8= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -20,5 +22,5 @@ golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898 h1:/atklqdjdhuosWIl6AIbO golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= -gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= diff --git a/vendor/github.com/ryancurrah/gomodguard/gomodguard.go b/vendor/github.com/ryancurrah/gomodguard/gomodguard.go index cd4f7d6..3f2cc0d 100644 --- a/vendor/github.com/ryancurrah/gomodguard/gomodguard.go +++ 
b/vendor/github.com/ryancurrah/gomodguard/gomodguard.go @@ -7,30 +7,90 @@ import ( "go/parser" "go/token" "io/ioutil" - "log" "os" "os/exec" "strings" + "github.com/Masterminds/semver" + "golang.org/x/mod/modfile" ) +const ( + goModFilename = "go.mod" + errReadingGoModFile = "unable to read go mod file %s: %w" + errParsingGoModFile = "unable to parsing go mod file %s: %w" +) + var ( - blockedReasonNotInAllowedList = "import of package `%s` is blocked because the module is not in the allowed modules list." - blockedReasonInBlockedList = "import of package `%s` is blocked because the module is in the blocked modules list." - goModFilename = "go.mod" + blockReasonNotInAllowedList = "import of package `%s` is blocked because the module is not in the allowed modules list." + blockReasonInBlockedList = "import of package `%s` is blocked because the module is in the blocked modules list." + blockReasonHasLocalReplaceDirective = "import of package `%s` is blocked because the module has a local replace directive." ) -// Recommendations are alternative modules to use and a reason why. -type Recommendations struct { +// BlockedVersion has a version constraint a reason why the the module version is blocked. +type BlockedVersion struct { + Version string `yaml:"version"` + Reason string `yaml:"reason"` +} + +// IsLintedModuleVersionBlocked returns true if a version constraint is specified and the +// linted module version matches the constraint. +func (r *BlockedVersion) IsLintedModuleVersionBlocked(lintedModuleVersion string) bool { + if r.Version == "" { + return false + } + + constraint, err := semver.NewConstraint(r.Version) + if err != nil { + return false + } + + version, err := semver.NewVersion(lintedModuleVersion) + if err != nil { + return false + } + + meet := constraint.Check(version) + + return meet +} + +// Message returns the reason why the module version is blocked. +func (r *BlockedVersion) Message(lintedModuleVersion string) string { + msg := "" + + // Add version contraint to message. + msg += fmt.Sprintf("version `%s` is blocked because it does not meet the version constraint `%s`.", lintedModuleVersion, r.Version) + + if r.Reason == "" { + return msg + } + + // Add reason to message. + msg += fmt.Sprintf(" %s.", strings.TrimRight(r.Reason, ".")) + + return msg +} + +// BlockedModule has alternative modules to use and a reason why the module is blocked. +type BlockedModule struct { Recommendations []string `yaml:"recommendations"` Reason string `yaml:"reason"` } -// IsRecommended returns true if the package provided is in the Recommendations list -func (r *Recommendations) IsRecommended(pkg string) bool { +// IsCurrentModuleARecommendation returns true if the current module is in the Recommendations list. +// +// If the current go.mod file being linted is a recommended module of a +// blocked module and it imports that blocked module, do not set as blocked. +// This could mean that the linted module is a wrapper for that blocked module. +func (r *BlockedModule) IsCurrentModuleARecommendation(currentModuleName string) bool { + if r == nil { + return false + } + for n := range r.Recommendations { - if strings.TrimSpace(pkg) == strings.TrimSpace(r.Recommendations[n]) { + if strings.TrimSpace(currentModuleName) == strings.TrimSpace(r.Recommendations[n]) { return true } } @@ -38,14 +98,11 @@ func (r *Recommendations) IsRecommended(pkg string) bool { return false } -// String returns the recommended modules and reason message. 
-func (r *Recommendations) String() string { +// Message returns the reason why the module is blocked and a list of recommended modules if provided. +func (r *BlockedModule) Message() string { msg := "" - if r == nil { - return msg - } - + // Add recommendations to message for i := range r.Recommendations { switch { case len(r.Recommendations) == 1: @@ -59,8 +116,15 @@ func (r *Recommendations) String() string { } } - if r.Reason != "" { - msg += fmt.Sprintf(" %s", r.Reason) + if r.Reason == "" { + return msg + } + + // Add reason to message + if msg == "" { + msg = fmt.Sprintf("%s.", strings.TrimRight(r.Reason, ".")) + } else { + msg += fmt.Sprintf(" %s.", strings.TrimRight(r.Reason, ".")) } return msg @@ -68,25 +132,24 @@ func (r *Recommendations) String() string { // HasRecommendations returns true if the blocked package has // recommended modules. -func (r *Recommendations) HasRecommendations() bool { +func (r *BlockedModule) HasRecommendations() bool { + if r == nil { + return false + } + return len(r.Recommendations) > 0 } -// BlockedModule is a blocked module name and -// optionally a list of recommended modules -// and a reason message. -type BlockedModule map[string]Recommendations +// BlockedVersions a list of blocked modules by a version constraint. +type BlockedVersions []map[string]BlockedVersion -// BlockedModules a list of blocked modules. -type BlockedModules []BlockedModule - -// Get returns the modules that are blocked. -func (b BlockedModules) Get() []string { +// Get returns the module names that are blocked. +func (b BlockedVersions) Get() []string { modules := make([]string, len(b)) - for i := range b { - for module := range b[i] { - modules[i] = module + for n := range b { + for module := range b[n] { + modules[n] = module break } } @@ -94,46 +157,47 @@ func (b BlockedModules) Get() []string { return modules } -// RecommendedModules will return a list of recommended modules for the -// package provided. If there is no recommendation nil will be returned. -func (b BlockedModules) RecommendedModules(pkg string) *Recommendations { - for i := range b { - for blockedModule, recommendations := range b[i] { - if strings.HasPrefix(strings.ToLower(pkg), strings.ToLower(blockedModule)) && recommendations.HasRecommendations() { - return &recommendations +// GetBlockReason returns a block version if one is set for the provided linted module name. +func (b BlockedVersions) GetBlockReason(lintedModuleName string) *BlockedVersion { + for _, blockedModule := range b { + for blockedModuleName, blockedVersion := range blockedModule { + if strings.TrimSpace(lintedModuleName) == strings.TrimSpace(blockedModuleName) { + return &blockedVersion } - - break } } return nil } -// IsBlockedPackage returns true if the package name is in -// the blocked modules list. -func (b BlockedModules) IsBlockedPackage(pkg string) bool { - blockedModules := b.Get() - for i := range blockedModules { - if strings.HasPrefix(strings.ToLower(pkg), strings.ToLower(blockedModules[i])) { - return true +// BlockedModules a list of blocked modules. +type BlockedModules []map[string]BlockedModule + +// Get returns the module names that are blocked. +func (b BlockedModules) Get() []string { + modules := make([]string, len(b)) + + for n := range b { + for module := range b[n] { + modules[n] = module + break } } - return false + return modules } -// IsBlockedModule returns true if the given module name is in the -// blocked modules list. 
-func (b BlockedModules) IsBlockedModule(module string) bool { - blockedModules := b.Get() - for i := range blockedModules { - if strings.EqualFold(module, strings.TrimSpace(blockedModules[i])) { - return true +// GetBlockReason returns a block module if one is set for the provided linted module name. +func (b BlockedModules) GetBlockReason(lintedModuleName string) *BlockedModule { + for _, blockedModule := range b { + for blockedModuleName, blockedModule := range blockedModule { + if strings.TrimSpace(lintedModuleName) == strings.TrimSpace(blockedModuleName) { + return &blockedModule + } } } - return false + return nil } // Allowed is a list of modules and module @@ -145,10 +209,11 @@ type Allowed struct { // IsAllowedModule returns true if the given module // name is in the allowed modules list. -func (a *Allowed) IsAllowedModule(module string) bool { +func (a *Allowed) IsAllowedModule(moduleName string) bool { allowedModules := a.Modules + for i := range allowedModules { - if strings.EqualFold(module, strings.TrimSpace(allowedModules[i])) { + if strings.TrimSpace(moduleName) == strings.TrimSpace(allowedModules[i]) { return true } } @@ -158,10 +223,11 @@ func (a *Allowed) IsAllowedModule(module string) bool { // IsAllowedModuleDomain returns true if the given modules domain is // in the allowed module domains list. -func (a *Allowed) IsAllowedModuleDomain(module string) bool { +func (a *Allowed) IsAllowedModuleDomain(moduleName string) bool { allowedDomains := a.Domains + for i := range allowedDomains { - if strings.HasPrefix(strings.ToLower(module), strings.TrimSpace(strings.ToLower(allowedDomains[i]))) { + if strings.HasPrefix(strings.TrimSpace(strings.ToLower(moduleName)), strings.TrimSpace(strings.ToLower(allowedDomains[i]))) { return true } } @@ -172,7 +238,9 @@ func (a *Allowed) IsAllowedModuleDomain(module string) bool { // Blocked is a list of modules that are // blocked and not to be used. type Blocked struct { - Modules BlockedModules `yaml:"modules"` + Modules BlockedModules `yaml:"modules"` + Versions BlockedVersions `yaml:"versions"` + LocalReplaceDirectives bool `yaml:"local_replace_directives"` } // Configuration of gomodguard allow and block lists. @@ -192,46 +260,36 @@ type Result struct { // String returns the filename, line // number and reason of a Result. func (r *Result) String() string { - return fmt.Sprintf("%s:%d: %s", r.FileName, r.LineNumber, r.Reason) + return fmt.Sprintf("%s:%d:1 %s", r.FileName, r.LineNumber, r.Reason) } // Processor processes Go files. type Processor struct { - config Configuration - logger *log.Logger - modfile *modfile.File - blockedModulesFromModFile []string - result []Result + Config *Configuration + Modfile *modfile.File + blockedModulesFromModFile map[string][]string + Result []Result } // NewProcessor will create a Processor to lint blocked packages. 
-func NewProcessor(config Configuration, logger *log.Logger) (*Processor, error) { +func NewProcessor(config *Configuration) (*Processor, error) { goModFileBytes, err := loadGoModFile() if err != nil { - errMsg := fmt.Sprintf("unable to read %s file: %s", goModFilename, err) - - return nil, fmt.Errorf(errMsg) + return nil, fmt.Errorf(errReadingGoModFile, goModFilename, err) } - mfile, err := modfile.Parse(goModFilename, goModFileBytes, nil) + modFile, err := modfile.Parse(goModFilename, goModFileBytes, nil) if err != nil { - errMsg := fmt.Sprintf("unable to parse %s file: %s", goModFilename, err) - - return nil, fmt.Errorf(errMsg) + return nil, fmt.Errorf(errParsingGoModFile, goModFilename, err) } - logger.Printf("info: allowed modules, %+v", config.Allowed.Modules) - logger.Printf("info: allowed module domains, %+v", config.Allowed.Domains) - logger.Printf("info: blocked modules, %+v", config.Blocked.Modules.Get()) - p := &Processor{ - config: config, - logger: logger, - modfile: mfile, - result: []Result{}, + Config: config, + Modfile: modFile, + Result: []Result{}, } - p.setBlockedModulesFromModFile() + p.SetBlockedModules() return p, nil } @@ -239,18 +297,10 @@ func NewProcessor(config Configuration, logger *log.Logger) (*Processor, error) // ProcessFiles takes a string slice with file names (full paths) // and lints them. func (p *Processor) ProcessFiles(filenames []string) []Result { - pluralModuleMsg := "s" - if len(p.blockedModulesFromModFile) == 1 { - pluralModuleMsg = "" - } - - p.logger.Printf("info: found `%d` blocked module%s in the %s file, %+v", - len(p.blockedModulesFromModFile), pluralModuleMsg, goModFilename, p.blockedModulesFromModFile) - for _, filename := range filenames { data, err := ioutil.ReadFile(filename) if err != nil { - p.result = append(p.result, Result{ + p.Result = append(p.Result, Result{ FileName: filename, LineNumber: 0, Reason: fmt.Sprintf("unable to read file, file cannot be linted (%s)", err.Error()), @@ -260,7 +310,7 @@ func (p *Processor) ProcessFiles(filenames []string) []Result { p.process(filename, data) } - return p.result + return p.Result } // process file imports and add lint error if blocked package is imported. 
@@ -269,7 +319,7 @@ func (p *Processor) process(filename string, data []byte) { file, err := parser.ParseFile(fileSet, filename, data, parser.ParseComments) if err != nil { - p.result = append(p.result, Result{ + p.Result = append(p.Result, Result{ FileName: filename, LineNumber: 0, Reason: fmt.Sprintf("invalid syntax, file cannot be linted (%s)", err.Error()), @@ -279,23 +329,16 @@ func (p *Processor) process(filename string, data []byte) { } imports := file.Imports - for i := range imports { - importedPkg := strings.TrimSpace(strings.Trim(imports[i].Path.Value, "\"")) - if p.isBlockedPackageFromModFile(importedPkg) { - reason := "" - - if p.config.Blocked.Modules.IsBlockedPackage(importedPkg) { - reason = fmt.Sprintf(blockedReasonInBlockedList, importedPkg) - } else { - reason = fmt.Sprintf(blockedReasonNotInAllowedList, importedPkg) - } + for n := range imports { + importedPkg := strings.TrimSpace(strings.Trim(imports[n].Path.Value, "\"")) - recommendedModules := p.config.Blocked.Modules.RecommendedModules(importedPkg) - if recommendedModules != nil { - reason += fmt.Sprintf(" %s", recommendedModules.String()) - } + blockReasons := p.isBlockedPackageFromModFile(importedPkg) + if blockReasons == nil { + continue + } - p.addError(fileSet, imports[i].Pos(), reason) + for _, blockReason := range blockReasons { + p.addError(fileSet, imports[n].Pos(), blockReason) } } } @@ -305,7 +348,7 @@ func (p *Processor) process(filename string, data []byte) { func (p *Processor) addError(fileset *token.FileSet, pos token.Pos, reason string) { position := fileset.Position(pos) - p.result = append(p.result, Result{ + p.Result = append(p.Result, Result{ FileName: position.Filename, LineNumber: position.Line, Position: position, @@ -313,64 +356,88 @@ func (p *Processor) addError(fileset *token.FileSet, pos token.Pos, reason strin }) } -// setBlockedModules determines which modules are blocked by reading -// the go.mod file and comparing the require modules to the allowed modules. -func (p *Processor) setBlockedModulesFromModFile() { - blockedModules := make([]string, 0, len(p.modfile.Require)) - requiredModules := p.modfile.Require - lintedModule := p.modfile.Module.Mod.Path - - for i := range requiredModules { - if !requiredModules[i].Indirect { - requiredModule := strings.TrimSpace(requiredModules[i].Mod.Path) +// SetBlockedModules determines and sets which modules are blocked by reading +// the go.mod file of the module that is being linted. +// +// It works by iterating over the dependant modules specified in the require +// directive, checking if the module domain or full name is in the allowed list. +func (p *Processor) SetBlockedModules() { //nolint:gocognit + blockedModules := make(map[string][]string, len(p.Modfile.Require)) + currentModuleName := p.Modfile.Module.Mod.Path + lintedModules := p.Modfile.Require + replacedModules := p.Modfile.Replace + + for i := range lintedModules { + if lintedModules[i].Indirect { + continue // Do not lint indirect modules. 
+ } - if p.config.Allowed.IsAllowedModuleDomain(requiredModule) { - continue - } + lintedModuleName := strings.TrimSpace(lintedModules[i].Mod.Path) + lintedModuleVersion := strings.TrimSpace(lintedModules[i].Mod.Version) - if p.config.Allowed.IsAllowedModule(requiredModule) { - continue - } + var isAllowed bool - requiredModuleIsBlocked := p.config.Blocked.Modules.IsBlockedModule(requiredModule) + switch { + case len(p.Config.Allowed.Modules) == 0 && len(p.Config.Allowed.Domains) == 0: + isAllowed = true + case p.Config.Allowed.IsAllowedModuleDomain(lintedModuleName): + isAllowed = true + case p.Config.Allowed.IsAllowedModule(lintedModuleName): + isAllowed = true + default: + isAllowed = false + } - if len(p.config.Allowed.Modules) == 0 && - len(p.config.Allowed.Domains) == 0 && - !requiredModuleIsBlocked { - continue - } + blockModuleReason := p.Config.Blocked.Modules.GetBlockReason(lintedModuleName) + blockVersionReason := p.Config.Blocked.Versions.GetBlockReason(lintedModuleName) - // If the go.mod file being linted is a recommended module of a blocked module - // and it imports that blocked module, do not set as a blocked. This means - // that the linted module wraps that blocked module - if requiredModuleIsBlocked { - recommendedModules := p.config.Blocked.Modules.RecommendedModules(requiredModule) + if !isAllowed && blockModuleReason == nil && blockVersionReason == nil { + blockedModules[lintedModuleName] = append(blockedModules[lintedModuleName], blockReasonNotInAllowedList) + continue + } - if recommendedModules.IsRecommended(lintedModule) { - continue - } - } + if blockModuleReason != nil && !blockModuleReason.IsCurrentModuleARecommendation(currentModuleName) { + blockedModules[lintedModuleName] = append(blockedModules[lintedModuleName], fmt.Sprintf("%s %s", blockReasonInBlockedList, blockModuleReason.Message())) + } - blockedModules = append(blockedModules, requiredModule) + if blockVersionReason != nil && blockVersionReason.IsLintedModuleVersionBlocked(lintedModuleVersion) { + blockedModules[lintedModuleName] = append(blockedModules[lintedModuleName], fmt.Sprintf("%s %s", blockReasonInBlockedList, blockVersionReason.Message(lintedModuleVersion))) } } - if len(blockedModules) > 0 { - p.blockedModulesFromModFile = blockedModules + // Replace directives with local paths are blocked. + // Filesystem paths found in "replace" directives are represented by a path with an empty version. + // https://github.com/golang/mod/blob/bc388b264a244501debfb9caea700c6dcaff10e2/module/module.go#L122-L124 + if p.Config.Blocked.LocalReplaceDirectives { + for i := range replacedModules { + replacedModuleOldName := strings.TrimSpace(replacedModules[i].Old.Path) + replacedModuleNewName := strings.TrimSpace(replacedModules[i].New.Path) + replacedModuleNewVersion := strings.TrimSpace(replacedModules[i].New.Version) + + if replacedModuleNewName != "" && replacedModuleNewVersion == "" { + blockedModules[replacedModuleOldName] = append(blockedModules[replacedModuleOldName], blockReasonHasLocalReplaceDirective) + } + } } + + p.blockedModulesFromModFile = blockedModules } -// isBlockedPackageFromModFile returns true if the imported packages -// module is in the go.mod file and was blocked. 
-func (p *Processor) isBlockedPackageFromModFile(pkg string) bool { - blockedModulesFromModFile := p.blockedModulesFromModFile - for i := range blockedModulesFromModFile { - if strings.HasPrefix(strings.ToLower(pkg), strings.ToLower(blockedModulesFromModFile[i])) { - return true +// isBlockedPackageFromModFile returns the block reason if the package is blocked. +func (p *Processor) isBlockedPackageFromModFile(packageName string) []string { + for blockedModuleName, blockReasons := range p.blockedModulesFromModFile { + if strings.HasPrefix(strings.TrimSpace(packageName), strings.TrimSpace(blockedModuleName)) { + formattedReasons := make([]string, 0, len(blockReasons)) + + for _, blockReason := range blockReasons { + formattedReasons = append(formattedReasons, fmt.Sprintf(blockReason, packageName)) + } + + return formattedReasons } } - return false + return nil } func loadGoModFile() ([]byte, error) { @@ -386,6 +453,7 @@ func loadGoModFile() ([]byte, error) { _, _ = buf.ReadFrom(stdout) goEnv := make(map[string]string) + err := json.Unmarshal(buf.Bytes(), &goEnv) if err != nil { return ioutil.ReadFile(goModFilename) diff --git a/vendor/github.com/ryanrolds/sqlclosecheck/LICENSE b/vendor/github.com/ryanrolds/sqlclosecheck/LICENSE new file mode 100644 index 0000000..77b261d --- /dev/null +++ b/vendor/github.com/ryanrolds/sqlclosecheck/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2020 Ryan R. Olds + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file diff --git a/vendor/github.com/ryanrolds/sqlclosecheck/pkg/analyzer/analyzer.go b/vendor/github.com/ryanrolds/sqlclosecheck/pkg/analyzer/analyzer.go new file mode 100644 index 0000000..bc42dfb --- /dev/null +++ b/vendor/github.com/ryanrolds/sqlclosecheck/pkg/analyzer/analyzer.go @@ -0,0 +1,311 @@ +package analyzer + +import ( + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/buildssa" + "golang.org/x/tools/go/ssa" +) + +const ( + rowsName = "Rows" + stmtName = "Stmt" + closeMethod = "Close" +) + +var ( + sqlPackages = []string{ + "database/sql", + "github.com/jmoiron/sqlx", + } +) + +func NewAnalyzer() *analysis.Analyzer { + return &analysis.Analyzer{ + Name: "sqlclosecheck", + Doc: "Checks that sql.Rows and sql.Stmt are closed.", + Run: run, + Requires: []*analysis.Analyzer{ + buildssa.Analyzer, + }, + } +} + +func run(pass *analysis.Pass) (interface{}, error) { + pssa := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) + + // Build list of types we are looking for + targetTypes := getTargetTypes(pssa, sqlPackages) + + // If non of the types are found, skip + if len(targetTypes) == 0 { + return nil, nil + } + + funcs := pssa.SrcFuncs + for _, f := range funcs { + for _, b := range f.Blocks { + for i := range b.Instrs { + // Check if instruction is call that returns a target type + targetValues := getTargetTypesValues(b, i, targetTypes) + if len(targetValues) == 0 { + continue + } + + // log.Printf("%s", f.Name()) + + // For each found target check if they are closed and deferred + for _, targetValue := range targetValues { + refs := (*targetValue.value).Referrers() + isClosed := checkClosed(refs, targetTypes) + if !isClosed { + pass.Reportf((targetValue.instr).Pos(), "Rows/Stmt was not closed") + } + + checkDeferred(pass, refs, targetTypes, false) + } + } + } + } + + return nil, nil +} + +func getTargetTypes(pssa *buildssa.SSA, targetPackages []string) []*types.Pointer { + targets := []*types.Pointer{} + + for _, sqlPkg := range targetPackages { + pkg := pssa.Pkg.Prog.ImportedPackage(sqlPkg) + if pkg == nil { + // the SQL package being checked isn't imported + return targets + } + + rowsType := getTypePointerFromName(pkg, rowsName) + if rowsType != nil { + targets = append(targets, rowsType) + } + + stmtType := getTypePointerFromName(pkg, stmtName) + if stmtType != nil { + targets = append(targets, stmtType) + } + } + + return targets +} + +func getTypePointerFromName(pkg *ssa.Package, name string) *types.Pointer { + pkgType := pkg.Type(name) + if pkgType == nil { + // this package does not use Rows/Stmt + return nil + } + + obj := pkgType.Object() + named, ok := obj.Type().(*types.Named) + if !ok { + return nil + } + + return types.NewPointer(named) +} + +type targetValue struct { + value *ssa.Value + instr ssa.Instruction +} + +func getTargetTypesValues(b *ssa.BasicBlock, i int, targetTypes []*types.Pointer) []targetValue { + targetValues := []targetValue{} + + instr := b.Instrs[i] + call, ok := instr.(*ssa.Call) + if !ok { + return targetValues + } + + signature := call.Call.Signature() + results := signature.Results() + for i := 0; i < results.Len(); i++ { + v := results.At(i) + varType := v.Type() + + for _, targetType := range targetTypes { + if !types.Identical(varType, targetType) { + continue + } + + for _, cRef := range *call.Referrers() { + switch instr := cRef.(type) { + case *ssa.Call: + if len(instr.Call.Args) >= 1 && types.Identical(instr.Call.Args[0].Type(), targetType) { + targetValues = 
append(targetValues, targetValue{ + value: &instr.Call.Args[0], + instr: call, + }) + } + case ssa.Value: + if types.Identical(instr.Type(), targetType) { + targetValues = append(targetValues, targetValue{ + value: &instr, + instr: call, + }) + } + } + } + } + } + + return targetValues +} + +func checkClosed(refs *[]ssa.Instruction, targetTypes []*types.Pointer) bool { + numInstrs := len(*refs) + for idx, ref := range *refs { + // log.Printf("%T - %s", ref, ref) + + action := getAction(ref, targetTypes) + switch action { + case "closed": + return true + case "passed": + // Passed and not used after + if numInstrs == idx+1 { + return true + } + case "returned": + return true + case "handled": + return true + default: + // log.Printf(action) + } + } + + return false +} + +func getAction(instr ssa.Instruction, targetTypes []*types.Pointer) string { + switch instr := instr.(type) { + case *ssa.Defer: + if instr.Call.Value == nil { + return "unvalued defer" + } + + name := instr.Call.Value.Name() + if name == closeMethod { + return "closed" + } + case *ssa.Call: + if instr.Call.Value == nil { + return "unvalued call" + } + + isTarget := false + receiver := instr.Call.StaticCallee().Signature.Recv() + if receiver != nil { + isTarget = isTargetType(receiver.Type(), targetTypes) + } + + name := instr.Call.Value.Name() + if isTarget && name == closeMethod { + return "closed" + } + + if !isTarget { + return "passed" + } + case *ssa.Phi: + return "passed" + case *ssa.MakeInterface: + return "passed" + case *ssa.Store: + if len(*instr.Addr.Referrers()) == 0 { + return "noop" + } + + for _, aRef := range *instr.Addr.Referrers() { + if c, ok := aRef.(*ssa.MakeClosure); ok { + f := c.Fn.(*ssa.Function) + for _, b := range f.Blocks { + if checkClosed(&b.Instrs, targetTypes) { + return "handled" + } + } + } + } + case *ssa.UnOp: + instrType := instr.Type() + for _, targetType := range targetTypes { + if types.Identical(instrType, targetType) { + if checkClosed(instr.Referrers(), targetTypes) { + return "handled" + } + } + } + case *ssa.FieldAddr: + if checkClosed(instr.Referrers(), targetTypes) { + return "handled" + } + case *ssa.Return: + return "returned" + default: + // log.Printf("%s", instr) + } + + return "unhandled" +} + +func checkDeferred(pass *analysis.Pass, instrs *[]ssa.Instruction, targetTypes []*types.Pointer, inDefer bool) { + for _, instr := range *instrs { + switch instr := instr.(type) { + case *ssa.Defer: + if instr.Call.Value != nil && instr.Call.Value.Name() == closeMethod { + return + } + case *ssa.Call: + if instr.Call.Value != nil && instr.Call.Value.Name() == closeMethod { + if !inDefer { + pass.Reportf(instr.Pos(), "Close should use defer") + } + + return + } + case *ssa.Store: + if len(*instr.Addr.Referrers()) == 0 { + return + } + + for _, aRef := range *instr.Addr.Referrers() { + if c, ok := aRef.(*ssa.MakeClosure); ok { + f := c.Fn.(*ssa.Function) + + for _, b := range f.Blocks { + checkDeferred(pass, &b.Instrs, targetTypes, true) + } + } + } + case *ssa.UnOp: + instrType := instr.Type() + for _, targetType := range targetTypes { + if types.Identical(instrType, targetType) { + checkDeferred(pass, instr.Referrers(), targetTypes, inDefer) + } + } + case *ssa.FieldAddr: + checkDeferred(pass, instr.Referrers(), targetTypes, inDefer) + } + } +} + +func isTargetType(t types.Type, targetTypes []*types.Pointer) bool { + for _, targetType := range targetTypes { + if types.Identical(t, targetType) { + return true + } + } + + return false +} diff --git 
a/vendor/github.com/securego/gosec/v2/Dockerfile b/vendor/github.com/securego/gosec/v2/Dockerfile index a874697..c937d52 100644 --- a/vendor/github.com/securego/gosec/v2/Dockerfile +++ b/vendor/github.com/securego/gosec/v2/Dockerfile @@ -8,7 +8,8 @@ RUN go mod download RUN make build-linux FROM golang:${GO_VERSION}-alpine -RUN apk add --update --no-cache ca-certificates git gcc libc-dev +RUN apk add --update --no-cache ca-certificates bash git gcc libc-dev ENV GO111MODULE on COPY --from=builder /build/gosec /bin/gosec -ENTRYPOINT ["/bin/gosec"] +COPY entrypoint.sh /bin/entrypoint.sh +ENTRYPOINT ["/bin/entrypoint.sh"] diff --git a/vendor/github.com/securego/gosec/v2/Makefile b/vendor/github.com/securego/gosec/v2/Makefile index 217651c..b434b60 100644 --- a/vendor/github.com/securego/gosec/v2/Makefile +++ b/vendor/github.com/securego/gosec/v2/Makefile @@ -11,7 +11,7 @@ GOBIN ?= $(GOPATH)/bin GOLINT ?= $(GOBIN)/golint GOSEC ?= $(GOBIN)/gosec GINKGO ?= $(GOBIN)/ginkgo -GO_VERSION = 1.14 +GO_VERSION = 1.15 default: $(MAKE) build diff --git a/vendor/github.com/securego/gosec/v2/README.md b/vendor/github.com/securego/gosec/v2/README.md index 52be734..4237dde 100644 --- a/vendor/github.com/securego/gosec/v2/README.md +++ b/vendor/github.com/securego/gosec/v2/README.md @@ -28,8 +28,8 @@ You may obtain a copy of the License [here](http://www.apache.org/licenses/LICEN ### CI Installation ```bash -# binary will be $GOPATH/bin/gosec -curl -sfL https://raw.githubusercontent.com/securego/gosec/master/install.sh | sh -s -- -b $GOPATH/bin vX.Y.Z +# binary will be $(go env GOPATH)/bin/gosec +curl -sfL https://raw.githubusercontent.com/securego/gosec/master/install.sh | sh -s -- -b $(go env GOPATH)/bin vX.Y.Z # or install it into ./bin/ curl -sfL https://raw.githubusercontent.com/securego/gosec/master/install.sh | sh -s vX.Y.Z @@ -41,7 +41,7 @@ wget -O - -q https://raw.githubusercontent.com/securego/gosec/master/install.sh # then you will have to download a tar.gz file for your operating system instead of a binary file wget https://github.com/securego/gosec/releases/download/vX.Y.Z/gosec_vX.Y.Z_OS.tar.gz -# The file will be in the current folder where you run the command +# The file will be in the current folder where you run the command # and you can check the checksum like this echo " gosec_vX.Y.Z_OS.tar.gz" | sha256sum -c - @@ -66,7 +66,7 @@ jobs: env: GO111MODULE: on steps: - - name: Checkout Source + - name: Checkout Source uses: actions/checkout@v2 - name: Run Gosec Security Scanner uses: securego/gosec@master @@ -74,10 +74,46 @@ jobs: args: ./... ``` +### Integrating with code scanning + +You can [integrate third-party code analysis tools](https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/integrating-with-code-scanning) with GitHub code scanning by uploading data as SARIF files. + +The workflow shows an example of running the `gosec` as a step in a GitHub action workflow which outputs the `results.sarif` file. The workflow then uploads the `results.sarif` file to GitHub using the `upload-sarif` action. + +```yaml +name: "Security Scan" + +# Run workflow each time code is pushed to your repository and on a schedule. +# The scheduled workflow runs every at 00:00 on Sunday UTC time. 
+on: + push: + schedule: + - cron: '0 0 * * 0' + +jobs: + tests: + runs-on: ubuntu-latest + env: + GO111MODULE: on + steps: + - name: Checkout Source + uses: actions/checkout@v2 + - name: Run Gosec Security Scanner + uses: securego/gosec@master + with: + # we let the report trigger content trigger a failure using the GitHub Security features. + args: '-no-fail -fmt sarif -out results.sarif ./...' + - name: Upload SARIF file + uses: github/codeql-action/upload-sarif@v1 + with: + # Path to SARIF file relative to the root of the repository + sarif_file: results.sarif +``` + ### Local Installation ```bash -go get github.com/securego/gosec/cmd/gosec +go get github.com/securego/gosec/v2/cmd/gosec ``` ## Usage @@ -107,18 +143,18 @@ directory you can supply `./...` as the input argument. - G302: Poor file permissions used with chmod - G303: Creating tempfile using a predictable path - G304: File path provided as taint input -- G305: File traversal when extracting zip archive +- G305: File traversal when extracting zip/tar archive - G306: Poor file permissions used when writing to a new file - G307: Deferring a method which returns an error - G401: Detect the usage of DES, RC4, MD5 or SHA1 - G402: Look for bad TLS connection settings - G403: Ensure minimum RSA key length of 2048 bits - G404: Insecure random number source (rand) -- G501: Import blacklist: crypto/md5 -- G502: Import blacklist: crypto/des -- G503: Import blacklist: crypto/rc4 -- G504: Import blacklist: net/http/cgi -- G505: Import blacklist: crypto/sha1 +- G501: Import blocklist: crypto/md5 +- G502: Import blocklist: crypto/des +- G503: Import blocklist: crypto/rc4 +- G504: Import blocklist: net/http/cgi +- G505: Import blocklist: crypto/sha1 - G601: Implicit memory aliasing of items from a range statement ### Retired rules @@ -139,7 +175,7 @@ $ gosec -exclude=G303 ./... ``` ### CWE Mapping -Every issue detected by `gosec` is mapped to a [CWE (Common Weakness Enumeration)](http://cwe.mitre.org/data/index.html) which describes in more generic terms the vulnerability. The exact mapping can be found [here](https://github.com/securego/gosec/blob/53be8dd8644ee48802114178cff6eb7e29757414/issue.go#L49). +Every issue detected by `gosec` is mapped to a [CWE (Common Weakness Enumeration)](http://cwe.mitre.org/data/index.html) which describes in more generic terms the vulnerability. The exact mapping can be found [here](https://github.com/securego/gosec/blob/master/issue.go#L49). ### Configuration @@ -161,7 +197,7 @@ A number of global settings can be provided in a configuration file as follows: # Run with a global configuration file $ gosec -conf config.json . ``` -Also some rules accept configuration. For instance on rule `G104`, it is possible to define packages along with a list +Also some rules accept configuration. For instance on rule `G104`, it is possible to define packages along with a list of functions which will be skipped when auditing the not checked errors: ```JSON @@ -178,22 +214,22 @@ You can also configure the hard-coded credentials rule `G101` with additional pa { "G101": { "pattern": "(?i)passwd|pass|password|pwd|secret|private_key|token", - "ingnore_entropy": false, + "ignore_entropy": false, "entropy_threshold": "80.0", "per_char_threshold": "3.0", - "trucate": "32" + "truncate": "32" } } ``` -### Dependencies +### Dependencies gosec will fetch automatically the dependencies of the code which is being analyzed when go module is turned on (e.g.` GO111MODULE=on`). 
If this is not the case, the dependencies need to be explicitly downloaded by running the `go get -d` command before the scan. ### Excluding test files and folders -gosec will ignore test files across all packages and any dependencies in your vendor directory. +gosec will ignore test files across all packages and any dependencies in your vendor directory. The scanning of test files can be enabled with the following flag: @@ -233,7 +269,7 @@ func main(){ ``` When a specific false positive has been identified and verified as safe, you may wish to suppress only that single rule (or a specific set of rules) -within a section of code, while continuing to scan for other problems. To do this, you can list the rule(s) to be suppressed within +within a section of code, while continuing to scan for other problems. To do this, you can list the rule(s) to be suppressed within the `#nosec` annotation, e.g: `/* #nosec G401 */` or `// #nosec G201 G202 G203` In some cases you may also want to revisit places where `#nosec` annotations @@ -300,12 +336,13 @@ You can also build locally the docker image by using the command: make image ``` -You can run the `gosec` tool in a container against your local Go project. You only have to mount the project +You can run the `gosec` tool in a container against your local Go project. You only have to mount the project into a volume as follows: ```bash -docker run -it -v /:/ securego/gosec //... +docker run --rm -it -w // -v /:/ securego/gosec //... ``` +**Note:** the current working directory needs to be set with `-w` option in order to get successfully resolved the dependencies from go module file ### Generate TLS rule @@ -314,7 +351,7 @@ The configuration of TLS rule can be generated from [Mozilla's TLS ciphers recom First you need to install the generator tool: ```bash -go get github.com/securego/gosec/cmd/tlsconfig/... +go get github.com/securego/gosec/v2/cmd/tlsconfig/... ``` You can invoke now the `go generate` in the root of the project: @@ -324,3 +361,7 @@ go generate ./... ``` This will generate the `rules/tls_config.go` file which will contain the current ciphers recommendation from Mozilla. + +## Who is using gosec? + +This is a [list](USERS.md) with some of the gosec's users. diff --git a/vendor/github.com/securego/gosec/v2/USERS.md b/vendor/github.com/securego/gosec/v2/USERS.md new file mode 100644 index 0000000..eac13d0 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/USERS.md @@ -0,0 +1,26 @@ +# Users + +This is a list of gosec's users. Please send a pull request with your organisation or project name if you are using gosec. + +## Companies + +1. [Gitlab](https://docs.gitlab.com/ee/user/application_security/sast/) +2. [CloudBees](https://cloudbees.com) +3. [VMware](https://www.vmware.com) +4. [Codacy](https://support.codacy.com/hc/en-us/articles/213632009-Engines) +5. [Coinbase](https://github.com/coinbase/watchdog/blob/master/Makefile#L12) +6. [RedHat/OpenShift](https://github.com/openshift/openshift-azure) +7. [Guardalis](https://www.guardrails.io/) +8. [1Password](https://github.com/1Password/srp) +9. [PingCAP/tidb](https://github.com/pingcap/tidb) + +## Projects + +1. [golangci-lint](https://github.com/golangci/golangci-lint) +2. [Kubenetes](https://github.com/kubernetes/kubernetes) (via golangci) +3. [caddy](https://github.com/caddyserver/caddy) (via golangci) +4. [Jenkins X](https://github.com/jenkins-x/jx/blob/bdc51840a41b75776159c1c7b7faa1cf477be473/hack/linter.sh#L25) +5. [HuskyCI](https://huskyci.opensource.globo.com/) +6. 
[GolangCI](https://golangci.com/) +7. [semgrep.live](https://semgrep.live/) +8. [gofiber](https://github.com/gofiber/fiber) diff --git a/vendor/github.com/securego/gosec/v2/analyzer.go b/vendor/github.com/securego/gosec/v2/analyzer.go index ca4440c..d4aae3a 100644 --- a/vendor/github.com/securego/gosec/v2/analyzer.go +++ b/vendor/github.com/securego/gosec/v2/analyzer.go @@ -125,7 +125,12 @@ func (gosec *Analyzer) LoadRules(ruleDefinitions map[string]RuleBuilder) { // Process kicks off the analysis process for a given package func (gosec *Analyzer) Process(buildTags []string, packagePaths ...string) error { - config := gosec.pkgConfig(buildTags) + config := &packages.Config{ + Mode: LoadMode, + BuildFlags: buildTags, + Tests: gosec.tests, + } + for _, pkgPath := range packagePaths { pkgs, err := gosec.load(pkgPath, config) if err != nil { @@ -145,19 +150,6 @@ func (gosec *Analyzer) Process(buildTags []string, packagePaths ...string) error return nil } -func (gosec *Analyzer) pkgConfig(buildTags []string) *packages.Config { - flags := []string{} - if len(buildTags) > 0 { - tagsFlag := "-tags=" + strings.Join(buildTags, " ") - flags = append(flags, tagsFlag) - } - return &packages.Config{ - Mode: LoadMode, - BuildFlags: flags, - Tests: gosec.tests, - } -} - func (gosec *Analyzer) load(pkgPath string, conf *packages.Config) ([]*packages.Package, error) { abspath, err := GetPkgAbsPath(pkgPath) if err != nil { @@ -166,7 +158,11 @@ func (gosec *Analyzer) load(pkgPath string, conf *packages.Config) ([]*packages. } gosec.logger.Println("Import directory:", abspath) - basePackage, err := build.Default.ImportDir(pkgPath, build.ImportComment) + // step 1/3 create build context. + buildD := build.Default + // step 2/3: add build tags to get env dependent files into basePackage. + buildD.BuildTags = conf.BuildFlags + basePackage, err := buildD.ImportDir(pkgPath, build.ImportComment) if err != nil { return []*packages.Package{}, fmt.Errorf("importing dir %q: %v", pkgPath, err) } @@ -188,6 +184,8 @@ func (gosec *Analyzer) load(pkgPath string, conf *packages.Config) ([]*packages. } } + // step 3/3 remove build tags from conf to proceed build correctly. + conf.BuildFlags = nil pkgs, err := packages.Load(conf, packageFiles...) if err != nil { return []*packages.Package{}, fmt.Errorf("loading files from package %q: %v", pkgPath, err) diff --git a/vendor/github.com/securego/gosec/v2/call_list.go b/vendor/github.com/securego/gosec/v2/call_list.go index 115c6c8..4b3fcf0 100644 --- a/vendor/github.com/securego/gosec/v2/call_list.go +++ b/vendor/github.com/securego/gosec/v2/call_list.go @@ -70,7 +70,7 @@ func (c CallList) ContainsPointer(selector, indent string) bool { } // ContainsPkgCallExpr resolves the call expression name and type, and then further looks -// up the package path for that type. Finally, it determines if the call exists within the call list +// up the package path for that type. Finally, it determines if the call exists within the call list func (c CallList) ContainsPkgCallExpr(n ast.Node, ctx *Context, stripVendor bool) *ast.CallExpr { selector, ident, err := GetCallInfo(n, ctx) if err != nil { diff --git a/vendor/github.com/securego/gosec/v2/entrypoint.sh b/vendor/github.com/securego/gosec/v2/entrypoint.sh new file mode 100644 index 0000000..4dc0467 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/entrypoint.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash + +# Expand the arguments into an array of strings. 
This is requires because the GitHub action +# provides all arguments concatenated as a single string. +ARGS=("$@") + +/bin/gosec ${ARGS[*]} diff --git a/vendor/github.com/securego/gosec/v2/go.mod b/vendor/github.com/securego/gosec/v2/go.mod index edfa343..f09e262 100644 --- a/vendor/github.com/securego/gosec/v2/go.mod +++ b/vendor/github.com/securego/gosec/v2/go.mod @@ -2,18 +2,17 @@ module github.com/securego/gosec/v2 require ( github.com/davecgh/go-spew v1.1.1 // indirect - github.com/golang/protobuf v1.3.2 // indirect - github.com/gookit/color v1.2.4 + github.com/gookit/color v1.3.6 github.com/kr/pretty v0.1.0 // indirect - github.com/mozilla/tls-observatory v0.0.0-20200317151703-4fa42e1c2dee - github.com/nbutton23/zxcvbn-go v0.0.0-20180912185939-ae427f1e4c1d - github.com/onsi/ginkgo v1.12.0 - github.com/onsi/gomega v1.9.0 + github.com/lib/pq v1.9.0 // indirect + github.com/mozilla/tls-observatory v0.0.0-20201209171846-0547674fceff + github.com/nbutton23/zxcvbn-go v0.0.0-20201221231540-e56b841a3c88 + github.com/onsi/ginkgo v1.14.2 + github.com/onsi/gomega v1.10.4 github.com/stretchr/testify v1.4.0 // indirect - golang.org/x/text v0.3.2 // indirect - golang.org/x/tools v0.0.0-20200331202046-9d5940d49312 + golang.org/x/tools v0.0.0-20210102185154-773b96fafca2 gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect - gopkg.in/yaml.v2 v2.2.8 + gopkg.in/yaml.v2 v2.4.0 ) go 1.14 diff --git a/vendor/github.com/securego/gosec/v2/go.sum b/vendor/github.com/securego/gosec/v2/go.sum index fff56a3..c5fcee8 100644 --- a/vendor/github.com/securego/gosec/v2/go.sum +++ b/vendor/github.com/securego/gosec/v2/go.sum @@ -3,12 +3,23 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= +github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.2 h1:6nsPYzhq5kReh6QImI3k5qWzO4PEbvbIW2cwSfR/6xs= -github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/gookit/color v1.2.4 h1:xOYBan3Fwlrqj1M1UN2TlHOCRiek3bGzWf/vPnJ1roE= -github.com/gookit/color v1.2.4/go.mod h1:AhIE+pS6D4Ql0SQWbBeXPHw7gY0/sjHoA4s/n1KB7xg= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.2 h1:+Z5KGCizgyZCbGh1KZqA0fcLLkwbsjIzS4aV2v7wJX0= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod 
h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0 h1:xsAVV57WRhGj6kEIi8ReJzQlHHqcBYCElAvkovg3B/4= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/gookit/color v1.3.6 h1:Rgbazd4JO5AgSTVGS3o0nvaSdwdrS8bzvIXwtK6OiMk= +github.com/gookit/color v1.3.6/go.mod h1:R3ogXq2B9rTbXoSHJ1HyUVAZ3poOJHpd9nQmyGZsfvQ= github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= @@ -16,57 +27,87 @@ github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORN github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= -github.com/mozilla/tls-observatory v0.0.0-20200317151703-4fa42e1c2dee h1:1xJ+Xi9lYWLaaP4yB67ah0+548CD3110mCPWhVVjFkI= -github.com/mozilla/tls-observatory v0.0.0-20200317151703-4fa42e1c2dee/go.mod h1:SrKMQvPiws7F7iqYp8/TX+IhxCYhzr6N/1yb8cwHsGk= -github.com/nbutton23/zxcvbn-go v0.0.0-20180912185939-ae427f1e4c1d h1:AREM5mwr4u1ORQBMvzfzBgpsctsbQikCVpvC+tX285E= -github.com/nbutton23/zxcvbn-go v0.0.0-20180912185939-ae427f1e4c1d/go.mod h1:o96djdrsSGy3AWPyBgZMAGfxZNfgntdJG+11KU4QvbU= +github.com/lib/pq v1.9.0 h1:L8nSXQQzAYByakOFMTwpjRoHsMJklur4Gi59b6VivR8= +github.com/lib/pq v1.9.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/mozilla/tls-observatory v0.0.0-20201209171846-0547674fceff h1:1l3C92dKs28p0T3Abeem2JDPbtQgEWyNVzflHmyrAwU= +github.com/mozilla/tls-observatory v0.0.0-20201209171846-0547674fceff/go.mod h1:SrKMQvPiws7F7iqYp8/TX+IhxCYhzr6N/1yb8cwHsGk= +github.com/nbutton23/zxcvbn-go v0.0.0-20201221231540-e56b841a3c88 h1:o+O3Cd1HO9CTgxE3/C8p5I5Y4C0yYWbF8d4IkfOLtcQ= +github.com/nbutton23/zxcvbn-go v0.0.0-20201221231540-e56b841a3c88/go.mod h1:KSVJerMDfblTH7p5MZaTt+8zaT2iEk3AkVb9PQdZuE8= +github.com/nxadm/tail v1.4.4 h1:DQuhQpB1tVlglWS2hLQ5OV6B5r8aGxSrPc5Qo6uTN78= +github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.12.0 h1:Iw5WCbBcaAAd0fpRb1c9r5YCylv4XDoCSigm1zLevwU= -github.com/onsi/ginkgo v1.12.0/go.mod h1:oUhWkIvk5aDxtKvDDuw8gItl8pKl42LzjC9KZE0HfGg= +github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= +github.com/onsi/ginkgo v1.14.2 h1:8mVmC9kjFFmA8H4pKMUhcblgifdkOIXPvbhN1T36q1M= +github.com/onsi/ginkgo v1.14.2/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= github.com/onsi/gomega v1.7.1 h1:K0jcRCwNQM3vFGh1ppMtDh/+7ApJrjldlX8fA0jDTLQ= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= -github.com/onsi/gomega v1.9.0 h1:R1uwffexN6Pr340GtYRIdZmAiN4J+iw6WG4wog1DUXg= -github.com/onsi/gomega v1.9.0/go.mod h1:Ho0h+IUsWyvy1OpqCwxlQ/21gkhVunqlU8fDGcoTdcA= +github.com/onsi/gomega v1.10.1 h1:o0+MgICZLuZ7xjH7Vx6zS/zcu93/BEp1VwkIW1mEXCE= +github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= +github.com/onsi/gomega v1.10.4 h1:NiTx7EEvBzu9sFOD1zORteLSt3o8gnlvZZwSE9TnY9U= +github.com/onsi/gomega v1.10.4/go.mod h1:g/HbgYopi++010VEqkFgJHKC09uJiW9UkXvMUuKHUCQ= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod 
h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.1.4/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2 h1:VklqNMn3ovrHsnt90PveolxSbWFaJdECFbxSq0Mqo2M= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550 h1:ObdrDkeb4kJdCP557AjRjq69pTHfNouLtWZG7j9rPN8= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/mod v0.2.0 h1:KU7oHjnv3XNWfa5COkzUifxZmxp1TyI7ImMXqFxLwvQ= -golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 h1:psW17arqaxU48Z5kZ0CQnkZWQJsqcURM6tKiBApRjXI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b h1:0mm1VjtFUOIlE1SbDlwjYaDxZVDP2S5ou6y0gSgXHu8= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb h1:eBmm0M9fYhWpKZLjQUUKka/LtIxf46G4fxeEz5KJr9U= +golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f h1:wMNYb4v58l5UBM7MYRLPG6ZhfOqbKu7X5eyFl8ZhKvA= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys 
v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e h1:N7DeIrjYszNmSW409R3frPPwglRwMkXSBzwVbkOjLLA= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200519105757-fe76b779f299 h1:DYfZAGf2WMFjMxbgTjaC+2HC7NkNAQs+6Q8b9WEB/F4= +golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f h1:+Nyd8tzPX9R7BWHguqsrbFdRx3WQ/1ib8I44HXV5yTA= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20200331202046-9d5940d49312 h1:2PHG+Ia3gK1K2kjxZnSylizb//eyaMG8gDFbOG7wLV8= -golang.org/x/tools v0.0.0-20200331202046-9d5940d49312/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20210102185154-773b96fafca2 h1:crjwvdT+rSAILpNOKhk/BNmefsucqGTeeRX2YBK/6Jg= +golang.org/x/tools v0.0.0-20210102185154-773b96fafca2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898 h1:/atklqdjdhuosWIl6AIbOeHJjicWYPqR9bpxqxYG2pA= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.23.0 h1:4MY060fB1DLGMB/7MBTLnwQUY6+F09GEiz6SsrNqyzM= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 
v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= @@ -78,5 +119,7 @@ gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= -gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= diff --git a/vendor/github.com/securego/gosec/v2/helpers.go b/vendor/github.com/securego/gosec/v2/helpers.go index 40dc8e9..83dfa29 100644 --- a/vendor/github.com/securego/gosec/v2/helpers.go +++ b/vendor/github.com/securego/gosec/v2/helpers.go @@ -37,7 +37,6 @@ import ( // node, matched := MatchCallByPackage(n, ctx, "math/rand", "Read") // func MatchCallByPackage(n ast.Node, c *Context, pkg string, names ...string) (*ast.CallExpr, bool) { - importedName, found := GetImportedName(pkg, c) if !found { return nil, false @@ -226,6 +225,27 @@ func GetIdentStringValues(ident *ast.Ident) []string { return values } +// GetBinaryExprOperands returns all operands of a binary expression by traversing +// the expression tree +func GetBinaryExprOperands(be *ast.BinaryExpr) []ast.Node { + var traverse func(be *ast.BinaryExpr) + result := []ast.Node{} + traverse = func(be *ast.BinaryExpr) { + if lhs, ok := be.X.(*ast.BinaryExpr); ok { + traverse(lhs) + } else { + result = append(result, be.X) + } + if rhs, ok := be.Y.(*ast.BinaryExpr); ok { + traverse(rhs) + } else { + result = append(result, be.Y) + } + } + traverse(be) + return result +} + // GetImportedName returns the name used for the package within the // code. It will resolve aliases and ignores initialization only imports. 
func GetImportedName(path string, ctx *Context) (string, bool) { diff --git a/vendor/github.com/securego/gosec/v2/issue.go b/vendor/github.com/securego/gosec/v2/issue.go index 28ad726..aa58c34 100644 --- a/vendor/github.com/securego/gosec/v2/issue.go +++ b/vendor/github.com/securego/gosec/v2/issue.go @@ -15,9 +15,12 @@ package gosec import ( + "bufio" + "bytes" "encoding/json" "fmt" "go/ast" + "go/token" "os" "strconv" ) @@ -34,6 +37,10 @@ const ( High ) +// SnippetOffset defines the number of lines captured before +// the beginning and after the end of a code snippet +const SnippetOffset = 1 + // Cwe id and url type Cwe struct { ID string @@ -53,6 +60,7 @@ var IssueToCWE = map[string]Cwe{ "G104": GetCwe("703"), "G106": GetCwe("322"), "G107": GetCwe("88"), + "G108": GetCwe("200"), "G109": GetCwe("190"), "G110": GetCwe("409"), "G201": GetCwe("89"), @@ -64,6 +72,8 @@ var IssueToCWE = map[string]Cwe{ "G303": GetCwe("377"), "G304": GetCwe("22"), "G305": GetCwe("22"), + "G306": GetCwe("276"), + "G307": GetCwe("703"), "G401": GetCwe("326"), "G402": GetCwe("295"), "G403": GetCwe("310"), @@ -73,6 +83,7 @@ var IssueToCWE = map[string]Cwe{ "G503": GetCwe("327"), "G504": GetCwe("327"), "G505": GetCwe("327"), + "G601": GetCwe("118"), } // Issue is returned by a gosec rule if it discovers an issue with the scanned code. @@ -120,43 +131,56 @@ func (c Score) String() string { return "UNDEFINED" } +// codeSnippet extracts a code snippet based on the ast reference func codeSnippet(file *os.File, start int64, end int64, n ast.Node) (string, error) { if n == nil { - return "", fmt.Errorf("Invalid AST node provided") + return "", fmt.Errorf("invalid AST node provided") } - - size := (int)(end - start) // Go bug, os.File.Read should return int64 ... - _, err := file.Seek(start, 0) // #nosec - if err != nil { - return "", fmt.Errorf("move to the beginning of file: %v", err) + var pos int64 + var buf bytes.Buffer + scanner := bufio.NewScanner(file) + scanner.Split(bufio.ScanLines) + for scanner.Scan() { + pos++ + if pos > end { + break + } else if pos >= start && pos <= end { + code := fmt.Sprintf("%d: %s\n", pos, scanner.Text()) + buf.WriteString(code) + } } + return buf.String(), nil +} - buf := make([]byte, size) - if nread, err := file.Read(buf); err != nil || nread != size { - return "", fmt.Errorf("Unable to read code") +func codeSnippetStartLine(node ast.Node, fobj *token.File) int64 { + s := (int64)(fobj.Line(node.Pos())) + if s-SnippetOffset > 0 { + return s - SnippetOffset } - return string(buf), nil + return s +} + +func codeSnippetEndLine(node ast.Node, fobj *token.File) int64 { + e := (int64)(fobj.Line(node.End())) + return e + SnippetOffset } // NewIssue creates a new Issue func NewIssue(ctx *Context, node ast.Node, ruleID, desc string, severity Score, confidence Score) *Issue { - var code string fobj := ctx.FileSet.File(node.Pos()) name := fobj.Name() - start, end := fobj.Line(node.Pos()), fobj.Line(node.End()) line := strconv.Itoa(start) if start != end { line = fmt.Sprintf("%d-%d", start, end) } - col := strconv.Itoa(fobj.Position(node.Pos()).Column) - // #nosec + var code string if file, err := os.Open(fobj.Name()); err == nil { - defer file.Close() - s := (int64)(fobj.Position(node.Pos()).Offset) // Go bug, should be int64 - e := (int64)(fobj.Position(node.End()).Offset) // Go bug, should be int64 + defer file.Close() // #nosec + s := codeSnippetStartLine(node, fobj) + e := codeSnippetEndLine(node, fobj) code, err = codeSnippet(file, s, e, node) if err != nil { code = err.Error() diff --git 
a/vendor/github.com/securego/gosec/v2/rules/archive.go b/vendor/github.com/securego/gosec/v2/rules/archive.go index ca7a46e..92c7e44 100644 --- a/vendor/github.com/securego/gosec/v2/rules/archive.go +++ b/vendor/github.com/securego/gosec/v2/rules/archive.go @@ -9,15 +9,15 @@ import ( type archive struct { gosec.MetaData - calls gosec.CallList - argType string + calls gosec.CallList + argTypes []string } func (a *archive) ID() string { return a.MetaData.ID } -// Match inspects AST nodes to determine if the filepath.Joins uses any argument derived from type zip.File +// Match inspects AST nodes to determine if the filepath.Joins uses any argument derived from type zip.File or tar.Header func (a *archive) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { if node := a.calls.ContainsPkgCallExpr(n, c, false); node != nil { for _, arg := range node.Args { @@ -35,26 +35,31 @@ func (a *archive) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { } } - if argType != nil && argType.String() == a.argType { - return gosec.NewIssue(c, n, a.ID(), a.What, a.Severity, a.Confidence), nil + if argType != nil { + for _, t := range a.argTypes { + if argType.String() == t { + return gosec.NewIssue(c, n, a.ID(), a.What, a.Severity, a.Confidence), nil + } + } } } } return nil, nil } -// NewArchive creates a new rule which detects the file traversal when extracting zip archives +// NewArchive creates a new rule which detects the file traversal when extracting zip/tar archives func NewArchive(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { calls := gosec.NewCallList() calls.Add("path/filepath", "Join") + calls.Add("path", "Join") return &archive{ - calls: calls, - argType: "*archive/zip.File", + calls: calls, + argTypes: []string{"*archive/zip.File", "*archive/tar.Header"}, MetaData: gosec.MetaData{ ID: id, Severity: gosec.Medium, Confidence: gosec.High, - What: "File traversal when extracting zip archive", + What: "File traversal when extracting zip/tar archive", }, }, []ast.Node{(*ast.CallExpr)(nil)} } diff --git a/vendor/github.com/securego/gosec/v2/rules/bad_defer.go b/vendor/github.com/securego/gosec/v2/rules/bad_defer.go index 3c35880..b33a047 100644 --- a/vendor/github.com/securego/gosec/v2/rules/bad_defer.go +++ b/vendor/github.com/securego/gosec/v2/rules/bad_defer.go @@ -40,7 +40,7 @@ func (r *badDefer) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { for _, deferTyp := range r.types { if typ, method, err := gosec.GetCallInfo(deferStmt.Call, c); err == nil { if normalize(typ) == deferTyp.typ && contains(deferTyp.methods, method) { - return gosec.NewIssue(c, n, r.ID(), fmt.Sprintf(r.What, typ, method), r.Severity, r.Confidence), nil + return gosec.NewIssue(c, n, r.ID(), fmt.Sprintf(r.What, method, typ), r.Severity, r.Confidence), nil } } } diff --git a/vendor/github.com/securego/gosec/v2/rules/blacklist.go b/vendor/github.com/securego/gosec/v2/rules/blocklist.go similarity index 50% rename from vendor/github.com/securego/gosec/v2/rules/blacklist.go rename to vendor/github.com/securego/gosec/v2/rules/blocklist.go index 9bb7338..afd4ee5 100644 --- a/vendor/github.com/securego/gosec/v2/rules/blacklist.go +++ b/vendor/github.com/securego/gosec/v2/rules/blocklist.go @@ -21,9 +21,9 @@ import ( "github.com/securego/gosec/v2" ) -type blacklistedImport struct { +type blocklistedImport struct { gosec.MetaData - Blacklisted map[string]string + Blocklisted map[string]string } func unquote(original string) string { @@ -32,63 +32,63 @@ func unquote(original string) string { return 
strings.TrimRight(copy, `"`) } -func (r *blacklistedImport) ID() string { +func (r *blocklistedImport) ID() string { return r.MetaData.ID } -func (r *blacklistedImport) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { +func (r *blocklistedImport) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { if node, ok := n.(*ast.ImportSpec); ok { - if description, ok := r.Blacklisted[unquote(node.Path.Value)]; ok { + if description, ok := r.Blocklisted[unquote(node.Path.Value)]; ok { return gosec.NewIssue(c, node, r.ID(), description, r.Severity, r.Confidence), nil } } return nil, nil } -// NewBlacklistedImports reports when a blacklisted import is being used. +// NewBlocklistedImports reports when a blocklisted import is being used. // Typically when a deprecated technology is being used. -func NewBlacklistedImports(id string, conf gosec.Config, blacklist map[string]string) (gosec.Rule, []ast.Node) { - return &blacklistedImport{ +func NewBlocklistedImports(id string, conf gosec.Config, blocklist map[string]string) (gosec.Rule, []ast.Node) { + return &blocklistedImport{ MetaData: gosec.MetaData{ ID: id, Severity: gosec.Medium, Confidence: gosec.High, }, - Blacklisted: blacklist, + Blocklisted: blocklist, }, []ast.Node{(*ast.ImportSpec)(nil)} } -// NewBlacklistedImportMD5 fails if MD5 is imported -func NewBlacklistedImportMD5(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { - return NewBlacklistedImports(id, conf, map[string]string{ - "crypto/md5": "Blacklisted import crypto/md5: weak cryptographic primitive", +// NewBlocklistedImportMD5 fails if MD5 is imported +func NewBlocklistedImportMD5(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + return NewBlocklistedImports(id, conf, map[string]string{ + "crypto/md5": "Blocklisted import crypto/md5: weak cryptographic primitive", }) } -// NewBlacklistedImportDES fails if DES is imported -func NewBlacklistedImportDES(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { - return NewBlacklistedImports(id, conf, map[string]string{ - "crypto/des": "Blacklisted import crypto/des: weak cryptographic primitive", +// NewBlocklistedImportDES fails if DES is imported +func NewBlocklistedImportDES(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + return NewBlocklistedImports(id, conf, map[string]string{ + "crypto/des": "Blocklisted import crypto/des: weak cryptographic primitive", }) } -// NewBlacklistedImportRC4 fails if DES is imported -func NewBlacklistedImportRC4(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { - return NewBlacklistedImports(id, conf, map[string]string{ - "crypto/rc4": "Blacklisted import crypto/rc4: weak cryptographic primitive", +// NewBlocklistedImportRC4 fails if DES is imported +func NewBlocklistedImportRC4(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + return NewBlocklistedImports(id, conf, map[string]string{ + "crypto/rc4": "Blocklisted import crypto/rc4: weak cryptographic primitive", }) } -// NewBlacklistedImportCGI fails if CGI is imported -func NewBlacklistedImportCGI(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { - return NewBlacklistedImports(id, conf, map[string]string{ - "net/http/cgi": "Blacklisted import net/http/cgi: Go versions < 1.6.3 are vulnerable to Httpoxy attack: (CVE-2016-5386)", +// NewBlocklistedImportCGI fails if CGI is imported +func NewBlocklistedImportCGI(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + return NewBlocklistedImports(id, conf, map[string]string{ + "net/http/cgi": "Blocklisted import net/http/cgi: Go versions < 
1.6.3 are vulnerable to Httpoxy attack: (CVE-2016-5386)", }) } -// NewBlacklistedImportSHA1 fails if SHA1 is imported -func NewBlacklistedImportSHA1(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { - return NewBlacklistedImports(id, conf, map[string]string{ - "crypto/sha1": "Blacklisted import crypto/sha1: weak cryptographic primitive", +// NewBlocklistedImportSHA1 fails if SHA1 is imported +func NewBlocklistedImportSHA1(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + return NewBlocklistedImports(id, conf, map[string]string{ + "crypto/sha1": "Blocklisted import crypto/sha1: weak cryptographic primitive", }) } diff --git a/vendor/github.com/securego/gosec/v2/rules/decompression-bomb.go b/vendor/github.com/securego/gosec/v2/rules/decompression-bomb.go index bfc5897..02256fa 100644 --- a/vendor/github.com/securego/gosec/v2/rules/decompression-bomb.go +++ b/vendor/github.com/securego/gosec/v2/rules/decompression-bomb.go @@ -95,6 +95,7 @@ func NewDecompressionBombCheck(id string, conf gosec.Config) (gosec.Rule, []ast. copyCalls := gosec.NewCallList() copyCalls.Add("io", "Copy") + copyCalls.Add("io", "CopyBuffer") return &decompressionBombCheck{ MetaData: gosec.MetaData{ diff --git a/vendor/github.com/securego/gosec/v2/rules/implicit_aliasing.go b/vendor/github.com/securego/gosec/v2/rules/implicit_aliasing.go index 65c7ae3..b2668de 100644 --- a/vendor/github.com/securego/gosec/v2/rules/implicit_aliasing.go +++ b/vendor/github.com/securego/gosec/v2/rules/implicit_aliasing.go @@ -1,10 +1,10 @@ package rules import ( - "fmt" - "github.com/securego/gosec/v2" "go/ast" "go/token" + + "github.com/securego/gosec/v2" ) type implicitAliasing struct { @@ -33,20 +33,23 @@ func (r *implicitAliasing) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, er // When presented with a range statement, get the underlying Object bound to // by assignment and add it to our set (r.aliases) of objects to check for. 
if key, ok := node.Value.(*ast.Ident); ok { - if assignment, ok := key.Obj.Decl.(*ast.AssignStmt); ok { - if len(assignment.Lhs) < 2 { - return nil, nil - } + if key.Obj != nil { + if assignment, ok := key.Obj.Decl.(*ast.AssignStmt); ok { + if len(assignment.Lhs) < 2 { + return nil, nil + } - if object, ok := assignment.Lhs[1].(*ast.Ident); ok { - r.aliases[object.Obj] = struct{}{} + if object, ok := assignment.Lhs[1].(*ast.Ident); ok { + r.aliases[object.Obj] = struct{}{} - if r.rightBrace < node.Body.Rbrace { - r.rightBrace = node.Body.Rbrace + if r.rightBrace < node.Body.Rbrace { + r.rightBrace = node.Body.Rbrace + } } } } } + case *ast.UnaryExpr: // If this unary expression is outside of the last range statement we were looking at // then clear the list of objects we're concerned about because they're no longer in @@ -95,7 +98,7 @@ func NewImplicitAliasing(id string, conf gosec.Config) (gosec.Rule, []ast.Node) ID: id, Severity: gosec.Medium, Confidence: gosec.Medium, - What: fmt.Sprintf("Implicit memory aliasing in for loop."), + What: "Implicit memory aliasing in for loop.", }, }, []ast.Node{(*ast.RangeStmt)(nil), (*ast.UnaryExpr)(nil), (*ast.ReturnStmt)(nil)} } diff --git a/vendor/github.com/securego/gosec/v2/rules/rand.go b/vendor/github.com/securego/gosec/v2/rules/rand.go index 08c28fc..bf86b76 100644 --- a/vendor/github.com/securego/gosec/v2/rules/rand.go +++ b/vendor/github.com/securego/gosec/v2/rules/rand.go @@ -43,7 +43,8 @@ func (w *weakRand) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { // NewWeakRandCheck detects the use of random number generator that isn't cryptographically secure func NewWeakRandCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { return &weakRand{ - funcNames: []string{"Read", "Int"}, + funcNames: []string{"New", "Read", "Float32", "Float64", "Int", "Int31", + "Int31n", "Int63", "Int63n", "Intn", "NormalFloat64", "Uint32", "Uint64"}, packagePath: "math/rand", MetaData: gosec.MetaData{ ID: id, diff --git a/vendor/github.com/securego/gosec/v2/rules/readfile.go b/vendor/github.com/securego/gosec/v2/rules/readfile.go index a52f742..072b016 100644 --- a/vendor/github.com/securego/gosec/v2/rules/readfile.go +++ b/vendor/github.com/securego/gosec/v2/rules/readfile.go @@ -25,6 +25,7 @@ type readfile struct { gosec.MetaData gosec.CallList pathJoin gosec.CallList + clean gosec.CallList } // ID returns the identifier for this rule @@ -56,6 +57,21 @@ func (r *readfile) isJoinFunc(n ast.Node, c *gosec.Context) bool { return false } +// isFilepathClean checks if there is a filepath.Clean before assigning to a variable +func (r *readfile) isFilepathClean(n *ast.Ident, c *gosec.Context) bool { + if n.Obj.Kind != ast.Var { + return false + } + if node, ok := n.Obj.Decl.(*ast.AssignStmt); ok { + if call, ok := node.Rhs[0].(*ast.CallExpr); ok { + if clean := r.clean.ContainsPkgCallExpr(call, c, false); clean != nil { + return true + } + } + } + return false +} + // Match inspects AST nodes to determine if the match the methods `os.Open` or `ioutil.ReadFile` func (r *readfile) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { if node := r.ContainsPkgCallExpr(n, c, false); node != nil { @@ -77,7 +93,9 @@ func (r *readfile) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { if ident, ok := arg.(*ast.Ident); ok { obj := c.Info.ObjectOf(ident) - if _, ok := obj.(*types.Var); ok && !gosec.TryResolve(ident, c) { + if _, ok := obj.(*types.Var); ok && + !gosec.TryResolve(ident, c) && + !r.isFilepathClean(ident, c) { return 
gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil } } @@ -90,6 +108,7 @@ func (r *readfile) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { func NewReadFile(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { rule := &readfile{ pathJoin: gosec.NewCallList(), + clean: gosec.NewCallList(), CallList: gosec.NewCallList(), MetaData: gosec.MetaData{ ID: id, @@ -100,7 +119,10 @@ func NewReadFile(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { } rule.pathJoin.Add("path/filepath", "Join") rule.pathJoin.Add("path", "Join") + rule.clean.Add("path/filepath", "Clean") + rule.clean.Add("path/filepath", "Rel") rule.Add("io/ioutil", "ReadFile") rule.Add("os", "Open") + rule.Add("os", "OpenFile") return rule, []ast.Node{(*ast.CallExpr)(nil)} } diff --git a/vendor/github.com/securego/gosec/v2/rules/rulelist.go b/vendor/github.com/securego/gosec/v2/rules/rulelist.go index 06e1dfb..a3d9ca2 100644 --- a/vendor/github.com/securego/gosec/v2/rules/rulelist.go +++ b/vendor/github.com/securego/gosec/v2/rules/rulelist.go @@ -90,12 +90,12 @@ func Generate(filters ...RuleFilter) RuleList { {"G403", "Ensure minimum RSA key length of 2048 bits", NewWeakKeyStrength}, {"G404", "Insecure random number source (rand)", NewWeakRandCheck}, - // blacklist - {"G501", "Import blacklist: crypto/md5", NewBlacklistedImportMD5}, - {"G502", "Import blacklist: crypto/des", NewBlacklistedImportDES}, - {"G503", "Import blacklist: crypto/rc4", NewBlacklistedImportRC4}, - {"G504", "Import blacklist: net/http/cgi", NewBlacklistedImportCGI}, - {"G505", "Import blacklist: crypto/sha1", NewBlacklistedImportSHA1}, + // blocklist + {"G501", "Import blocklist: crypto/md5", NewBlocklistedImportMD5}, + {"G502", "Import blocklist: crypto/des", NewBlocklistedImportDES}, + {"G503", "Import blocklist: crypto/rc4", NewBlocklistedImportRC4}, + {"G504", "Import blocklist: net/http/cgi", NewBlocklistedImportCGI}, + {"G505", "Import blocklist: crypto/sha1", NewBlocklistedImportSHA1}, // memory safety {"G601", "Implicit memory aliasing in RangeStmt", NewImplicitAliasing}, diff --git a/vendor/github.com/securego/gosec/v2/rules/sql.go b/vendor/github.com/securego/gosec/v2/rules/sql.go index 3279a34..127dec5 100644 --- a/vendor/github.com/securego/gosec/v2/rules/sql.go +++ b/vendor/github.com/securego/gosec/v2/rules/sql.go @@ -17,12 +17,14 @@ package rules import ( "go/ast" "regexp" + "strings" "github.com/securego/gosec/v2" ) type sqlStatement struct { gosec.MetaData + gosec.CallList // Contains a list of patterns which must all match for the rule to match. 
patterns []*regexp.Regexp @@ -65,33 +67,65 @@ func (s *sqlStrConcat) checkObject(n *ast.Ident, c *gosec.Context) bool { return false } -// Look for "SELECT * FROM table WHERE " + " ' OR 1=1" -func (s *sqlStrConcat) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { - if node, ok := n.(*ast.BinaryExpr); ok { - if start, ok := node.X.(*ast.BasicLit); ok { +// checkQuery verifies if the query parameters is a string concatenation +func (s *sqlStrConcat) checkQuery(call *ast.CallExpr, ctx *gosec.Context) (*gosec.Issue, error) { + _, fnName, err := gosec.GetCallInfo(call, ctx) + if err != nil { + return nil, err + } + var query ast.Node + if strings.HasSuffix(fnName, "Context") { + query = call.Args[1] + } else { + query = call.Args[0] + } + + if be, ok := query.(*ast.BinaryExpr); ok { + operands := gosec.GetBinaryExprOperands(be) + if start, ok := operands[0].(*ast.BasicLit); ok { if str, e := gosec.GetString(start); e == nil { if !s.MatchPatterns(str) { return nil, nil } - if _, ok := node.Y.(*ast.BasicLit); ok { - return nil, nil // string cat OK + } + for _, op := range operands[1:] { + if _, ok := op.(*ast.BasicLit); ok { + continue } - if second, ok := node.Y.(*ast.Ident); ok && s.checkObject(second, c) { - return nil, nil + if op, ok := op.(*ast.Ident); ok && s.checkObject(op, ctx) { + continue } - return gosec.NewIssue(c, n, s.ID(), s.What, s.Severity, s.Confidence), nil + return gosec.NewIssue(ctx, be, s.ID(), s.What, s.Severity, s.Confidence), nil } } } + + return nil, nil +} + +// Checks SQL query concatenation issues such as "SELECT * FROM table WHERE " + " ' OR 1=1" +func (s *sqlStrConcat) Match(n ast.Node, ctx *gosec.Context) (*gosec.Issue, error) { + switch stmt := n.(type) { + case *ast.AssignStmt: + for _, expr := range stmt.Rhs { + if sqlQueryCall, ok := expr.(*ast.CallExpr); ok && s.ContainsCallExpr(expr, ctx) != nil { + return s.checkQuery(sqlQueryCall, ctx) + } + } + case *ast.ExprStmt: + if sqlQueryCall, ok := stmt.X.(*ast.CallExpr); ok && s.ContainsCallExpr(stmt.X, ctx) != nil { + return s.checkQuery(sqlQueryCall, ctx) + } + } return nil, nil } // NewSQLStrConcat looks for cases where we are building SQL strings via concatenation func NewSQLStrConcat(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { - return &sqlStrConcat{ + rule := &sqlStrConcat{ sqlStatement: sqlStatement{ patterns: []*regexp.Regexp{ - regexp.MustCompile(`(?)(SELECT|DELETE|INSERT|UPDATE|INTO|FROM|WHERE) `), + regexp.MustCompile(`(?i)(SELECT|DELETE|INSERT|UPDATE|INTO|FROM|WHERE) `), }, MetaData: gosec.MetaData{ ID: id, @@ -99,13 +133,19 @@ func NewSQLStrConcat(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { Confidence: gosec.High, What: "SQL string concatenation", }, + CallList: gosec.NewCallList(), }, - }, []ast.Node{(*ast.BinaryExpr)(nil)} + } + + rule.AddAll("*database/sql.DB", "Query", "QueryContext", "QueryRow", "QueryRowContext") + rule.AddAll("*database/sql.Tx", "Query", "QueryContext", "QueryRow", "QueryRowContext") + return rule, []ast.Node{(*ast.AssignStmt)(nil), (*ast.ExprStmt)(nil)} } type sqlStrFormat struct { + gosec.CallList sqlStatement - calls gosec.CallList + fmtCalls gosec.CallList noIssue gosec.CallList noIssueQuoted gosec.CallList } @@ -130,14 +170,37 @@ func (s *sqlStrFormat) constObject(e ast.Expr, c *gosec.Context) bool { return false } -// Looks for "fmt.Sprintf("SELECT * FROM foo where '%s', userInput)" -func (s *sqlStrFormat) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { +func (s *sqlStrFormat) checkQuery(call *ast.CallExpr, ctx 
*gosec.Context) (*gosec.Issue, error) { + _, fnName, err := gosec.GetCallInfo(call, ctx) + if err != nil { + return nil, err + } + var query ast.Node + if strings.HasSuffix(fnName, "Context") { + query = call.Args[1] + } else { + query = call.Args[0] + } + + if ident, ok := query.(*ast.Ident); ok && ident.Obj != nil { + decl := ident.Obj.Decl + if assign, ok := decl.(*ast.AssignStmt); ok { + for _, expr := range assign.Rhs { + issue, err := s.checkFormatting(expr, ctx) + if issue != nil { + return issue, err + } + } + } + } + return nil, nil +} + +func (s *sqlStrFormat) checkFormatting(n ast.Node, ctx *gosec.Context) (*gosec.Issue, error) { // argIndex changes the function argument which gets matched to the regex argIndex := 0 - - // TODO(gm) improve confidence if database/sql is being used - if node := s.calls.ContainsPkgCallExpr(n, c, false); node != nil { + if node := s.fmtCalls.ContainsPkgCallExpr(n, ctx, false); node != nil { // if the function is fmt.Fprintf, search for SQL statement in Args[1] instead if sel, ok := node.Fun.(*ast.SelectorExpr); ok { if sel.Sel.Name == "Fprintf" { @@ -177,7 +240,7 @@ func (s *sqlStrFormat) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) if argIndex+1 < len(node.Args) { allSafe := true for _, arg := range node.Args[argIndex+1:] { - if n := s.noIssueQuoted.ContainsPkgCallExpr(arg, c, true); n == nil && !s.constObject(arg, c) { + if n := s.noIssueQuoted.ContainsPkgCallExpr(arg, ctx, true); n == nil && !s.constObject(arg, ctx) { allSafe = false break } @@ -187,7 +250,24 @@ func (s *sqlStrFormat) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) } } if s.MatchPatterns(formatter) { - return gosec.NewIssue(c, n, s.ID(), s.What, s.Severity, s.Confidence), nil + return gosec.NewIssue(ctx, n, s.ID(), s.What, s.Severity, s.Confidence), nil + } + } + return nil, nil +} + +// Check SQL query formatting issues such as "fmt.Sprintf("SELECT * FROM foo where '%s', userInput)" +func (s *sqlStrFormat) Match(n ast.Node, ctx *gosec.Context) (*gosec.Issue, error) { + switch stmt := n.(type) { + case *ast.AssignStmt: + for _, expr := range stmt.Rhs { + if sqlQueryCall, ok := expr.(*ast.CallExpr); ok && s.ContainsCallExpr(expr, ctx) != nil { + return s.checkQuery(sqlQueryCall, ctx) + } + } + case *ast.ExprStmt: + if sqlQueryCall, ok := stmt.X.(*ast.CallExpr); ok && s.ContainsCallExpr(stmt.X, ctx) != nil { + return s.checkQuery(sqlQueryCall, ctx) } } return nil, nil @@ -196,12 +276,13 @@ func (s *sqlStrFormat) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) // NewSQLStrFormat looks for cases where we're building SQL query strings using format strings func NewSQLStrFormat(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { rule := &sqlStrFormat{ - calls: gosec.NewCallList(), + CallList: gosec.NewCallList(), + fmtCalls: gosec.NewCallList(), noIssue: gosec.NewCallList(), noIssueQuoted: gosec.NewCallList(), sqlStatement: sqlStatement{ patterns: []*regexp.Regexp{ - regexp.MustCompile("(?)(SELECT|DELETE|INSERT|UPDATE|INTO|FROM|WHERE) "), + regexp.MustCompile("(?i)(SELECT|DELETE|INSERT|UPDATE|INTO|FROM|WHERE) "), regexp.MustCompile("%[^bdoxXfFp]"), }, MetaData: gosec.MetaData{ @@ -212,8 +293,11 @@ func NewSQLStrFormat(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { }, }, } - rule.calls.AddAll("fmt", "Sprint", "Sprintf", "Sprintln", "Fprintf") + rule.AddAll("*database/sql.DB", "Query", "QueryContext", "QueryRow", "QueryRowContext") + rule.AddAll("*database/sql.Tx", "Query", "QueryContext", "QueryRow", "QueryRowContext") + 
rule.fmtCalls.AddAll("fmt", "Sprint", "Sprintf", "Sprintln", "Fprintf") rule.noIssue.AddAll("os", "Stdout", "Stderr") rule.noIssueQuoted.Add("github.com/lib/pq", "QuoteIdentifier") - return rule, []ast.Node{(*ast.CallExpr)(nil)} + + return rule, []ast.Node{(*ast.AssignStmt)(nil), (*ast.ExprStmt)(nil)} } diff --git a/vendor/github.com/securego/gosec/v2/rules/tls.go b/vendor/github.com/securego/gosec/v2/rules/tls.go index fab9ee1..a013788 100644 --- a/vendor/github.com/securego/gosec/v2/rules/tls.go +++ b/vendor/github.com/securego/gosec/v2/rules/tls.go @@ -17,6 +17,7 @@ package rules import ( + "crypto/tls" "fmt" "go/ast" @@ -25,10 +26,12 @@ import ( type insecureConfigTLS struct { gosec.MetaData - MinVersion int16 - MaxVersion int16 - requiredType string - goodCiphers []string + MinVersion int16 + MaxVersion int16 + requiredType string + goodCiphers []string + actualMinVersion int16 + actualMaxVersion int16 } func (t *insecureConfigTLS) ID() string { @@ -45,7 +48,6 @@ func stringInSlice(a string, list []string) bool { } func (t *insecureConfigTLS) processTLSCipherSuites(n ast.Node, c *gosec.Context) *gosec.Issue { - if ciphers, ok := n.(*ast.CompositeLit); ok { for _, cipher := range ciphers.Elts { if ident, ok := cipher.(*ast.SelectorExpr); ok { @@ -62,7 +64,6 @@ func (t *insecureConfigTLS) processTLSCipherSuites(n ast.Node, c *gosec.Context) func (t *insecureConfigTLS) processTLSConfVal(n *ast.KeyValueExpr, c *gosec.Context) *gosec.Issue { if ident, ok := n.Key.(*ast.Ident); ok { switch ident.Name { - case "InsecureSkipVerify": if node, ok := n.Value.(*ast.Ident); ok { if node.Name != "false" { @@ -85,20 +86,24 @@ func (t *insecureConfigTLS) processTLSConfVal(n *ast.KeyValueExpr, c *gosec.Cont case "MinVersion": if ival, ierr := gosec.GetInt(n.Value); ierr == nil { - if (int16)(ival) < t.MinVersion { - return gosec.NewIssue(c, n, t.ID(), "TLS MinVersion too low.", gosec.High, gosec.High) + t.actualMinVersion = (int16)(ival) + } else { + if se, ok := n.Value.(*ast.SelectorExpr); ok { + if pkg, ok := se.X.(*ast.Ident); ok && pkg.Name == "tls" { + t.actualMinVersion = t.mapVersion(se.Sel.Name) + } } - // TODO(tk): symbol tab look up to get the actual value - return gosec.NewIssue(c, n, t.ID(), "TLS MinVersion may be too low.", gosec.High, gosec.Low) } case "MaxVersion": if ival, ierr := gosec.GetInt(n.Value); ierr == nil { - if (int16)(ival) < t.MaxVersion { - return gosec.NewIssue(c, n, t.ID(), "TLS MaxVersion too low.", gosec.High, gosec.High) + t.actualMaxVersion = (int16)(ival) + } else { + if se, ok := n.Value.(*ast.SelectorExpr); ok { + if pkg, ok := se.X.(*ast.Ident); ok && pkg.Name == "tls" { + t.actualMaxVersion = t.mapVersion(se.Sel.Name) + } } - // TODO(tk): symbol tab look up to get the actual value - return gosec.NewIssue(c, n, t.ID(), "TLS MaxVersion may be too low.", gosec.High, gosec.Low) } case "CipherSuites": @@ -112,6 +117,35 @@ func (t *insecureConfigTLS) processTLSConfVal(n *ast.KeyValueExpr, c *gosec.Cont return nil } +func (t *insecureConfigTLS) mapVersion(version string) int16 { + var v int16 + switch version { + case "VersionTLS13": + v = tls.VersionTLS13 + case "VersionTLS12": + v = tls.VersionTLS12 + case "VersionTLS11": + v = tls.VersionTLS11 + case "VersionTLS10": + v = tls.VersionTLS10 + } + return v +} + +func (t *insecureConfigTLS) checkVersion(n ast.Node, c *gosec.Context) *gosec.Issue { + if t.actualMaxVersion == 0 && t.actualMinVersion >= t.MinVersion { + // no warning is generated since the min version is greater than the secure min version + return nil + 
} + if t.actualMinVersion < t.MinVersion { + return gosec.NewIssue(c, n, t.ID(), "TLS MinVersion too low.", gosec.High, gosec.High) + } + if t.actualMaxVersion < t.MaxVersion { + return gosec.NewIssue(c, n, t.ID(), "TLS MaxVersion too low.", gosec.High, gosec.High) + } + return nil +} + func (t *insecureConfigTLS) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { if complit, ok := n.(*ast.CompositeLit); ok && complit.Type != nil { actualType := c.Info.TypeOf(complit.Type) @@ -124,6 +158,7 @@ func (t *insecureConfigTLS) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, e } } } + return t.checkVersion(complit, c), nil } } return nil, nil diff --git a/vendor/github.com/securego/gosec/v2/rules/tls_config.go b/vendor/github.com/securego/gosec/v2/rules/tls_config.go index ff4f3fe..5d68593 100644 --- a/vendor/github.com/securego/gosec/v2/rules/tls_config.go +++ b/vendor/github.com/securego/gosec/v2/rules/tls_config.go @@ -39,7 +39,9 @@ func NewIntermediateTLSCheck(id string, conf gosec.Config) (gosec.Rule, []ast.No "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305", + "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256", "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305", + "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256", "TLS_DHE_RSA_WITH_AES_128_GCM_SHA256", "TLS_DHE_RSA_WITH_AES_256_GCM_SHA384", }, @@ -63,7 +65,9 @@ func NewOldTLSCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305", + "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256", "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305", + "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256", "TLS_DHE_RSA_WITH_AES_128_GCM_SHA256", "TLS_DHE_RSA_WITH_AES_256_GCM_SHA384", "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256", diff --git a/vendor/github.com/securego/gosec/v2/rules/weakcrypto.go b/vendor/github.com/securego/gosec/v2/rules/weakcrypto.go index 0e45393..eecb88f 100644 --- a/vendor/github.com/securego/gosec/v2/rules/weakcrypto.go +++ b/vendor/github.com/securego/gosec/v2/rules/weakcrypto.go @@ -22,7 +22,7 @@ import ( type usesWeakCryptography struct { gosec.MetaData - blacklist map[string][]string + blocklist map[string][]string } func (r *usesWeakCryptography) ID() string { @@ -30,7 +30,7 @@ func (r *usesWeakCryptography) ID() string { } func (r *usesWeakCryptography) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { - for pkg, funcs := range r.blacklist { + for pkg, funcs := range r.blocklist { if _, matched := gosec.MatchCallByPackage(n, c, pkg, funcs...); matched { return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil } @@ -46,7 +46,7 @@ func NewUsesWeakCryptography(id string, conf gosec.Config) (gosec.Rule, []ast.No calls["crypto/sha1"] = []string{"New", "Sum"} calls["crypto/rc4"] = []string{"NewCipher"} rule := &usesWeakCryptography{ - blacklist: calls, + blocklist: calls, MetaData: gosec.MetaData{ ID: id, Severity: gosec.Medium, diff --git a/vendor/github.com/shazow/go-diff/LICENSE b/vendor/github.com/shazow/go-diff/LICENSE new file mode 100644 index 0000000..85e1e4b --- /dev/null +++ b/vendor/github.com/shazow/go-diff/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 Andrey Petrov + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights 
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/vendor/github.com/shazow/go-diff/difflib/differ.go b/vendor/github.com/shazow/go-diff/difflib/differ.go new file mode 100644 index 0000000..43dc84d --- /dev/null +++ b/vendor/github.com/shazow/go-diff/difflib/differ.go @@ -0,0 +1,39 @@ +// This package implements the diff.Differ interface using github.com/mb0/diff as a backend. +package difflib + +import ( + "io" + "io/ioutil" + + "github.com/pmezard/go-difflib/difflib" +) + +type differ struct{} + +// New returns an implementation of diff.Differ using mb0diff as the backend. +func New() *differ { + return &differ{} +} + +// Diff consumes the entire reader streams into memory before generating a diff +// which then gets filled into the buffer. This implementation stores and +// manipulates all three values in memory. +func (diff *differ) Diff(out io.Writer, a io.ReadSeeker, b io.ReadSeeker) error { + var src, dst []byte + var err error + + if src, err = ioutil.ReadAll(a); err != nil { + return err + } + if dst, err = ioutil.ReadAll(b); err != nil { + return err + } + + d := difflib.UnifiedDiff{ + A: difflib.SplitLines(string(src)), + B: difflib.SplitLines(string(dst)), + Context: 3, + } + + return difflib.WriteUnifiedDiff(out, d) +} diff --git a/vendor/github.com/sirupsen/logrus/.gitignore b/vendor/github.com/sirupsen/logrus/.gitignore index 6b7d7d1..1fb13ab 100644 --- a/vendor/github.com/sirupsen/logrus/.gitignore +++ b/vendor/github.com/sirupsen/logrus/.gitignore @@ -1,2 +1,4 @@ logrus vendor + +.idea/ diff --git a/vendor/github.com/sirupsen/logrus/.golangci.yml b/vendor/github.com/sirupsen/logrus/.golangci.yml new file mode 100644 index 0000000..65dc285 --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/.golangci.yml @@ -0,0 +1,40 @@ +run: + # do not run on test files yet + tests: false + +# all available settings of specific linters +linters-settings: + errcheck: + # report about not checking of errors in type assetions: `a := b.(MyStruct)`; + # default is false: such cases aren't reported by default. + check-type-assertions: false + + # report about assignment of errors to blank identifier: `num, _ := strconv.Atoi(numStr)`; + # default is false: such cases aren't reported by default. 
+ check-blank: false + + lll: + line-length: 100 + tab-width: 4 + + prealloc: + simple: false + range-loops: false + for-loops: false + + whitespace: + multi-if: false # Enforces newlines (or comments) after every multi-line if statement + multi-func: false # Enforces newlines (or comments) after every multi-line function signature + +linters: + enable: + - megacheck + - govet + disable: + - maligned + - prealloc + disable-all: false + presets: + - bugs + - unused + fast: false diff --git a/vendor/github.com/sirupsen/logrus/.travis.yml b/vendor/github.com/sirupsen/logrus/.travis.yml index 848938a..5e20aa4 100644 --- a/vendor/github.com/sirupsen/logrus/.travis.yml +++ b/vendor/github.com/sirupsen/logrus/.travis.yml @@ -4,21 +4,13 @@ git: depth: 1 env: - GO111MODULE=on - - GO111MODULE=off -go: [ 1.11.x, 1.12.x ] -os: [ linux, osx ] -matrix: - exclude: - - go: 1.12.x - env: GO111MODULE=off - - go: 1.11.x - os: osx +go: [1.13.x, 1.14.x] +os: [linux, osx] install: - ./travis/install.sh - - if [[ "$GO111MODULE" == "on" ]]; then go mod download; fi - - if [[ "$GO111MODULE" == "off" ]]; then go get github.com/stretchr/testify/assert golang.org/x/sys/unix github.com/konsorten/go-windows-terminal-sequences; fi script: - ./travis/cross_build.sh + - ./travis/lint.sh - export GOMAXPROCS=4 - export GORACE=halt_on_error=1 - go test -race -v ./... diff --git a/vendor/github.com/sirupsen/logrus/CHANGELOG.md b/vendor/github.com/sirupsen/logrus/CHANGELOG.md index 51a7ab0..584026d 100644 --- a/vendor/github.com/sirupsen/logrus/CHANGELOG.md +++ b/vendor/github.com/sirupsen/logrus/CHANGELOG.md @@ -1,9 +1,32 @@ +# 1.6.0 +Fixes: + * end of line cleanup + * revert the entry concurrency bug fix whic leads to deadlock under some circumstances + * update dependency on go-windows-terminal-sequences to fix a crash with go 1.14 + +Features: + * add an option to the `TextFormatter` to completely disable fields quoting + +# 1.5.0 +Code quality: + * add golangci linter run on travis + +Fixes: + * add mutex for hooks concurrent access on `Entry` data + * caller function field for go1.14 + * fix build issue for gopherjs target + +Feature: + * add an hooks/writer sub-package whose goal is to split output on different stream depending on the trace level + * add a `DisableHTMLEscape` option in the `JSONFormatter` + * add `ForceQuote` and `PadLevelText` options in the `TextFormatter` + # 1.4.2 * Fixes build break for plan9, nacl, solaris # 1.4.1 This new release introduces: * Enhance TextFormatter to not print caller information when they are empty (#944) - * Remove dependency on golang.org/x/crypto (#932, #943) + * Remove dependency on golang.org/x/crypto (#932, #943) Fixes: * Fix Entry.WithContext method to return a copy of the initial entry (#941) @@ -11,7 +34,7 @@ Fixes: # 1.4.0 This new release introduces: * Add `DeferExitHandler`, similar to `RegisterExitHandler` but prepending the handler to the list of handlers (semantically like `defer`) (#848). - * Add `CallerPrettyfier` to `JSONFormatter` and `TextFormatter (#909, #911) + * Add `CallerPrettyfier` to `JSONFormatter` and `TextFormatter` (#909, #911) * Add `Entry.WithContext()` and `Entry.Context`, to set a context on entries to be used e.g. in hooks (#919). 
Fixes: diff --git a/vendor/github.com/sirupsen/logrus/README.md b/vendor/github.com/sirupsen/logrus/README.md index a4796eb..5796706 100644 --- a/vendor/github.com/sirupsen/logrus/README.md +++ b/vendor/github.com/sirupsen/logrus/README.md @@ -1,8 +1,28 @@ -# Logrus :walrus: [![Build Status](https://travis-ci.org/sirupsen/logrus.svg?branch=master)](https://travis-ci.org/sirupsen/logrus) [![GoDoc](https://godoc.org/github.com/sirupsen/logrus?status.svg)](https://godoc.org/github.com/sirupsen/logrus) +# Logrus :walrus: [![Build Status](https://travis-ci.org/sirupsen/logrus.svg?branch=master)](https://travis-ci.org/sirupsen/logrus) [![GoDoc](https://godoc.org/github.com/sirupsen/logrus?status.svg)](https://godoc.org/github.com/sirupsen/logrus) Logrus is a structured logger for Go (golang), completely API compatible with the standard library logger. +**Logrus is in maintenance-mode.** We will not be introducing new features. It's +simply too hard to do in a way that won't break many people's projects, which is +the last thing you want from your Logging library (again...). + +This does not mean Logrus is dead. Logrus will continue to be maintained for +security, (backwards compatible) bug fixes, and performance (where we are +limited by the interface). + +I believe Logrus' biggest contribution is to have played a part in today's +widespread use of structured logging in Golang. There doesn't seem to be a +reason to do a major, breaking iteration into Logrus V2, since the fantastic Go +community has built those independently. Many fantastic alternatives have sprung +up. Logrus would look like those, had it been re-designed with what we know +about structured logging in Go today. Check out, for example, +[Zerolog][zerolog], [Zap][zap], and [Apex][apex]. + +[zerolog]: https://github.com/rs/zerolog +[zap]: https://github.com/uber-go/zap +[apex]: https://github.com/apex/log + **Seeing weird case-sensitive problems?** It's in the past been possible to import Logrus as both upper- and lower-case. Due to the Go package environment, this caused issues in the community and we needed a standard. Some environments @@ -15,11 +35,6 @@ comments](https://github.com/sirupsen/logrus/issues/553#issuecomment-306591437). For an in-depth explanation of the casing issue, see [this comment](https://github.com/sirupsen/logrus/issues/570#issuecomment-313933276). -**Are you interested in assisting in maintaining Logrus?** Currently I have a -lot of obligations, and I am unable to provide Logrus with the maintainership it -needs. If you'd like to help, please reach out to me at `simon at author's -username dot com`. - Nicely color-coded in development (when a TTY is attached, otherwise just plain text): @@ -187,7 +202,7 @@ func main() { log.Out = os.Stdout // You could set this to any `io.Writer` such as a file - // file, err := os.OpenFile("logrus.log", os.O_CREATE|os.O_WRONLY, 0666) + // file, err := os.OpenFile("logrus.log", os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0666) // if err == nil { // log.Out = file // } else { @@ -272,7 +287,7 @@ func init() { ``` Note: Syslog hook also support connecting to local syslog (Ex. "/dev/log" or "/var/run/syslog" or "/var/run/log"). For the detail, please check the [syslog hook README](hooks/syslog/README.md). 
-A list of currently known of service hook can be found in this wiki [page](https://github.com/sirupsen/logrus/wiki/Hooks) +A list of currently known service hooks can be found in this wiki [page](https://github.com/sirupsen/logrus/wiki/Hooks) #### Level logging @@ -354,6 +369,7 @@ The built-in logging formatters are: [github.com/mattn/go-colorable](https://github.com/mattn/go-colorable). * When colors are enabled, levels are truncated to 4 characters by default. To disable truncation set the `DisableLevelTruncation` field to `true`. + * When outputting to a TTY, it's often helpful to visually scan down a column where all the levels are the same width. Setting the `PadLevelText` field to `true` enables this behavior, by adding padding to the level text. * All options are listed in the [generated docs](https://godoc.org/github.com/sirupsen/logrus#TextFormatter). * `logrus.JSONFormatter`. Logs fields as JSON. * All options are listed in the [generated docs](https://godoc.org/github.com/sirupsen/logrus#JSONFormatter). @@ -364,8 +380,10 @@ Third party logging formatters: * [`GELF`](https://github.com/fabienm/go-logrus-formatters). Formats entries so they comply to Graylog's [GELF 1.1 specification](http://docs.graylog.org/en/2.4/pages/gelf.html). * [`logstash`](https://github.com/bshuster-repo/logrus-logstash-hook). Logs fields as [Logstash](http://logstash.net) Events. * [`prefixed`](https://github.com/x-cray/logrus-prefixed-formatter). Displays log entry source along with alternative layout. -* [`zalgo`](https://github.com/aybabtme/logzalgo). Invoking the P͉̫o̳̼̊w̖͈̰͎e̬͔̭͂r͚̼̹̲ ̫͓͉̳͈ō̠͕͖̚f̝͍̠ ͕̲̞͖͑Z̖̫̤̫ͪa͉̬͈̗l͖͎g̳̥o̰̥̅!̣͔̲̻͊̄ ̙̘̦̹̦. +* [`zalgo`](https://github.com/aybabtme/logzalgo). Invoking the Power of Zalgo. * [`nested-logrus-formatter`](https://github.com/antonfisher/nested-logrus-formatter). Converts logrus fields to a nested structure. +* [`powerful-logrus-formatter`](https://github.com/zput/zxcTool). get fileName, log's line number and the latest function's name when print log; Sava log to files. +* [`caption-json-formatter`](https://github.com/nolleh/caption_json_formatter). logrus's message json formatter with human-readable caption added. You can define your formatter by implementing the `Formatter` interface, requiring a `Format` method. `Format` takes an `*Entry`. `entry.Data` is a @@ -430,14 +448,14 @@ entries. It should not be a feature of the application-level logger. | Tool | Description | | ---- | ----------- | -|[Logrus Mate](https://github.com/gogap/logrus_mate)|Logrus mate is a tool for Logrus to manage loggers, you can initial logger's level, hook and formatter by config file, the logger will generated with different config at different environment.| +|[Logrus Mate](https://github.com/gogap/logrus_mate)|Logrus mate is a tool for Logrus to manage loggers, you can initial logger's level, hook and formatter by config file, the logger will be generated with different configs in different environments.| |[Logrus Viper Helper](https://github.com/heirko/go-contrib/tree/master/logrusHelper)|An Helper around Logrus to wrap with spf13/Viper to load configuration with fangs! And to simplify Logrus configuration use some behavior of [Logrus Mate](https://github.com/gogap/logrus_mate). [sample](https://github.com/heirko/iris-contrib/blob/master/middleware/logrus-logger/example) | #### Testing Logrus has a built in facility for asserting the presence of log messages. 
This is implemented through the `test` hook and provides: -* decorators for existing logger (`test.NewLocal` and `test.NewGlobal`) which basically just add the `test` hook +* decorators for existing logger (`test.NewLocal` and `test.NewGlobal`) which basically just adds the `test` hook * a test logger (`test.NewNullLogger`) that just records log messages (and does not output any): ```go @@ -465,7 +483,7 @@ func TestSomething(t*testing.T){ Logrus can register one or more functions that will be called when any `fatal` level message is logged. The registered handlers will be executed before -logrus performs a `os.Exit(1)`. This behavior may be helpful if callers need +logrus performs an `os.Exit(1)`. This behavior may be helpful if callers need to gracefully shutdown. Unlike a `panic("Something went wrong...")` call which can be intercepted with a deferred `recover` a call to `os.Exit(1)` can not be intercepted. ``` @@ -490,6 +508,6 @@ Situation when locking is not needed includes: 1) logger.Out is protected by locks. - 2) logger.Out is a os.File handler opened with `O_APPEND` flag, and every write is smaller than 4k. (This allow multi-thread/multi-process writing) + 2) logger.Out is an os.File handler opened with `O_APPEND` flag, and every write is smaller than 4k. (This allows multi-thread/multi-process writing) (Refer to http://www.notthewizard.com/2014/06/17/are-files-appends-really-atomic/) diff --git a/vendor/github.com/sirupsen/logrus/appveyor.yml b/vendor/github.com/sirupsen/logrus/appveyor.yml index 96c2ce1..df9d65c 100644 --- a/vendor/github.com/sirupsen/logrus/appveyor.yml +++ b/vendor/github.com/sirupsen/logrus/appveyor.yml @@ -1,14 +1,14 @@ -version: "{build}" -platform: x64 -clone_folder: c:\gopath\src\github.com\sirupsen\logrus -environment: - GOPATH: c:\gopath -branches: - only: - - master -install: - - set PATH=%GOPATH%\bin;c:\go\bin;%PATH% - - go version -build_script: - - go get -t - - go test +version: "{build}" +platform: x64 +clone_folder: c:\gopath\src\github.com\sirupsen\logrus +environment: + GOPATH: c:\gopath +branches: + only: + - master +install: + - set PATH=%GOPATH%\bin;c:\go\bin;%PATH% + - go version +build_script: + - go get -t + - go test diff --git a/vendor/github.com/sirupsen/logrus/buffer_pool.go b/vendor/github.com/sirupsen/logrus/buffer_pool.go new file mode 100644 index 0000000..4545dec --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/buffer_pool.go @@ -0,0 +1,52 @@ +package logrus + +import ( + "bytes" + "sync" +) + +var ( + bufferPool BufferPool +) + +type BufferPool interface { + Put(*bytes.Buffer) + Get() *bytes.Buffer +} + +type defaultPool struct { + pool *sync.Pool +} + +func (p *defaultPool) Put(buf *bytes.Buffer) { + p.pool.Put(buf) +} + +func (p *defaultPool) Get() *bytes.Buffer { + return p.pool.Get().(*bytes.Buffer) +} + +func getBuffer() *bytes.Buffer { + return bufferPool.Get() +} + +func putBuffer(buf *bytes.Buffer) { + buf.Reset() + bufferPool.Put(buf) +} + +// SetBufferPool allows to replace the default logrus buffer pool +// to better meets the specific needs of an application. 
+func SetBufferPool(bp BufferPool) { + bufferPool = bp +} + +func init() { + SetBufferPool(&defaultPool{ + pool: &sync.Pool{ + New: func() interface{} { + return new(bytes.Buffer) + }, + }, + }) +} diff --git a/vendor/github.com/sirupsen/logrus/entry.go b/vendor/github.com/sirupsen/logrus/entry.go index 63e2558..5a5cbfe 100644 --- a/vendor/github.com/sirupsen/logrus/entry.go +++ b/vendor/github.com/sirupsen/logrus/entry.go @@ -13,7 +13,6 @@ import ( ) var ( - bufferPool *sync.Pool // qualified package name, cached at first use logrusPackage string @@ -31,12 +30,6 @@ const ( ) func init() { - bufferPool = &sync.Pool{ - New: func() interface{} { - return new(bytes.Buffer) - }, - } - // start at the bottom of the stack before the package-name cache is primed minimumCallerDepth = 1 } @@ -85,10 +78,15 @@ func NewEntry(logger *Logger) *Entry { } } +// Returns the bytes representation of this entry from the formatter. +func (entry *Entry) Bytes() ([]byte, error) { + return entry.Logger.Formatter.Format(entry) +} + // Returns the string representation from the reader and ultimately the // formatter. func (entry *Entry) String() (string, error) { - serialized, err := entry.Logger.Formatter.Format(entry) + serialized, err := entry.Bytes() if err != nil { return "", err } @@ -103,7 +101,11 @@ func (entry *Entry) WithError(err error) *Entry { // Add a context to the Entry. func (entry *Entry) WithContext(ctx context.Context) *Entry { - return &Entry{Logger: entry.Logger, Data: entry.Data, Time: entry.Time, err: entry.err, Context: ctx} + dataCopy := make(Fields, len(entry.Data)) + for k, v := range entry.Data { + dataCopy[k] = v + } + return &Entry{Logger: entry.Logger, Data: dataCopy, Time: entry.Time, err: entry.err, Context: ctx} } // Add a single field to the Entry. @@ -144,7 +146,11 @@ func (entry *Entry) WithFields(fields Fields) *Entry { // Overrides the time of the Entry. 
func (entry *Entry) WithTime(t time.Time) *Entry { - return &Entry{Logger: entry.Logger, Data: entry.Data, Time: t, err: entry.err, Context: entry.Context} + dataCopy := make(Fields, len(entry.Data)) + for k, v := range entry.Data { + dataCopy[k] = v + } + return &Entry{Logger: entry.Logger, Data: dataCopy, Time: t, err: entry.err, Context: entry.Context} } // getPackageName reduces a fully qualified function name to the package name @@ -165,15 +171,20 @@ func getPackageName(f string) string { // getCaller retrieves the name of the first non-logrus calling function func getCaller() *runtime.Frame { - // cache this package's fully-qualified name callerInitOnce.Do(func() { - pcs := make([]uintptr, 2) + pcs := make([]uintptr, maximumCallerDepth) _ = runtime.Callers(0, pcs) - logrusPackage = getPackageName(runtime.FuncForPC(pcs[1]).Name()) - // now that we have the cache, we can skip a minimum count of known-logrus functions - // XXX this is dubious, the number of frames may vary + // dynamic get the package name and the minimum caller depth + for i := 0; i < maximumCallerDepth; i++ { + funcName := runtime.FuncForPC(pcs[i]).Name() + if strings.Contains(funcName, "getCaller") { + logrusPackage = getPackageName(funcName) + break + } + } + minimumCallerDepth = knownLogrusFrames }) @@ -187,7 +198,7 @@ func getCaller() *runtime.Frame { // If the caller isn't part of this package, we're done if pkg != logrusPackage { - return &f + return &f //nolint:scopelint } } @@ -217,15 +228,20 @@ func (entry Entry) log(level Level, msg string) { entry.Level = level entry.Message = msg + entry.Logger.mu.Lock() if entry.Logger.ReportCaller { entry.Caller = getCaller() } + entry.Logger.mu.Unlock() entry.fireHooks() - buffer = bufferPool.Get().(*bytes.Buffer) + buffer = getBuffer() + defer func() { + entry.Buffer = nil + putBuffer(buffer) + }() buffer.Reset() - defer bufferPool.Put(buffer) entry.Buffer = buffer entry.write() @@ -255,11 +271,10 @@ func (entry *Entry) write() { serialized, err := entry.Logger.Formatter.Format(entry) if err != nil { fmt.Fprintf(os.Stderr, "Failed to obtain reader, %v\n", err) - } else { - _, err = entry.Logger.Out.Write(serialized) - if err != nil { - fmt.Fprintf(os.Stderr, "Failed to write to log, %v\n", err) - } + return + } + if _, err = entry.Logger.Out.Write(serialized); err != nil { + fmt.Fprintf(os.Stderr, "Failed to write to log, %v\n", err) } } diff --git a/vendor/github.com/sirupsen/logrus/exported.go b/vendor/github.com/sirupsen/logrus/exported.go index 62fc2f2..017c30c 100644 --- a/vendor/github.com/sirupsen/logrus/exported.go +++ b/vendor/github.com/sirupsen/logrus/exported.go @@ -80,7 +80,7 @@ func WithFields(fields Fields) *Entry { return std.WithFields(fields) } -// WithTime creats an entry from the standard logger and overrides the time of +// WithTime creates an entry from the standard logger and overrides the time of // logs generated with it. // // Note that it doesn't log until you call Debug, Print, Info, Warn, Fatal @@ -134,6 +134,51 @@ func Fatal(args ...interface{}) { std.Fatal(args...) } +// TraceFn logs a message from a func at level Trace on the standard logger. +func TraceFn(fn LogFunction) { + std.TraceFn(fn) +} + +// DebugFn logs a message from a func at level Debug on the standard logger. +func DebugFn(fn LogFunction) { + std.DebugFn(fn) +} + +// PrintFn logs a message from a func at level Info on the standard logger. +func PrintFn(fn LogFunction) { + std.PrintFn(fn) +} + +// InfoFn logs a message from a func at level Info on the standard logger. 
+func InfoFn(fn LogFunction) { + std.InfoFn(fn) +} + +// WarnFn logs a message from a func at level Warn on the standard logger. +func WarnFn(fn LogFunction) { + std.WarnFn(fn) +} + +// WarningFn logs a message from a func at level Warn on the standard logger. +func WarningFn(fn LogFunction) { + std.WarningFn(fn) +} + +// ErrorFn logs a message from a func at level Error on the standard logger. +func ErrorFn(fn LogFunction) { + std.ErrorFn(fn) +} + +// PanicFn logs a message from a func at level Panic on the standard logger. +func PanicFn(fn LogFunction) { + std.PanicFn(fn) +} + +// FatalFn logs a message from a func at level Fatal on the standard logger then the process will exit with status set to 1. +func FatalFn(fn LogFunction) { + std.FatalFn(fn) +} + // Tracef logs a message at level Trace on the standard logger. func Tracef(format string, args ...interface{}) { std.Tracef(format, args...) diff --git a/vendor/github.com/sirupsen/logrus/go.mod b/vendor/github.com/sirupsen/logrus/go.mod index 12fdf98..b3919d5 100644 --- a/vendor/github.com/sirupsen/logrus/go.mod +++ b/vendor/github.com/sirupsen/logrus/go.mod @@ -2,9 +2,9 @@ module github.com/sirupsen/logrus require ( github.com/davecgh/go-spew v1.1.1 // indirect - github.com/konsorten/go-windows-terminal-sequences v1.0.1 github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/stretchr/objx v0.1.1 // indirect github.com/stretchr/testify v1.2.2 - golang.org/x/sys v0.0.0-20190422165155-953cdadca894 + golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 ) + +go 1.13 diff --git a/vendor/github.com/sirupsen/logrus/go.sum b/vendor/github.com/sirupsen/logrus/go.sum index 596c318..1edc143 100644 --- a/vendor/github.com/sirupsen/logrus/go.sum +++ b/vendor/github.com/sirupsen/logrus/go.sum @@ -1,16 +1,10 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/konsorten/go-windows-terminal-sequences v0.0.0-20180402223658-b729f2633dfe h1:CHRGQ8V7OlCYtwaKPJi3iA7J+YdNKdo8j7nG5IgDhjs= -github.com/konsorten/go-windows-terminal-sequences v0.0.0-20180402223658-b729f2633dfe/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk= -github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/stretchr/objx v0.1.1 h1:2vfRuCMp5sSVIDSqO8oNnWJq7mPa6KVP3iPIwFBuy8A= -github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33 h1:I6FyU15t786LL7oL/hn43zqTuEGr4PN7F4XJ1p4E3Y8= -golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190422165155-953cdadca894 h1:Cz4ceDQGXuKRnVBDTS23GTn/pU5OE2C0WrNTOYK1Uuc= golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 h1:YyJpGZS1sBuBCzLAR1VEpK193GlqGZbnPFnPV/5Rsb4= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= diff --git a/vendor/github.com/sirupsen/logrus/json_formatter.go b/vendor/github.com/sirupsen/logrus/json_formatter.go index 098a21a..ba7f237 100644 --- a/vendor/github.com/sirupsen/logrus/json_formatter.go +++ b/vendor/github.com/sirupsen/logrus/json_formatter.go @@ -28,6 +28,9 @@ type JSONFormatter struct { // DisableTimestamp allows disabling automatic timestamps in output DisableTimestamp bool + // DisableHTMLEscape allows disabling html escaping in output + DisableHTMLEscape bool + // DataKey allows users to put all the log entry parameters into a nested dictionary at a given key. DataKey string @@ -110,6 +113,7 @@ func (f *JSONFormatter) Format(entry *Entry) ([]byte, error) { } encoder := json.NewEncoder(b) + encoder.SetEscapeHTML(!f.DisableHTMLEscape) if f.PrettyPrint { encoder.SetIndent("", " ") } diff --git a/vendor/github.com/sirupsen/logrus/logger.go b/vendor/github.com/sirupsen/logrus/logger.go index c0c0b1e..dbf627c 100644 --- a/vendor/github.com/sirupsen/logrus/logger.go +++ b/vendor/github.com/sirupsen/logrus/logger.go @@ -9,6 +9,11 @@ import ( "time" ) +// LogFunction For big messages, it can be more efficient to pass a function +// and only call it if the log level is actually enables rather than +// generating the log message and then checking if the level is enabled +type LogFunction func()[]interface{} + type Logger struct { // The logs are `io.Copy`'d to this in a mutex. It's common to set this to a // file, or leave it default which is `os.Stderr`. You can also set this to @@ -68,10 +73,10 @@ func (mw *MutexWrap) Disable() { // `Out` and `Hooks` directly on the default logger instance. You can also just // instantiate your own: // -// var log = &Logger{ +// var log = &logrus.Logger{ // Out: os.Stderr, -// Formatter: new(JSONFormatter), -// Hooks: make(LevelHooks), +// Formatter: new(logrus.TextFormatter), +// Hooks: make(logrus.LevelHooks), // Level: logrus.DebugLevel, // } // @@ -100,8 +105,9 @@ func (logger *Logger) releaseEntry(entry *Entry) { logger.entryPool.Put(entry) } -// Adds a field to the log entry, note that it doesn't log until you call -// Debug, Print, Info, Warn, Error, Fatal or Panic. It only creates a log entry. +// WithField allocates a new entry and adds a field to it. +// Debug, Print, Info, Warn, Error, Fatal or Panic must be then applied to +// this new returned entry. // If you want multiple fields, use `WithFields`. func (logger *Logger) WithField(key string, value interface{}) *Entry { entry := logger.newEntry() @@ -194,6 +200,14 @@ func (logger *Logger) Log(level Level, args ...interface{}) { } } +func (logger *Logger) LogFn(level Level, fn LogFunction) { + if logger.IsLevelEnabled(level) { + entry := logger.newEntry() + entry.Log(level, fn()...) + logger.releaseEntry(entry) + } +} + func (logger *Logger) Trace(args ...interface{}) { logger.Log(TraceLevel, args...) } @@ -233,6 +247,45 @@ func (logger *Logger) Panic(args ...interface{}) { logger.Log(PanicLevel, args...) } +func (logger *Logger) TraceFn(fn LogFunction) { + logger.LogFn(TraceLevel, fn) +} + +func (logger *Logger) DebugFn(fn LogFunction) { + logger.LogFn(DebugLevel, fn) +} + +func (logger *Logger) InfoFn(fn LogFunction) { + logger.LogFn(InfoLevel, fn) +} + +func (logger *Logger) PrintFn(fn LogFunction) { + entry := logger.newEntry() + entry.Print(fn()...) 
+ logger.releaseEntry(entry) +} + +func (logger *Logger) WarnFn(fn LogFunction) { + logger.LogFn(WarnLevel, fn) +} + +func (logger *Logger) WarningFn(fn LogFunction) { + logger.WarnFn(fn) +} + +func (logger *Logger) ErrorFn(fn LogFunction) { + logger.LogFn(ErrorLevel, fn) +} + +func (logger *Logger) FatalFn(fn LogFunction) { + logger.LogFn(FatalLevel, fn) + logger.Exit(1) +} + +func (logger *Logger) PanicFn(fn LogFunction) { + logger.LogFn(PanicLevel, fn) +} + func (logger *Logger) Logln(level Level, args ...interface{}) { if logger.IsLevelEnabled(level) { entry := logger.newEntry() diff --git a/vendor/github.com/sirupsen/logrus/logrus.go b/vendor/github.com/sirupsen/logrus/logrus.go index 8644761..2f16224 100644 --- a/vendor/github.com/sirupsen/logrus/logrus.go +++ b/vendor/github.com/sirupsen/logrus/logrus.go @@ -51,7 +51,7 @@ func (level *Level) UnmarshalText(text []byte) error { return err } - *level = Level(l) + *level = l return nil } diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_bsd.go b/vendor/github.com/sirupsen/logrus/terminal_check_bsd.go index 3c4f43f..4997899 100644 --- a/vendor/github.com/sirupsen/logrus/terminal_check_bsd.go +++ b/vendor/github.com/sirupsen/logrus/terminal_check_bsd.go @@ -1,4 +1,5 @@ // +build darwin dragonfly freebsd netbsd openbsd +// +build !js package logrus @@ -10,4 +11,3 @@ func isTerminal(fd int) bool { _, err := unix.IoctlGetTermios(fd, ioctlReadTermios) return err == nil } - diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_js.go b/vendor/github.com/sirupsen/logrus/terminal_check_js.go new file mode 100644 index 0000000..ebdae3e --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/terminal_check_js.go @@ -0,0 +1,7 @@ +// +build js + +package logrus + +func isTerminal(fd int) bool { + return false +} diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_unix.go b/vendor/github.com/sirupsen/logrus/terminal_check_unix.go index 355dc96..cc4fe6e 100644 --- a/vendor/github.com/sirupsen/logrus/terminal_check_unix.go +++ b/vendor/github.com/sirupsen/logrus/terminal_check_unix.go @@ -1,4 +1,5 @@ // +build linux aix +// +build !js package logrus @@ -10,4 +11,3 @@ func isTerminal(fd int) bool { _, err := unix.IoctlGetTermios(fd, ioctlReadTermios) return err == nil } - diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_windows.go b/vendor/github.com/sirupsen/logrus/terminal_check_windows.go index 572889d..2879eb5 100644 --- a/vendor/github.com/sirupsen/logrus/terminal_check_windows.go +++ b/vendor/github.com/sirupsen/logrus/terminal_check_windows.go @@ -5,30 +5,23 @@ package logrus import ( "io" "os" - "syscall" - sequences "github.com/konsorten/go-windows-terminal-sequences" + "golang.org/x/sys/windows" ) -func initTerminal(w io.Writer) { - switch v := w.(type) { - case *os.File: - sequences.EnableVirtualTerminalProcessing(syscall.Handle(v.Fd()), true) - } -} - func checkIfTerminal(w io.Writer) bool { - var ret bool switch v := w.(type) { case *os.File: + handle := windows.Handle(v.Fd()) var mode uint32 - err := syscall.GetConsoleMode(syscall.Handle(v.Fd()), &mode) - ret = (err == nil) - default: - ret = false - } - if ret { - initTerminal(w) + if err := windows.GetConsoleMode(handle, &mode); err != nil { + return false + } + mode |= windows.ENABLE_VIRTUAL_TERMINAL_PROCESSING + if err := windows.SetConsoleMode(handle, mode); err != nil { + return false + } + return true } - return ret + return false } diff --git a/vendor/github.com/sirupsen/logrus/text_formatter.go 
b/vendor/github.com/sirupsen/logrus/text_formatter.go index e01587c..3c28b54 100644 --- a/vendor/github.com/sirupsen/logrus/text_formatter.go +++ b/vendor/github.com/sirupsen/logrus/text_formatter.go @@ -6,9 +6,11 @@ import ( "os" "runtime" "sort" + "strconv" "strings" "sync" "time" + "unicode/utf8" ) const ( @@ -32,6 +34,14 @@ type TextFormatter struct { // Force disabling colors. DisableColors bool + // Force quoting of all values + ForceQuote bool + + // DisableQuote disables quoting for all values. + // DisableQuote will have a lower priority than ForceQuote. + // If both of them are set to true, quote will be forced on all values. + DisableQuote bool + // Override coloring based on CLICOLOR and CLICOLOR_FORCE. - https://bixense.com/clicolors/ EnvironmentOverrideColors bool @@ -57,6 +67,10 @@ type TextFormatter struct { // Disables the truncation of the level text to 4 characters. DisableLevelTruncation bool + // PadLevelText Adds padding the level text so that all the levels output at the same length + // PadLevelText is a superset of the DisableLevelTruncation option + PadLevelText bool + // QuoteEmptyFields will wrap empty fields in quotes if true QuoteEmptyFields bool @@ -79,23 +93,32 @@ type TextFormatter struct { CallerPrettyfier func(*runtime.Frame) (function string, file string) terminalInitOnce sync.Once + + // The max length of the level text, generated dynamically on init + levelTextMaxLength int } func (f *TextFormatter) init(entry *Entry) { if entry.Logger != nil { f.isTerminal = checkIfTerminal(entry.Logger.Out) } + // Get the max length of the level text + for _, level := range AllLevels { + levelTextLength := utf8.RuneCount([]byte(level.String())) + if levelTextLength > f.levelTextMaxLength { + f.levelTextMaxLength = levelTextLength + } + } } func (f *TextFormatter) isColored() bool { isColored := f.ForceColors || (f.isTerminal && (runtime.GOOS != "windows")) if f.EnvironmentOverrideColors { - if force, ok := os.LookupEnv("CLICOLOR_FORCE"); ok && force != "0" { + switch force, ok := os.LookupEnv("CLICOLOR_FORCE"); { + case ok && force != "0": isColored = true - } else if ok && force == "0" { - isColored = false - } else if os.Getenv("CLICOLOR") == "0" { + case ok && force == "0", os.Getenv("CLICOLOR") == "0": isColored = false } } @@ -217,9 +240,18 @@ func (f *TextFormatter) printColored(b *bytes.Buffer, entry *Entry, keys []strin } levelText := strings.ToUpper(entry.Level.String()) - if !f.DisableLevelTruncation { + if !f.DisableLevelTruncation && !f.PadLevelText { levelText = levelText[0:4] } + if f.PadLevelText { + // Generates the format string used in the next line, for example "%-6s" or "%-7s". + // Based on the max level text length. 
+ formatString := "%-" + strconv.Itoa(f.levelTextMaxLength) + "s" + // Formats the level text by appending spaces up to the max length, for example: + // - "INFO " + // - "WARNING" + levelText = fmt.Sprintf(formatString, levelText) + } // Remove a single newline if it already exists in the message to keep // the behavior of logrus text_formatter the same as the stdlib log package @@ -243,11 +275,12 @@ func (f *TextFormatter) printColored(b *bytes.Buffer, entry *Entry, keys []strin } } - if f.DisableTimestamp { + switch { + case f.DisableTimestamp: fmt.Fprintf(b, "\x1b[%dm%s\x1b[0m%s %-44s ", levelColor, levelText, caller, entry.Message) - } else if !f.FullTimestamp { + case !f.FullTimestamp: fmt.Fprintf(b, "\x1b[%dm%s\x1b[0m[%04d]%s %-44s ", levelColor, levelText, int(entry.Time.Sub(baseTimestamp)/time.Second), caller, entry.Message) - } else { + default: fmt.Fprintf(b, "\x1b[%dm%s\x1b[0m[%s]%s %-44s ", levelColor, levelText, entry.Time.Format(timestampFormat), caller, entry.Message) } for _, k := range keys { @@ -258,9 +291,15 @@ func (f *TextFormatter) printColored(b *bytes.Buffer, entry *Entry, keys []strin } func (f *TextFormatter) needsQuoting(text string) bool { + if f.ForceQuote { + return true + } if f.QuoteEmptyFields && len(text) == 0 { return true } + if f.DisableQuote { + return false + } for _, ch := range text { if !((ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') || diff --git a/vendor/github.com/sirupsen/logrus/writer.go b/vendor/github.com/sirupsen/logrus/writer.go index 9e1f751..72e8e3a 100644 --- a/vendor/github.com/sirupsen/logrus/writer.go +++ b/vendor/github.com/sirupsen/logrus/writer.go @@ -6,10 +6,16 @@ import ( "runtime" ) +// Writer at INFO level. See WriterLevel for details. func (logger *Logger) Writer() *io.PipeWriter { return logger.WriterLevel(InfoLevel) } +// WriterLevel returns an io.Writer that can be used to write arbitrary text to +// the logger at the given log level. Each line written to the writer will be +// printed in the usual way using formatters and hooks. The writer is part of an +// io.Pipe and it is the callers responsibility to close the writer when done. +// This can be used to override the standard library logger easily. 
func (logger *Logger) WriterLevel(level Level) *io.PipeWriter { return NewEntry(logger).WriterLevel(level) } diff --git a/vendor/github.com/sonatard/noctx/.gitignore b/vendor/github.com/sonatard/noctx/.gitignore new file mode 100644 index 0000000..2d83068 --- /dev/null +++ b/vendor/github.com/sonatard/noctx/.gitignore @@ -0,0 +1 @@ +coverage.out diff --git a/vendor/github.com/sonatard/noctx/.golangci.yml b/vendor/github.com/sonatard/noctx/.golangci.yml new file mode 100644 index 0000000..1580acd --- /dev/null +++ b/vendor/github.com/sonatard/noctx/.golangci.yml @@ -0,0 +1,20 @@ +run: + +linters-settings: + govet: + enable-all: true + +linters: + enable-all: true + disable: + - gochecknoglobals + - gomnd + - gocognit + - nestif + +issues: + exclude-rules: + - path: reqwithoutctx/ssa.go + text: "Consider preallocating `exts`" + linters: + - prealloc diff --git a/vendor/github.com/sonatard/noctx/LICENSE b/vendor/github.com/sonatard/noctx/LICENSE new file mode 100644 index 0000000..a00d572 --- /dev/null +++ b/vendor/github.com/sonatard/noctx/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 sonatard + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/sonatard/noctx/Makefile b/vendor/github.com/sonatard/noctx/Makefile new file mode 100644 index 0000000..1a27f6b --- /dev/null +++ b/vendor/github.com/sonatard/noctx/Makefile @@ -0,0 +1,16 @@ +.PHONY: all imports test lint + +all: imports test lint + +imports: + goimports -w ./ + +test: + go test -race ./... + +test_coverage: + go test -race -coverprofile=coverage.out -covermode=atomic ./... + +lint: + golangci-lint run ./... + diff --git a/vendor/github.com/sonatard/noctx/README.md b/vendor/github.com/sonatard/noctx/README.md new file mode 100644 index 0000000..bfe9782 --- /dev/null +++ b/vendor/github.com/sonatard/noctx/README.md @@ -0,0 +1,95 @@ +# noctx + +![](https://github.com/sonatard/noctx/workflows/.github/workflows/ci.yml/badge.svg) + +`noctx` finds sending http request without context.Context. + +You should use `noctx` if sending http request in your library. +Passing `context.Context` enables library user to cancel http request, getting trace information and so on. 
+ +## Install + +```sh +$ go get -u github.com/sonatard/noctx/cmd/noctx +``` + +## Usage + +```sh +$ go vet -vettool=`which noctx` main.go +./main.go:6:11: net/http.Get must not be called +``` + +## Detection rules +- Executing following functions + - `net/http.Get` + - `net/http.Head` + - `net/http.Post` + - `net/http.PostForm` + - `(*net/http.Client).Get` + - `(*net/http.Client).Head` + - `(*net/http.Client).Post` + - `(*net/http.Client).PostForm` +- `http.Request` returned by `http.NewRequest` function and passes it to other function. + +## How to fix +- Send http request using `(*http.Client).Do(*http.Request)` method. +- In Go 1.13 and later, use `http.NewRequestWithContext` function instead of using `http.NewRequest` function. +- In Go 1.12 and earlier, call `(http.Request).WithContext(ctx)` after `http.NewRequest`. + +`(http.Request).WithContext(ctx)` has a disadvantage of performance because it returns a copy of `http.Request`. Use `http.NewRequestWithContext` function if you only support Go1.13 or later. + +## Sample Code + +```go +package main + +import ( + "context" + "net/http" +) + +func main() { + const url = "http://example.com" + http.Get(url) // want `net/http\.Get must not be called` + http.Head(url) // want `net/http\.Head must not be called` + http.Post(url, "", nil) // want `net/http\.Post must not be called` + http.PostForm(url, nil) // want `net/http\.PostForm must not be called` + + cli := &http.Client{} + cli.Get(url) // want `\(\*net/http\.Client\)\.Get must not be called` + cli.Head(url) // want `\(\*net/http\.Client\)\.Head must not be called` + cli.Post(url, "", nil) // want `\(\*net/http\.Client\)\.Post must not be called` + cli.PostForm(url, nil) // want `\(\*net/http\.Client\)\.PostForm must not be called` + + req, _ := http.NewRequest(http.MethodPost, url, nil) // want `should rewrite http.NewRequestWithContext or add \(\*Request\).WithContext` + cli.Do(req) + + ctx := context.Background() + req2, _ := http.NewRequestWithContext(ctx, http.MethodPost, url, nil) // OK + cli.Do(req2) + + req3, _ := http.NewRequest(http.MethodPost, url, nil) // OK + req3 = req3.WithContext(ctx) + cli.Do(req3) + + f2 := func(req *http.Request, ctx context.Context) *http.Request { + return req + } + req4, _ := http.NewRequest(http.MethodPost, url, nil) // want `should rewrite http.NewRequestWithContext or add \(\*Request\).WithContext` + req4 = f2(req4, ctx) + cli.Do(req4) + + req5, _ := func() (*http.Request, error) { + return http.NewRequest(http.MethodPost, url, nil) // want `should rewrite http.NewRequestWithContext or add \(\*Request\).WithContext` + }() + cli.Do(req5) + +} +``` + +## Reference +- [net/http - NewRequest](https://golang.org/pkg/net/http/#NewRequest) +- [net/http - NewRequestWithContext](https://golang.org/pkg/net/http/#NewRequestWithContext) +- [net/http - Request.WithContext](https://golang.org/pkg/net/http/#Request.WithContext) + diff --git a/vendor/github.com/sonatard/noctx/go.mod b/vendor/github.com/sonatard/noctx/go.mod new file mode 100644 index 0000000..47b7901 --- /dev/null +++ b/vendor/github.com/sonatard/noctx/go.mod @@ -0,0 +1,8 @@ +module github.com/sonatard/noctx + +go 1.13 + +require ( + github.com/gostaticanalysis/analysisutil v0.0.3 + golang.org/x/tools v0.0.0-20200117220505-0cba7a3a9ee9 +) diff --git a/vendor/github.com/sonatard/noctx/go.sum b/vendor/github.com/sonatard/noctx/go.sum new file mode 100644 index 0000000..f8e5b07 --- /dev/null +++ b/vendor/github.com/sonatard/noctx/go.sum @@ -0,0 +1,16 @@ +github.com/gostaticanalysis/analysisutil 
v0.0.3 h1:iwp+5/UAyzQSFgQ4uR2sni99sJ8Eo9DEacKWM5pekIg= +github.com/gostaticanalysis/analysisutil v0.0.3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/tools v0.0.0-20190311215038-5c2858a9cfe5/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20200117220505-0cba7a3a9ee9 h1:KOkk4e2xd5OeCDJGwacvr75ICCbCsShrHiqPEdsA9hg= +golang.org/x/tools v0.0.0-20200117220505-0cba7a3a9ee9/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/vendor/github.com/sonatard/noctx/ngfunc/main.go b/vendor/github.com/sonatard/noctx/ngfunc/main.go new file mode 100644 index 0000000..cfeb0f0 --- /dev/null +++ b/vendor/github.com/sonatard/noctx/ngfunc/main.go @@ -0,0 +1,57 @@ +package ngfunc + +import ( + "go/types" + + "github.com/gostaticanalysis/analysisutil" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/buildssa" +) + +func Run(pass *analysis.Pass) (interface{}, error) { + ngFuncNames := []string{ + "net/http.Get", + "net/http.Head", + "net/http.Post", + "net/http.PostForm", + "(*net/http.Client).Get", + "(*net/http.Client).Head", + "(*net/http.Client).Post", + "(*net/http.Client).PostForm", + } + + ngFuncs := typeFuncs(pass, ngFuncNames) + if len(ngFuncs) == 0 { + return nil, nil + } + + reportFuncs := ngCalledFuncs(pass, ngFuncs) + report(pass, reportFuncs) + + return nil, nil +} + +func ngCalledFuncs(pass *analysis.Pass, ngFuncs []*types.Func) []*Report { + var reports []*Report + + srcFuncs := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs + for _, sf := range srcFuncs { + for _, b := range sf.Blocks { + for _, instr := range b.Instrs { + for _, ngFunc := range ngFuncs { + if analysisutil.Called(instr, nil, ngFunc) { + ngCalledFunc := &Report{ + Instruction: instr, + function: ngFunc, + } + reports = append(reports, ngCalledFunc) + + break + } + } + } + } + } + + return reports +} diff --git a/vendor/github.com/sonatard/noctx/ngfunc/report.go b/vendor/github.com/sonatard/noctx/ngfunc/report.go new file mode 100644 index 0000000..e500517 --- /dev/null +++ b/vendor/github.com/sonatard/noctx/ngfunc/report.go @@ -0,0 +1,29 @@ +package ngfunc + +import ( + "fmt" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/ssa" +) + +type Report struct { + Instruction ssa.Instruction + function *types.Func +} + +func (n *Report) 
Pos() token.Pos { + return n.Instruction.Pos() +} + +func (n *Report) Message() string { + return fmt.Sprintf("%s must not be called", n.function.FullName()) +} + +func report(pass *analysis.Pass, reports []*Report) { + for _, report := range reports { + pass.Reportf(report.Pos(), report.Message()) + } +} diff --git a/vendor/github.com/sonatard/noctx/ngfunc/types.go b/vendor/github.com/sonatard/noctx/ngfunc/types.go new file mode 100644 index 0000000..f187738 --- /dev/null +++ b/vendor/github.com/sonatard/noctx/ngfunc/types.go @@ -0,0 +1,65 @@ +package ngfunc + +import ( + "fmt" + "go/types" + "strings" + + "github.com/gostaticanalysis/analysisutil" + "golang.org/x/tools/go/analysis" +) + +var errNotFound = fmt.Errorf("function not found") + +func typeFuncs(pass *analysis.Pass, funcs []string) []*types.Func { + fs := make([]*types.Func, 0, len(funcs)) + + for _, fn := range funcs { + f, err := typeFunc(pass, fn) + if err != nil { + continue + } + + fs = append(fs, f) + } + + return fs +} + +func typeFunc(pass *analysis.Pass, funcName string) (*types.Func, error) { + ss := strings.Split(strings.TrimSpace(funcName), ".") + + switch len(ss) { + case 2: + // package function: pkgname.Func + f, ok := analysisutil.ObjectOf(pass, ss[0], ss[1]).(*types.Func) + if !ok || f == nil { + return nil, errNotFound + } + + return f, nil + case 3: + // method: (*pkgname.Type).Method + pkgname := strings.TrimLeft(ss[0], "(") + typename := strings.TrimRight(ss[1], ")") + + if pkgname != "" && pkgname[0] == '*' { + pkgname = pkgname[1:] + typename = "*" + typename + } + + typ := analysisutil.TypeOf(pass, pkgname, typename) + if typ == nil { + return nil, errNotFound + } + + m := analysisutil.MethodOf(typ, ss[2]) + if m == nil { + return nil, errNotFound + } + + return m, nil + } + + return nil, errNotFound +} diff --git a/vendor/github.com/sonatard/noctx/noctx.go b/vendor/github.com/sonatard/noctx/noctx.go new file mode 100644 index 0000000..478ad88 --- /dev/null +++ b/vendor/github.com/sonatard/noctx/noctx.go @@ -0,0 +1,31 @@ +package noctx + +import ( + "github.com/sonatard/noctx/ngfunc" + "github.com/sonatard/noctx/reqwithoutctx" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/buildssa" +) + +var Analyzer = &analysis.Analyzer{ + Name: "noctx", + Doc: Doc, + Run: run, + Requires: []*analysis.Analyzer{ + buildssa.Analyzer, + }, +} + +const Doc = "noctx finds sending http request without context.Context" + +func run(pass *analysis.Pass) (interface{}, error) { + if _, err := ngfunc.Run(pass); err != nil { + return nil, err + } + + if _, err := reqwithoutctx.Run(pass); err != nil { + return nil, err + } + + return nil, nil +} diff --git a/vendor/github.com/sonatard/noctx/reqwithoutctx/main.go b/vendor/github.com/sonatard/noctx/reqwithoutctx/main.go new file mode 100644 index 0000000..b09e1de --- /dev/null +++ b/vendor/github.com/sonatard/noctx/reqwithoutctx/main.go @@ -0,0 +1,14 @@ +package reqwithoutctx + +import ( + "golang.org/x/tools/go/analysis" +) + +func Run(pass *analysis.Pass) (interface{}, error) { + analyzer := NewAnalyzer(pass) + reports := analyzer.Exec() + + report(pass, reports) + + return nil, nil +} diff --git a/vendor/github.com/sonatard/noctx/reqwithoutctx/report.go b/vendor/github.com/sonatard/noctx/reqwithoutctx/report.go new file mode 100644 index 0000000..1c94e31 --- /dev/null +++ b/vendor/github.com/sonatard/noctx/reqwithoutctx/report.go @@ -0,0 +1,26 @@ +package reqwithoutctx + +import ( + "go/token" + + "golang.org/x/tools/go/analysis" + 
"golang.org/x/tools/go/ssa" +) + +type Report struct { + Instruction ssa.Instruction +} + +func (n *Report) Pos() token.Pos { + return n.Instruction.Pos() +} + +func (n *Report) Message() string { + return "should rewrite http.NewRequestWithContext or add (*Request).WithContext" +} + +func report(pass *analysis.Pass, reports []*Report) { + for _, report := range reports { + pass.Reportf(report.Pos(), report.Message()) + } +} diff --git a/vendor/github.com/sonatard/noctx/reqwithoutctx/ssa.go b/vendor/github.com/sonatard/noctx/reqwithoutctx/ssa.go new file mode 100644 index 0000000..3575126 --- /dev/null +++ b/vendor/github.com/sonatard/noctx/reqwithoutctx/ssa.go @@ -0,0 +1,180 @@ +package reqwithoutctx + +import ( + "go/types" + + "github.com/gostaticanalysis/analysisutil" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/buildssa" + "golang.org/x/tools/go/ssa" +) + +type Analyzer struct { + Funcs []*ssa.Function + newRequestType types.Type + requestType types.Type +} + +func NewAnalyzer(pass *analysis.Pass) *Analyzer { + newRequestType := analysisutil.TypeOf(pass, "net/http", "NewRequest") + requestType := analysisutil.TypeOf(pass, "net/http", "*Request") + + srcFuncs := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs + + return &Analyzer{ + Funcs: srcFuncs, + newRequestType: newRequestType, + requestType: requestType, + } +} + +func (a *Analyzer) Exec() []*Report { + if a.newRequestType == nil || a.requestType == nil { + return []*Report{} + } + + usedReqs := a.usedReqs() + newReqs := a.requestsByNewRequest() + + return a.report(usedReqs, newReqs) +} + +func (a *Analyzer) report(usedReqs map[string]*ssa.Extract, newReqs map[*ssa.Call]*ssa.Extract) []*Report { + var reports []*Report + + for _, fReq := range usedReqs { + for newRequest, req := range newReqs { + if fReq == req { + reports = append(reports, &Report{Instruction: newRequest}) + } + } + } + + return reports +} + +func (a *Analyzer) usedReqs() map[string]*ssa.Extract { + reqExts := make(map[string]*ssa.Extract) + + for _, f := range a.Funcs { + for _, b := range f.Blocks { + for _, instr := range b.Instrs { + switch i := instr.(type) { + case *ssa.Call: + exts := a.usedReqByCall(i) + for _, ext := range exts { + key := i.String() + ext.String() + reqExts[key] = ext + } + case *ssa.UnOp: + ext := a.usedReqByUnOp(i) + if ext != nil { + key := i.String() + ext.String() + reqExts[key] = ext + } + case *ssa.Return: + exts := a.usedReqByReturn(i) + for _, ext := range exts { + key := i.String() + ext.String() + reqExts[key] = ext + } + } + } + } + } + + return reqExts +} + +func (a *Analyzer) usedReqByCall(call *ssa.Call) []*ssa.Extract { + var exts []*ssa.Extract + + // skip net/http.Request method call + if call.Common().Signature().Recv() != nil && types.Identical(call.Value().Type(), a.requestType) { + return exts + } + + args := call.Common().Args + if len(args) == 0 { + return exts + } + + for _, arg := range args { + ext, ok := arg.(*ssa.Extract) + if !ok { + continue + } + + if !types.Identical(ext.Type(), a.requestType) { + continue + } + + exts = append(exts, ext) + } + + return exts +} + +func (a *Analyzer) usedReqByUnOp(op *ssa.UnOp) *ssa.Extract { + if ext, ok := op.X.(*ssa.Extract); ok && types.Identical(ext.Type(), a.requestType) { + return ext + } + + return nil +} + +func (a *Analyzer) usedReqByReturn(ret *ssa.Return) []*ssa.Extract { + rets := ret.Results + exts := make([]*ssa.Extract, 0, len(rets)) + + for _, ret := range rets { + ext, ok := ret.(*ssa.Extract) + if !ok { + 
continue + } + + if types.Identical(ext.Type(), a.requestType) { + exts = append(exts, ext) + } + } + + return exts +} + +func (a *Analyzer) requestsByNewRequest() map[*ssa.Call]*ssa.Extract { + reqs := make(map[*ssa.Call]*ssa.Extract) + + for _, f := range a.Funcs { + for _, b := range f.Blocks { + for _, instr := range b.Instrs { + ext, ok := instr.(*ssa.Extract) + if !ok { + continue + } + + if !types.Identical(ext.Type(), a.requestType) { + continue + } + + operands := ext.Operands([]*ssa.Value{}) + if len(operands) != 1 { + continue + } + + operand := *operands[0] + + f, ok := operand.(*ssa.Call) + if !ok { + continue + } + + if types.Identical(f.Call.Value.Type(), a.newRequestType) { + reqs[f] = ext + } + } + } + } + + return reqs +} diff --git a/vendor/github.com/sourcegraph/go-diff/diff/diff.go b/vendor/github.com/sourcegraph/go-diff/diff/diff.go index 646602a..0f465b9 100644 --- a/vendor/github.com/sourcegraph/go-diff/diff/diff.go +++ b/vendor/github.com/sourcegraph/go-diff/diff/diff.go @@ -1,10 +1,64 @@ package diff -import "bytes" +import ( + "bytes" + "time" +) -// NOTE: types are code-generated in diff.pb.go. +// A FileDiff represents a unified diff for a single file. +// +// A file unified diff has a header that resembles the following: +// +// --- oldname 2009-10-11 15:12:20.000000000 -0700 +// +++ newname 2009-10-11 15:12:30.000000000 -0700 +type FileDiff struct { + // the original name of the file + OrigName string + // the original timestamp (nil if not present) + OrigTime *time.Time + // the new name of the file (often same as OrigName) + NewName string + // the new timestamp (nil if not present) + NewTime *time.Time + // extended header lines (e.g., git's "new mode ", "rename from ", etc.) + Extended []string + // hunks that were changed from orig to new + Hunks []*Hunk +} -//go:generate protoc -I../../../.. -I ../../../../github.com/gogo/protobuf/protobuf -I. --gogo_out=. diff.proto +// A Hunk represents a series of changes (additions or deletions) in a file's +// unified diff. +type Hunk struct { + // starting line number in original file + OrigStartLine int32 + // number of lines the hunk applies to in the original file + OrigLines int32 + // if > 0, then the original file had a 'No newline at end of file' mark at this offset + OrigNoNewlineAt int32 + // starting line number in new file + NewStartLine int32 + // number of lines the hunk applies to in the new file + NewLines int32 + // optional section heading + Section string + // 0-indexed line offset in unified file diff (including section headers); this is + // only set when Hunks are read from entire file diff (i.e., when ReadAllHunks is + // called) This accounts for hunk headers, too, so the StartPosition of the first + // hunk will be 1. + StartPosition int32 + // hunk body (lines prefixed with '-', '+', or ' ') + Body []byte +} + +// A Stat is a diff stat that represents the number of lines added/changed/deleted. +type Stat struct { + // number of lines added + Added int32 + // number of lines changed + Changed int32 + // number of lines deleted + Deleted int32 +} // Stat computes the number of lines added/changed/deleted in all // hunks in this file's diff. @@ -54,10 +108,12 @@ func (h *Hunk) Stat() Stat { } var ( - hunkPrefix = []byte("@@ ") + hunkPrefix = []byte("@@ ") + onlyInMessagePrefix = []byte("Only in ") ) const hunkHeader = "@@ -%d,%d +%d,%d @@" +const onlyInMessage = "Only in %s: %s\n" // diffTimeParseLayout is the layout used to parse the time in unified diff file // header timestamps. 
diff --git a/vendor/github.com/sourcegraph/go-diff/diff/diff.pb.go b/vendor/github.com/sourcegraph/go-diff/diff/diff.pb.go deleted file mode 100644 index 2e7c27f..0000000 --- a/vendor/github.com/sourcegraph/go-diff/diff/diff.pb.go +++ /dev/null @@ -1,1059 +0,0 @@ -// Code generated by protoc-gen-gogo. -// source: diff.proto -// DO NOT EDIT! - -/* - Package diff is a generated protocol buffer package. - - It is generated from these files: - diff.proto - - It has these top-level messages: - FileDiff - Hunk - Stat -*/ -package diff - -import proto "github.com/gogo/protobuf/proto" -import fmt "fmt" -import math "math" - -// discarding unused import gogoproto "github.com/gogo/protobuf/gogoproto" -import pbtypes "sourcegraph.com/sqs/pbtypes" - -import io "io" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// A FileDiff represents a unified diff for a single file. -// -// A file unified diff has a header that resembles the following: -// -// --- oldname 2009-10-11 15:12:20.000000000 -0700 -// +++ newname 2009-10-11 15:12:30.000000000 -0700 -type FileDiff struct { - // the original name of the file - OrigName string `protobuf:"bytes,1,opt,name=OrigName,proto3" json:"OrigName,omitempty"` - // the original timestamp (nil if not present) - OrigTime *pbtypes.Timestamp `protobuf:"bytes,2,opt,name=OrigTime" json:"OrigTime,omitempty"` - // the new name of the file (often same as OrigName) - NewName string `protobuf:"bytes,3,opt,name=NewName,proto3" json:"NewName,omitempty"` - // the new timestamp (nil if not present) - NewTime *pbtypes.Timestamp `protobuf:"bytes,4,opt,name=NewTime" json:"NewTime,omitempty"` - // extended header lines (e.g., git's "new mode ", "rename from ", etc.) - Extended []string `protobuf:"bytes,5,rep,name=Extended" json:"Extended,omitempty"` - // hunks that were changed from orig to new - Hunks []*Hunk `protobuf:"bytes,6,rep,name=Hunks" json:"Hunks,omitempty"` -} - -func (m *FileDiff) Reset() { *m = FileDiff{} } -func (m *FileDiff) String() string { return proto.CompactTextString(m) } -func (*FileDiff) ProtoMessage() {} - -// A Hunk represents a series of changes (additions or deletions) in a file's -// unified diff. -type Hunk struct { - // starting line number in original file - OrigStartLine int32 `protobuf:"varint,1,opt,name=OrigStartLine,proto3" json:"OrigStartLine,omitempty"` - // number of lines the hunk applies to in the original file - OrigLines int32 `protobuf:"varint,2,opt,name=OrigLines,proto3" json:"OrigLines,omitempty"` - // if > 0, then the original file had a 'No newline at end of file' mark at this offset - OrigNoNewlineAt int32 `protobuf:"varint,3,opt,name=OrigNoNewlineAt,proto3" json:"OrigNoNewlineAt,omitempty"` - // starting line number in new file - NewStartLine int32 `protobuf:"varint,4,opt,name=NewStartLine,proto3" json:"NewStartLine,omitempty"` - // number of lines the hunk applies to in the new file - NewLines int32 `protobuf:"varint,5,opt,name=NewLines,proto3" json:"NewLines,omitempty"` - // optional section heading - Section string `protobuf:"bytes,6,opt,name=Section,proto3" json:"Section,omitempty"` - // 0-indexed line offset in unified file diff (including section headers); this is - // only set when Hunks are read from entire file diff (i.e., when ReadAllHunks is - // called) This accounts for hunk headers, too, so the StartPosition of the first - // hunk will be 1. 
- StartPosition int32 `protobuf:"varint,7,opt,name=StartPosition,proto3" json:"StartPosition,omitempty"` - // hunk body (lines prefixed with '-', '+', or ' ') - Body []byte `protobuf:"bytes,8,opt,name=Body,proto3" json:"Body,omitempty"` -} - -func (m *Hunk) Reset() { *m = Hunk{} } -func (m *Hunk) String() string { return proto.CompactTextString(m) } -func (*Hunk) ProtoMessage() {} - -// A Stat is a diff stat that represents the number of lines added/changed/deleted. -type Stat struct { - // number of lines added - Added int32 `protobuf:"varint,1,opt,name=Added,proto3" json:""` - // number of lines changed - Changed int32 `protobuf:"varint,2,opt,name=Changed,proto3" json:""` - // number of lines deleted - Deleted int32 `protobuf:"varint,3,opt,name=Deleted,proto3" json:""` -} - -func (m *Stat) Reset() { *m = Stat{} } -func (m *Stat) String() string { return proto.CompactTextString(m) } -func (*Stat) ProtoMessage() {} - -func (m *FileDiff) Marshal() (data []byte, err error) { - size := m.Size() - data = make([]byte, size) - n, err := m.MarshalTo(data) - if err != nil { - return nil, err - } - return data[:n], nil -} - -func (m *FileDiff) MarshalTo(data []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if len(m.OrigName) > 0 { - data[i] = 0xa - i++ - i = encodeVarintDiff(data, i, uint64(len(m.OrigName))) - i += copy(data[i:], m.OrigName) - } - if m.OrigTime != nil { - data[i] = 0x12 - i++ - i = encodeVarintDiff(data, i, uint64(m.OrigTime.Size())) - n1, err := m.OrigTime.MarshalTo(data[i:]) - if err != nil { - return 0, err - } - i += n1 - } - if len(m.NewName) > 0 { - data[i] = 0x1a - i++ - i = encodeVarintDiff(data, i, uint64(len(m.NewName))) - i += copy(data[i:], m.NewName) - } - if m.NewTime != nil { - data[i] = 0x22 - i++ - i = encodeVarintDiff(data, i, uint64(m.NewTime.Size())) - n2, err := m.NewTime.MarshalTo(data[i:]) - if err != nil { - return 0, err - } - i += n2 - } - if len(m.Extended) > 0 { - for _, s := range m.Extended { - data[i] = 0x2a - i++ - l = len(s) - for l >= 1<<7 { - data[i] = uint8(uint64(l)&0x7f | 0x80) - l >>= 7 - i++ - } - data[i] = uint8(l) - i++ - i += copy(data[i:], s) - } - } - if len(m.Hunks) > 0 { - for _, msg := range m.Hunks { - data[i] = 0x32 - i++ - i = encodeVarintDiff(data, i, uint64(msg.Size())) - n, err := msg.MarshalTo(data[i:]) - if err != nil { - return 0, err - } - i += n - } - } - return i, nil -} - -func (m *Hunk) Marshal() (data []byte, err error) { - size := m.Size() - data = make([]byte, size) - n, err := m.MarshalTo(data) - if err != nil { - return nil, err - } - return data[:n], nil -} - -func (m *Hunk) MarshalTo(data []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if m.OrigStartLine != 0 { - data[i] = 0x8 - i++ - i = encodeVarintDiff(data, i, uint64(m.OrigStartLine)) - } - if m.OrigLines != 0 { - data[i] = 0x10 - i++ - i = encodeVarintDiff(data, i, uint64(m.OrigLines)) - } - if m.OrigNoNewlineAt != 0 { - data[i] = 0x18 - i++ - i = encodeVarintDiff(data, i, uint64(m.OrigNoNewlineAt)) - } - if m.NewStartLine != 0 { - data[i] = 0x20 - i++ - i = encodeVarintDiff(data, i, uint64(m.NewStartLine)) - } - if m.NewLines != 0 { - data[i] = 0x28 - i++ - i = encodeVarintDiff(data, i, uint64(m.NewLines)) - } - if len(m.Section) > 0 { - data[i] = 0x32 - i++ - i = encodeVarintDiff(data, i, uint64(len(m.Section))) - i += copy(data[i:], m.Section) - } - if m.StartPosition != 0 { - data[i] = 0x38 - i++ - i = encodeVarintDiff(data, i, uint64(m.StartPosition)) - } - if m.Body != nil { - if len(m.Body) > 0 { - data[i] = 0x42 - i++ 
- i = encodeVarintDiff(data, i, uint64(len(m.Body))) - i += copy(data[i:], m.Body) - } - } - return i, nil -} - -func (m *Stat) Marshal() (data []byte, err error) { - size := m.Size() - data = make([]byte, size) - n, err := m.MarshalTo(data) - if err != nil { - return nil, err - } - return data[:n], nil -} - -func (m *Stat) MarshalTo(data []byte) (int, error) { - var i int - _ = i - var l int - _ = l - if m.Added != 0 { - data[i] = 0x8 - i++ - i = encodeVarintDiff(data, i, uint64(m.Added)) - } - if m.Changed != 0 { - data[i] = 0x10 - i++ - i = encodeVarintDiff(data, i, uint64(m.Changed)) - } - if m.Deleted != 0 { - data[i] = 0x18 - i++ - i = encodeVarintDiff(data, i, uint64(m.Deleted)) - } - return i, nil -} - -func encodeFixed64Diff(data []byte, offset int, v uint64) int { - data[offset] = uint8(v) - data[offset+1] = uint8(v >> 8) - data[offset+2] = uint8(v >> 16) - data[offset+3] = uint8(v >> 24) - data[offset+4] = uint8(v >> 32) - data[offset+5] = uint8(v >> 40) - data[offset+6] = uint8(v >> 48) - data[offset+7] = uint8(v >> 56) - return offset + 8 -} -func encodeFixed32Diff(data []byte, offset int, v uint32) int { - data[offset] = uint8(v) - data[offset+1] = uint8(v >> 8) - data[offset+2] = uint8(v >> 16) - data[offset+3] = uint8(v >> 24) - return offset + 4 -} -func encodeVarintDiff(data []byte, offset int, v uint64) int { - for v >= 1<<7 { - data[offset] = uint8(v&0x7f | 0x80) - v >>= 7 - offset++ - } - data[offset] = uint8(v) - return offset + 1 -} -func (m *FileDiff) Size() (n int) { - var l int - _ = l - l = len(m.OrigName) - if l > 0 { - n += 1 + l + sovDiff(uint64(l)) - } - if m.OrigTime != nil { - l = m.OrigTime.Size() - n += 1 + l + sovDiff(uint64(l)) - } - l = len(m.NewName) - if l > 0 { - n += 1 + l + sovDiff(uint64(l)) - } - if m.NewTime != nil { - l = m.NewTime.Size() - n += 1 + l + sovDiff(uint64(l)) - } - if len(m.Extended) > 0 { - for _, s := range m.Extended { - l = len(s) - n += 1 + l + sovDiff(uint64(l)) - } - } - if len(m.Hunks) > 0 { - for _, e := range m.Hunks { - l = e.Size() - n += 1 + l + sovDiff(uint64(l)) - } - } - return n -} - -func (m *Hunk) Size() (n int) { - var l int - _ = l - if m.OrigStartLine != 0 { - n += 1 + sovDiff(uint64(m.OrigStartLine)) - } - if m.OrigLines != 0 { - n += 1 + sovDiff(uint64(m.OrigLines)) - } - if m.OrigNoNewlineAt != 0 { - n += 1 + sovDiff(uint64(m.OrigNoNewlineAt)) - } - if m.NewStartLine != 0 { - n += 1 + sovDiff(uint64(m.NewStartLine)) - } - if m.NewLines != 0 { - n += 1 + sovDiff(uint64(m.NewLines)) - } - l = len(m.Section) - if l > 0 { - n += 1 + l + sovDiff(uint64(l)) - } - if m.StartPosition != 0 { - n += 1 + sovDiff(uint64(m.StartPosition)) - } - if m.Body != nil { - l = len(m.Body) - if l > 0 { - n += 1 + l + sovDiff(uint64(l)) - } - } - return n -} - -func (m *Stat) Size() (n int) { - var l int - _ = l - if m.Added != 0 { - n += 1 + sovDiff(uint64(m.Added)) - } - if m.Changed != 0 { - n += 1 + sovDiff(uint64(m.Changed)) - } - if m.Deleted != 0 { - n += 1 + sovDiff(uint64(m.Deleted)) - } - return n -} - -func sovDiff(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n -} -func sozDiff(x uint64) (n int) { - return sovDiff(uint64((x << 1) ^ uint64((int64(x) >> 63)))) -} -func (m *FileDiff) Unmarshal(data []byte) error { - l := len(data) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowDiff - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := data[iNdEx] - iNdEx++ - wire |= 
(uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: FileDiff: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: FileDiff: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field OrigName", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowDiff - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := data[iNdEx] - iNdEx++ - stringLen |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthDiff - } - postIndex := iNdEx + intStringLen - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.OrigName = string(data[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field OrigTime", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowDiff - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := data[iNdEx] - iNdEx++ - msglen |= (int(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthDiff - } - postIndex := iNdEx + msglen - if postIndex > l { - return io.ErrUnexpectedEOF - } - if m.OrigTime == nil { - m.OrigTime = &pbtypes.Timestamp{} - } - if err := m.OrigTime.Unmarshal(data[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - case 3: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field NewName", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowDiff - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := data[iNdEx] - iNdEx++ - stringLen |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthDiff - } - postIndex := iNdEx + intStringLen - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.NewName = string(data[iNdEx:postIndex]) - iNdEx = postIndex - case 4: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field NewTime", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowDiff - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := data[iNdEx] - iNdEx++ - msglen |= (int(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthDiff - } - postIndex := iNdEx + msglen - if postIndex > l { - return io.ErrUnexpectedEOF - } - if m.NewTime == nil { - m.NewTime = &pbtypes.Timestamp{} - } - if err := m.NewTime.Unmarshal(data[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - case 5: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Extended", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowDiff - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := data[iNdEx] - iNdEx++ - stringLen |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthDiff - } - postIndex := iNdEx + intStringLen - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Extended = append(m.Extended, 
string(data[iNdEx:postIndex])) - iNdEx = postIndex - case 6: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Hunks", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowDiff - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := data[iNdEx] - iNdEx++ - msglen |= (int(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthDiff - } - postIndex := iNdEx + msglen - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Hunks = append(m.Hunks, &Hunk{}) - if err := m.Hunks[len(m.Hunks)-1].Unmarshal(data[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipDiff(data[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthDiff - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *Hunk) Unmarshal(data []byte) error { - l := len(data) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowDiff - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := data[iNdEx] - iNdEx++ - wire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: Hunk: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: Hunk: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field OrigStartLine", wireType) - } - m.OrigStartLine = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowDiff - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := data[iNdEx] - iNdEx++ - m.OrigStartLine |= (int32(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - case 2: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field OrigLines", wireType) - } - m.OrigLines = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowDiff - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := data[iNdEx] - iNdEx++ - m.OrigLines |= (int32(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - case 3: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field OrigNoNewlineAt", wireType) - } - m.OrigNoNewlineAt = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowDiff - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := data[iNdEx] - iNdEx++ - m.OrigNoNewlineAt |= (int32(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - case 4: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field NewStartLine", wireType) - } - m.NewStartLine = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowDiff - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := data[iNdEx] - iNdEx++ - m.NewStartLine |= (int32(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - case 5: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field NewLines", wireType) - } - m.NewLines = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowDiff - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := data[iNdEx] - iNdEx++ - m.NewLines |= 
(int32(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - case 6: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Section", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowDiff - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := data[iNdEx] - iNdEx++ - stringLen |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthDiff - } - postIndex := iNdEx + intStringLen - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Section = string(data[iNdEx:postIndex]) - iNdEx = postIndex - case 7: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field StartPosition", wireType) - } - m.StartPosition = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowDiff - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := data[iNdEx] - iNdEx++ - m.StartPosition |= (int32(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - case 8: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Body", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowDiff - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := data[iNdEx] - iNdEx++ - byteLen |= (int(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLengthDiff - } - postIndex := iNdEx + byteLen - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Body = append([]byte{}, data[iNdEx:postIndex]...) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipDiff(data[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthDiff - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *Stat) Unmarshal(data []byte) error { - l := len(data) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowDiff - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := data[iNdEx] - iNdEx++ - wire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: Stat: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: Stat: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field Added", wireType) - } - m.Added = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowDiff - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := data[iNdEx] - iNdEx++ - m.Added |= (int32(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - case 2: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field Changed", wireType) - } - m.Changed = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowDiff - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := data[iNdEx] - iNdEx++ - m.Changed |= (int32(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - case 3: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field Deleted", wireType) - } - m.Deleted = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - 
return ErrIntOverflowDiff - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := data[iNdEx] - iNdEx++ - m.Deleted |= (int32(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - default: - iNdEx = preIndex - skippy, err := skipDiff(data[iNdEx:]) - if err != nil { - return err - } - if skippy < 0 { - return ErrInvalidLengthDiff - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func skipDiff(data []byte) (n int, err error) { - l := len(data) - iNdEx := 0 - for iNdEx < l { - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowDiff - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := data[iNdEx] - iNdEx++ - wire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - wireType := int(wire & 0x7) - switch wireType { - case 0: - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowDiff - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - iNdEx++ - if data[iNdEx-1] < 0x80 { - break - } - } - return iNdEx, nil - case 1: - iNdEx += 8 - return iNdEx, nil - case 2: - var length int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowDiff - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := data[iNdEx] - iNdEx++ - length |= (int(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - iNdEx += length - if length < 0 { - return 0, ErrInvalidLengthDiff - } - return iNdEx, nil - case 3: - for { - var innerWire uint64 - var start int = iNdEx - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowDiff - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := data[iNdEx] - iNdEx++ - innerWire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - innerWireType := int(innerWire & 0x7) - if innerWireType == 4 { - break - } - next, err := skipDiff(data[start:]) - if err != nil { - return 0, err - } - iNdEx = start + next - } - return iNdEx, nil - case 4: - return iNdEx, nil - case 5: - iNdEx += 4 - return iNdEx, nil - default: - return 0, fmt.Errorf("proto: illegal wireType %d", wireType) - } - } - panic("unreachable") -} - -var ( - ErrInvalidLengthDiff = fmt.Errorf("proto: negative length found during unmarshaling") - ErrIntOverflowDiff = fmt.Errorf("proto: integer overflow") -) diff --git a/vendor/github.com/sourcegraph/go-diff/diff/diff.proto b/vendor/github.com/sourcegraph/go-diff/diff/diff.proto deleted file mode 100644 index 8868970..0000000 --- a/vendor/github.com/sourcegraph/go-diff/diff/diff.proto +++ /dev/null @@ -1,81 +0,0 @@ -syntax = "proto3"; -package diff; - -import "github.com/gogo/protobuf/gogoproto/gogo.proto"; -import "sourcegraph.com/sqs/pbtypes/timestamp.proto"; - -option (gogoproto.goproto_getters_all) = false; -option (gogoproto.unmarshaler_all) = true; -option (gogoproto.marshaler_all) = true; -option (gogoproto.sizer_all) = true; - -// A FileDiff represents a unified diff for a single file. 
-// -// A file unified diff has a header that resembles the following: -// -// --- oldname 2009-10-11 15:12:20.000000000 -0700 -// +++ newname 2009-10-11 15:12:30.000000000 -0700 -message FileDiff { - // the original name of the file - string OrigName = 1; - - // the original timestamp (nil if not present) - pbtypes.Timestamp OrigTime = 2; - - // the new name of the file (often same as OrigName) - string NewName = 3; - - // the new timestamp (nil if not present) - pbtypes.Timestamp NewTime = 4; - - // extended header lines (e.g., git's "new mode ", "rename from ", etc.) - repeated string Extended = 5; - - // hunks that were changed from orig to new - repeated Hunk Hunks = 6; -} - - -// A Hunk represents a series of changes (additions or deletions) in a file's -// unified diff. -message Hunk { - // starting line number in original file - int32 OrigStartLine = 1; - - // number of lines the hunk applies to in the original file - int32 OrigLines = 2; - - // if > 0, then the original file had a 'No newline at end of file' mark at this offset - int32 OrigNoNewlineAt = 3; - - // starting line number in new file - int32 NewStartLine = 4; - - // number of lines the hunk applies to in the new file - int32 NewLines = 5; - - // optional section heading - string Section = 6; - - // 0-indexed line offset in unified file diff (including section headers); this is - // only set when Hunks are read from entire file diff (i.e., when ReadAllHunks is - // called) This accounts for hunk headers, too, so the StartPosition of the first - // hunk will be 1. - int32 StartPosition = 7; - - // hunk body (lines prefixed with '-', '+', or ' ') - bytes Body = 8; -} - -// A Stat is a diff stat that represents the number of lines added/changed/deleted. -message Stat { - // number of lines added - int32 Added = 1 [(gogoproto.jsontag) = ""]; - - // number of lines changed - int32 Changed = 2 [(gogoproto.jsontag) = ""]; - - // number of lines deleted - int32 Deleted = 3 [(gogoproto.jsontag) = ""]; -} - diff --git a/vendor/github.com/sourcegraph/go-diff/diff/parse.go b/vendor/github.com/sourcegraph/go-diff/diff/parse.go index 1e77df4..8d5cfc2 100644 --- a/vendor/github.com/sourcegraph/go-diff/diff/parse.go +++ b/vendor/github.com/sourcegraph/go-diff/diff/parse.go @@ -6,15 +6,16 @@ import ( "errors" "fmt" "io" + "path/filepath" + "strconv" "strings" "time" - - "sourcegraph.com/sqs/pbtypes" ) -// ParseMultiFileDiff parses a multi-file unified diff. It returns an error if parsing failed as a whole, but does its -// best to parse as many files in the case of per-file errors. In the case of non-fatal per-file errors, the error -// return value is null and the Errs field in the returned MultiFileDiff is set. +// ParseMultiFileDiff parses a multi-file unified diff. It returns an error if +// parsing failed as a whole, but does its best to parse as many files in the +// case of per-file errors. If it cannot detect when the diff of the next file +// begins, the hunks are added to the FileDiff of the previous file. 
func ParseMultiFileDiff(diff []byte) ([]*FileDiff, error) { return NewMultiFileDiffReader(bytes.NewReader(diff)).ReadAllFiles() } @@ -60,6 +61,7 @@ func (r *MultiFileDiffReader) ReadFile() (*FileDiff, error) { if e.Err == ErrNoFileHeader || e.Err == ErrExtendedHeadersEOF { return nil, io.EOF } + return nil, err case OverflowError: r.nextFileFirstLine = []byte(e) @@ -70,6 +72,12 @@ func (r *MultiFileDiffReader) ReadFile() (*FileDiff, error) { } } + // FileDiff is added/deleted file + // No further collection of hunks needed + if fd.NewName == "" { + return fd, nil + } + // Before reading hunks, check to see if there are any. If there // aren't any, and there's another file after this file in the // diff, then the hunks reader will complain ErrNoHunkHeader. It's @@ -78,7 +86,7 @@ func (r *MultiFileDiffReader) ReadFile() (*FileDiff, error) { // need to perform the check here. hr := fr.HunksReader() line, err := readLine(r.reader) - if err != nil { + if err != nil && err != io.EOF { return fd, err } line = bytes.TrimSuffix(line, []byte{'\n'}) @@ -196,12 +204,10 @@ func (r *FileDiffReader) ReadAllHeaders() (*FileDiff, error) { return nil, err } if origTime != nil { - ts := pbtypes.NewTimestamp(*origTime) - fd.OrigTime = &ts + fd.OrigTime = origTime } if newTime != nil { - ts := pbtypes.NewTimestamp(*newTime) - fd.NewTime = &ts + fd.NewTime = newTime } return fd, nil @@ -221,8 +227,16 @@ func (r *FileDiffReader) HunksReader() *HunksReader { // ReadFileHeaders reads the unified file diff header (the lines that // start with "---" and "+++" with the orig/new file names and -// timestamps). +// timestamps). Or which starts with "Only in " with dir path and filename. +// "Only in" message is supported in POSIX locale: https://pubs.opengroup.org/onlinepubs/9699919799/utilities/diff.html#tag_20_34_10 func (r *FileDiffReader) ReadFileHeaders() (origName, newName string, origTimestamp, newTimestamp *time.Time, err error) { + if r.fileHeaderLine != nil { + if isOnlyMessage, source, filename := parseOnlyInMessage(r.fileHeaderLine); isOnlyMessage { + return filepath.Join(string(source), string(filename)), + "", nil, nil, nil + } + } + origName, origTimestamp, err = r.readOneFileHeader([]byte("--- ")) if err != nil { return "", "", nil, nil, err @@ -233,6 +247,15 @@ func (r *FileDiffReader) ReadFileHeaders() (origName, newName string, origTimest return "", "", nil, nil, err } + unquotedOrigName, err := strconv.Unquote(origName) + if err == nil { + origName = unquotedOrigName + } + unquotedNewName, err := strconv.Unquote(newName) + if err == nil { + newName = unquotedNewName + } + return origName, newName, origTimestamp, newTimestamp, nil } @@ -282,7 +305,7 @@ func (r *FileDiffReader) readOneFileHeader(prefix []byte) (filename string, time type OverflowError string func (e OverflowError) Error() string { - return fmt.Sprintf("overflowed into next file: %s", e) + return fmt.Sprintf("overflowed into next file: %s", string(e)) } // ReadExtendedHeaders reads the extended header lines, if any, from a @@ -319,6 +342,12 @@ func (r *FileDiffReader) ReadExtendedHeaders() ([]string, error) { return xheaders, nil } + // Reached message that file is added/deleted + if isOnlyInMessage, _, _ := parseOnlyInMessage(line); isOnlyInMessage { + r.fileHeaderLine = line // pass to readOneFileHeader (see fileHeaderLine field doc) + return xheaders, nil + } + r.line++ r.offset += int64(len(line)) xheaders = append(xheaders, string(line)) @@ -328,31 +357,59 @@ func (r *FileDiffReader) ReadExtendedHeaders() ([]string, error) { // 
handleEmpty detects when FileDiff was an empty diff and will not have any hunks // that follow. It updates fd fields from the parsed extended headers. func handleEmpty(fd *FileDiff) (wasEmpty bool) { + var err error + lineCount := len(fd.Extended) + if lineCount > 0 && !strings.HasPrefix(fd.Extended[0], "diff --git ") { + return false + } switch { - case (len(fd.Extended) == 3 || len(fd.Extended) == 4 && strings.HasPrefix(fd.Extended[3], "Binary files ")) && - strings.HasPrefix(fd.Extended[1], "new file mode ") && strings.HasPrefix(fd.Extended[0], "diff --git "): + case (lineCount == 3 || lineCount == 4 && strings.HasPrefix(fd.Extended[3], "Binary files ") || lineCount > 4 && strings.HasPrefix(fd.Extended[3], "GIT binary patch")) && + strings.HasPrefix(fd.Extended[1], "new file mode "): names := strings.SplitN(fd.Extended[0][len("diff --git "):], " ", 2) fd.OrigName = "/dev/null" - fd.NewName = names[1] + fd.NewName, err = strconv.Unquote(names[1]) + if err != nil { + fd.NewName = names[1] + } return true - case (len(fd.Extended) == 3 || len(fd.Extended) == 4 && strings.HasPrefix(fd.Extended[3], "Binary files ")) && - strings.HasPrefix(fd.Extended[1], "deleted file mode ") && strings.HasPrefix(fd.Extended[0], "diff --git "): + case (lineCount == 3 || lineCount == 4 && strings.HasPrefix(fd.Extended[3], "Binary files ") || lineCount > 4 && strings.HasPrefix(fd.Extended[3], "GIT binary patch")) && + strings.HasPrefix(fd.Extended[1], "deleted file mode "): names := strings.SplitN(fd.Extended[0][len("diff --git "):], " ", 2) - fd.OrigName = names[0] + fd.OrigName, err = strconv.Unquote(names[0]) + if err != nil { + fd.OrigName = names[0] + } fd.NewName = "/dev/null" return true - case len(fd.Extended) == 4 && strings.HasPrefix(fd.Extended[2], "rename from ") && strings.HasPrefix(fd.Extended[3], "rename to ") && strings.HasPrefix(fd.Extended[0], "diff --git "): + case lineCount == 4 && strings.HasPrefix(fd.Extended[2], "rename from ") && strings.HasPrefix(fd.Extended[3], "rename to "): names := strings.SplitN(fd.Extended[0][len("diff --git "):], " ", 2) - fd.OrigName = names[0] - fd.NewName = names[1] + fd.OrigName, err = strconv.Unquote(names[0]) + if err != nil { + fd.OrigName = names[0] + } + fd.NewName, err = strconv.Unquote(names[1]) + if err != nil { + fd.NewName = names[1] + } return true - case len(fd.Extended) == 3 && strings.HasPrefix(fd.Extended[2], "Binary files ") && strings.HasPrefix(fd.Extended[0], "diff --git "): + case lineCount == 6 && strings.HasPrefix(fd.Extended[5], "Binary files ") && strings.HasPrefix(fd.Extended[2], "rename from ") && strings.HasPrefix(fd.Extended[3], "rename to "): names := strings.SplitN(fd.Extended[0][len("diff --git "):], " ", 2) fd.OrigName = names[0] fd.NewName = names[1] return true + case lineCount == 3 && strings.HasPrefix(fd.Extended[2], "Binary files ") || lineCount > 3 && strings.HasPrefix(fd.Extended[2], "GIT binary patch"): + names := strings.SplitN(fd.Extended[0][len("diff --git "):], " ", 2) + fd.OrigName, err = strconv.Unquote(names[0]) + if err != nil { + fd.OrigName = names[0] + } + fd.NewName, err = strconv.Unquote(names[1]) + if err != nil { + fd.NewName = names[1] + } + return true default: return false } @@ -369,6 +426,10 @@ var ( // ErrExtendedHeadersEOF is when an EOF was encountered while reading extended file headers, which means that there were no ---/+++ headers encountered before hunks (if any) began. 
ErrExtendedHeadersEOF = errors.New("expected file header while reading extended headers, got EOF") + + // ErrBadOnlyInMessage is when a file have a malformed `only in` message + // Should be in format `Only in {source}: {filename}` + ErrBadOnlyInMessage = errors.New("bad 'only in' message") ) // ParseHunks parses hunks from a unified diff. The diff must consist @@ -453,9 +514,22 @@ func (r *HunksReader) ReadHunk() (*Hunk, error) { r.hunk.Section = section } else { // Read hunk body line. + + // If the line starts with `---` and the next one with `+++` we're + // looking at a non-extended file header and need to abort. + if bytes.HasPrefix(line, []byte("---")) { + ok, err := peekPrefix(r.reader, "+++") + if err != nil { + return r.hunk, err + } + if ok { + return r.hunk, &ParseError{r.line, r.offset, &ErrBadHunkLine{Line: line}} + } + } + + // If the line starts with the hunk prefix, this hunk is complete. if bytes.HasPrefix(line, hunkPrefix) { - // Saw start of new hunk, so this hunk is - // complete. But we've already read in the next hunk's + // But we've already read in the next hunk's // header, so we need to be sure that the next call to // ReadHunk starts with that header. r.nextHunkHeaderLine = line @@ -519,6 +593,19 @@ func linePrefix(c byte) bool { return false } +// peekPrefix peeks into the given reader to check whether the next +// bytes match the given prefix. +func peekPrefix(reader *bufio.Reader, prefix string) (bool, error) { + next, err := reader.Peek(len(prefix)) + if err != nil { + if err == io.EOF { + return false, nil + } + return false, err + } + return bytes.HasPrefix(next, []byte(prefix)), nil +} + // normalizeHeader takes a header of the form: // "@@ -linestart[,chunksize] +linestart[,chunksize] @@ section" // and returns two strings, with the first in the form: @@ -578,6 +665,19 @@ func (r *HunksReader) ReadAllHunks() ([]*Hunk, error) { } } +// parseOnlyInMessage checks if line is a "Only in {source}: {filename}" and returns source and filename +func parseOnlyInMessage(line []byte) (bool, []byte, []byte) { + if !bytes.HasPrefix(line, onlyInMessagePrefix) { + return false, nil, nil + } + line = line[len(onlyInMessagePrefix):] + idx := bytes.Index(line, []byte(": ")) + if idx < 0 { + return false, nil, nil + } + return true, line[:idx], line[idx+2:] +} + // A ParseError is a description of a unified diff syntax error. type ParseError struct { Line int // Line where the error occurred diff --git a/vendor/github.com/sourcegraph/go-diff/diff/print.go b/vendor/github.com/sourcegraph/go-diff/diff/print.go index d440cb9..012651a 100644 --- a/vendor/github.com/sourcegraph/go-diff/diff/print.go +++ b/vendor/github.com/sourcegraph/go-diff/diff/print.go @@ -4,9 +4,8 @@ import ( "bytes" "fmt" "io" + "path/filepath" "time" - - "sourcegraph.com/sqs/pbtypes" ) // PrintMultiFileDiff prints a multi-file diff in unified diff format. 
@@ -36,14 +35,24 @@ func PrintFileDiff(d *FileDiff) ([]byte, error) { } } + // FileDiff is added/deleted file + // No further hunks printing needed + if d.NewName == "" { + _, err := fmt.Fprintf(&buf, onlyInMessage, filepath.Dir(d.OrigName), filepath.Base(d.OrigName)) + if err != nil { + return nil, err + } + return buf.Bytes(), nil + } + if d.Hunks == nil { return buf.Bytes(), nil } - if err := printFileHeader(&buf, "--- ", d.OrigName, timePtr(d.OrigTime)); err != nil { + if err := printFileHeader(&buf, "--- ", d.OrigName, d.OrigTime); err != nil { return nil, err } - if err := printFileHeader(&buf, "+++ ", d.NewName, timePtr(d.NewTime)); err != nil { + if err := printFileHeader(&buf, "+++ ", d.NewName, d.NewTime); err != nil { return nil, err } @@ -58,14 +67,6 @@ func PrintFileDiff(d *FileDiff) ([]byte, error) { return buf.Bytes(), nil } -func timePtr(ts *pbtypes.Timestamp) *time.Time { - if ts == nil { - return nil - } - t := ts.Time() - return &t -} - func printFileHeader(w io.Writer, prefix string, filename string, timestamp *time.Time) error { if _, err := fmt.Fprint(w, prefix, filename); err != nil { return err diff --git a/vendor/github.com/spf13/cobra/.gitignore b/vendor/github.com/spf13/cobra/.gitignore index 3b053c5..c7b459e 100644 --- a/vendor/github.com/spf13/cobra/.gitignore +++ b/vendor/github.com/spf13/cobra/.gitignore @@ -32,7 +32,8 @@ Session.vim tags *.exe - cobra.test +bin -.idea/* +.idea/ +*.iml diff --git a/vendor/github.com/spf13/cobra/.travis.yml b/vendor/github.com/spf13/cobra/.travis.yml index 38b85f4..a9bd4e5 100644 --- a/vendor/github.com/spf13/cobra/.travis.yml +++ b/vendor/github.com/spf13/cobra/.travis.yml @@ -3,29 +3,27 @@ language: go stages: - diff - test + - build go: - - 1.10.x - - 1.11.x - 1.12.x + - 1.13.x - tip +before_install: + - go get -u github.com/kyoh86/richgo + - go get -u github.com/mitchellh/gox + matrix: allow_failures: - go: tip include: - stage: diff - go: 1.12.x - script: diff -u <(echo -n) <(gofmt -d -s .) + go: 1.13.x + script: make fmt + - stage: build + go: 1.13.x + script: make cobra_generator -before_install: - - mkdir -p bin - - curl -Lso bin/shellcheck https://github.com/caarlos0/shellcheck-docker/releases/download/v0.6.0/shellcheck - - chmod +x bin/shellcheck - - go get -u github.com/kyoh86/richgo -script: - - PATH=$PATH:$PWD/bin richgo test -v ./... - - go build - - if [ -z $NOVET ]; then - diff -u <(echo -n) <(go vet . 2>&1 | grep -vE 'ExampleCommand|bash_completions.*Fprint'); - fi +script: + - make test diff --git a/vendor/github.com/spf13/cobra/CHANGELOG.md b/vendor/github.com/spf13/cobra/CHANGELOG.md new file mode 100644 index 0000000..742d6d6 --- /dev/null +++ b/vendor/github.com/spf13/cobra/CHANGELOG.md @@ -0,0 +1,22 @@ +# Cobra Changelog + +## Pending +* Fix man page doc generation - no auto generated tag when `cmd.DisableAutoGenTag = true` @jpmcb + +## v1.0.0 +Announcing v1.0.0 of Cobra. 
🎉 +**Notable Changes** +* Fish completion (including support for Go custom completion) @marckhouzam +* API (urgent): Rename BashCompDirectives to ShellCompDirectives @marckhouzam +* Remove/replace SetOutput on Command - deprecated @jpmcb +* add support for autolabel stale PR @xchapter7x +* Add Labeler Actions @xchapter7x +* Custom completions coded in Go (instead of Bash) @marckhouzam +* Partial Revert of #922 @jharshman +* Add Makefile to project @jharshman +* Correct documentation for InOrStdin @desponda +* Apply formatting to templates @jharshman +* Revert change so help is printed on stdout again @marckhouzam +* Update md2man to v2.0.0 @pdf +* update viper to v1.4.0 @umarcor +* Update cmd/root.go example in README.md @jharshman diff --git a/vendor/github.com/spf13/cobra/CONTRIBUTING.md b/vendor/github.com/spf13/cobra/CONTRIBUTING.md new file mode 100644 index 0000000..6f356e6 --- /dev/null +++ b/vendor/github.com/spf13/cobra/CONTRIBUTING.md @@ -0,0 +1,50 @@ +# Contributing to Cobra + +Thank you so much for contributing to Cobra. We appreciate your time and help. +Here are some guidelines to help you get started. + +## Code of Conduct + +Be kind and respectful to the members of the community. Take time to educate +others who are seeking help. Harassment of any kind will not be tolerated. + +## Questions + +If you have questions regarding Cobra, feel free to ask it in the community +[#cobra Slack channel][cobra-slack] + +## Filing a bug or feature + +1. Before filing an issue, please check the existing issues to see if a + similar one was already opened. If there is one already opened, feel free + to comment on it. +1. If you believe you've found a bug, please provide detailed steps of + reproduction, the version of Cobra and anything else you believe will be + useful to help troubleshoot it (e.g. OS environment, environment variables, + etc...). Also state the current behavior vs. the expected behavior. +1. If you'd like to see a feature or an enhancement please open an issue with + a clear title and description of what the feature is and why it would be + beneficial to the project and its users. + +## Submitting changes + +1. CLA: Upon submitting a Pull Request (PR), contributors will be prompted to + sign a CLA. Please sign the CLA :slightly_smiling_face: +1. Tests: If you are submitting code, please ensure you have adequate tests + for the feature. Tests can be run via `go test ./...` or `make test`. +1. Since this is golang project, ensure the new code is properly formatted to + ensure code consistency. Run `make all`. + +### Quick steps to contribute + +1. Fork the project. +1. Download your fork to your PC (`git clone https://github.com/your_username/cobra && cd cobra`) +1. Create your feature branch (`git checkout -b my-new-feature`) +1. Make changes and run tests (`make test`) +1. Add them to staging (`git add .`) +1. Commit your changes (`git commit -m 'Add some feature'`) +1. Push to the branch (`git push origin my-new-feature`) +1. Create new pull request + + +[cobra-slack]: https://gophers.slack.com/archives/CD3LP1199 diff --git a/vendor/github.com/spf13/cobra/Makefile b/vendor/github.com/spf13/cobra/Makefile new file mode 100644 index 0000000..e9740d1 --- /dev/null +++ b/vendor/github.com/spf13/cobra/Makefile @@ -0,0 +1,36 @@ +BIN="./bin" +SRC=$(shell find . 
-name "*.go") + +ifeq (, $(shell which richgo)) +$(warning "could not find richgo in $(PATH), run: go get github.com/kyoh86/richgo") +endif + +.PHONY: fmt vet test cobra_generator install_deps clean + +default: all + +all: fmt vet test cobra_generator + +fmt: + $(info ******************** checking formatting ********************) + @test -z $(shell gofmt -l $(SRC)) || (gofmt -d $(SRC); exit 1) + +test: install_deps vet + $(info ******************** running tests ********************) + richgo test -v ./... + +cobra_generator: install_deps + $(info ******************** building generator ********************) + mkdir -p $(BIN) + make -C cobra all + +install_deps: + $(info ******************** downloading dependencies ********************) + go get -v ./... + +vet: + $(info ******************** vetting ********************) + go vet ./... + +clean: + rm -rf $(BIN) diff --git a/vendor/github.com/spf13/cobra/README.md b/vendor/github.com/spf13/cobra/README.md index 60c5a42..3cf1b25 100644 --- a/vendor/github.com/spf13/cobra/README.md +++ b/vendor/github.com/spf13/cobra/README.md @@ -2,33 +2,14 @@ Cobra is both a library for creating powerful modern CLI applications as well as a program to generate applications and command files. -Many of the most widely used Go projects are built using Cobra, such as: -[Kubernetes](http://kubernetes.io/), -[Hugo](http://gohugo.io), -[rkt](https://github.com/coreos/rkt), -[etcd](https://github.com/coreos/etcd), -[Moby (former Docker)](https://github.com/moby/moby), -[Docker (distribution)](https://github.com/docker/distribution), -[OpenShift](https://www.openshift.com/), -[Delve](https://github.com/derekparker/delve), -[GopherJS](http://www.gopherjs.org/), -[CockroachDB](http://www.cockroachlabs.com/), -[Bleve](http://www.blevesearch.com/), -[ProjectAtomic (enterprise)](http://www.projectatomic.io/), -[Giant Swarm's gsctl](https://github.com/giantswarm/gsctl), -[Nanobox](https://github.com/nanobox-io/nanobox)/[Nanopack](https://github.com/nanopack), -[rclone](http://rclone.org/), -[nehm](https://github.com/bogem/nehm), -[Pouch](https://github.com/alibaba/pouch), -[Istio](https://istio.io), -[Prototool](https://github.com/uber/prototool), -[mattermost-server](https://github.com/mattermost/mattermost-server), -[Gardener](https://github.com/gardener/gardenctl), -etc. +Cobra is used in many Go projects such as [Kubernetes](http://kubernetes.io/), +[Hugo](https://gohugo.io), and [Github CLI](https://github.com/cli/cli) to +name a few. [This list](./projects_using_cobra.md) contains a more extensive list of projects using Cobra. [![Build Status](https://travis-ci.org/spf13/cobra.svg "Travis CI status")](https://travis-ci.org/spf13/cobra) -[![CircleCI status](https://circleci.com/gh/spf13/cobra.png?circle-token=:circle-token "CircleCI status")](https://circleci.com/gh/spf13/cobra) [![GoDoc](https://godoc.org/github.com/spf13/cobra?status.svg)](https://godoc.org/github.com/spf13/cobra) +[![Go Report Card](https://goreportcard.com/badge/github.com/spf13/cobra)](https://goreportcard.com/report/github.com/spf13/cobra) +[![Slack](https://img.shields.io/badge/Slack-cobra-brightgreen)](https://gophers.slack.com/archives/CD3LP1199) # Table of Contents @@ -48,9 +29,8 @@ etc. 
* [PreRun and PostRun Hooks](#prerun-and-postrun-hooks) * [Suggestions when "unknown command" happens](#suggestions-when-unknown-command-happens) * [Generating documentation for your command](#generating-documentation-for-your-command) - * [Generating bash completions](#generating-bash-completions) - * [Generating zsh completions](#generating-zsh-completions) -- [Contributing](#contributing) + * [Generating shell completions](#generating-shell-completions) +- [Contributing](CONTRIBUTING.md) - [License](#license) # Overview @@ -70,7 +50,7 @@ Cobra provides: * Intelligent suggestions (`app srver`... did you mean `app server`?) * Automatic help generation for commands and flags * Automatic help flag recognition of `-h`, `--help`, etc. -* Automatically generated bash autocomplete for your application +* Automatically generated shell autocomplete for your application (bash, zsh, fish, powershell) * Automatically generated man pages for your application * Command aliases so you can change things without breaking them * The flexibility to define your own help, usage, etc. @@ -128,7 +108,7 @@ Using Cobra is easy. First, use `go get` to install the latest version of the library. This command will install the `cobra` generator executable along with the library and its dependencies: - go get -u github.com/spf13/cobra/cobra + go get -u github.com/spf13/cobra Next, include Cobra in your application: @@ -197,7 +177,7 @@ var rootCmd = &cobra.Command{ func Execute() { if err := rootCmd.Execute(); err != nil { - fmt.Println(err) + fmt.Fprintln(os.Stderr, err) os.Exit(1) } } @@ -208,51 +188,78 @@ You will additionally define flags and handle configuration in your init() funct For example cmd/root.go: ```go +package cmd + import ( - "fmt" - "os" + "fmt" + "os" - homedir "github.com/mitchellh/go-homedir" - "github.com/spf13/cobra" - "github.com/spf13/viper" + homedir "github.com/mitchellh/go-homedir" + "github.com/spf13/cobra" + "github.com/spf13/viper" ) -func init() { - cobra.OnInitialize(initConfig) - rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.cobra.yaml)") - rootCmd.PersistentFlags().StringVarP(&projectBase, "projectbase", "b", "", "base project directory eg. github.com/spf13/") - rootCmd.PersistentFlags().StringP("author", "a", "YOUR NAME", "Author name for copyright attribution") - rootCmd.PersistentFlags().StringVarP(&userLicense, "license", "l", "", "Name of license for the project (can provide `licensetext` in config)") - rootCmd.PersistentFlags().Bool("viper", true, "Use Viper for configuration") - viper.BindPFlag("author", rootCmd.PersistentFlags().Lookup("author")) - viper.BindPFlag("projectbase", rootCmd.PersistentFlags().Lookup("projectbase")) - viper.BindPFlag("useViper", rootCmd.PersistentFlags().Lookup("viper")) - viper.SetDefault("author", "NAME HERE ") - viper.SetDefault("license", "apache") +var ( + // Used for flags. + cfgFile string + userLicense string + + rootCmd = &cobra.Command{ + Use: "cobra", + Short: "A generator for Cobra based Applications", + Long: `Cobra is a CLI library for Go that empowers applications. +This application is a tool to generate the needed files +to quickly create a Cobra application.`, + } +) + +// Execute executes the root command. +func Execute() error { + return rootCmd.Execute() } -func initConfig() { - // Don't forget to read config either from cfgFile or from home directory! - if cfgFile != "" { - // Use config file from the flag. - viper.SetConfigFile(cfgFile) - } else { - // Find home directory. 
- home, err := homedir.Dir() - if err != nil { - fmt.Println(err) - os.Exit(1) - } +func init() { + cobra.OnInitialize(initConfig) + + rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.cobra.yaml)") + rootCmd.PersistentFlags().StringP("author", "a", "YOUR NAME", "author name for copyright attribution") + rootCmd.PersistentFlags().StringVarP(&userLicense, "license", "l", "", "name of license for the project") + rootCmd.PersistentFlags().Bool("viper", true, "use Viper for configuration") + viper.BindPFlag("author", rootCmd.PersistentFlags().Lookup("author")) + viper.BindPFlag("useViper", rootCmd.PersistentFlags().Lookup("viper")) + viper.SetDefault("author", "NAME HERE ") + viper.SetDefault("license", "apache") + + rootCmd.AddCommand(addCmd) + rootCmd.AddCommand(initCmd) +} - // Search config in home directory with name ".cobra" (without extension). - viper.AddConfigPath(home) - viper.SetConfigName(".cobra") - } +func er(msg interface{}) { + fmt.Println("Error:", msg) + os.Exit(1) +} - if err := viper.ReadInConfig(); err != nil { - fmt.Println("Can't read config:", err) - os.Exit(1) - } +func initConfig() { + if cfgFile != "" { + // Use config file from the flag. + viper.SetConfigFile(cfgFile) + } else { + // Find home directory. + home, err := homedir.Dir() + if err != nil { + er(err) + } + + // Search config in home directory with name ".cobra" (without extension). + viper.AddConfigPath(home) + viper.SetConfigName(".cobra") + } + + viper.AutomaticEnv() + + if err := viper.ReadInConfig(); err == nil { + fmt.Println("Using config file:", viper.ConfigFileUsed()) + } } ``` @@ -306,6 +313,37 @@ var versionCmd = &cobra.Command{ } ``` +### Returning and handling errors + +If you wish to return an error to the caller of a command, `RunE` can be used. + +```go +package cmd + +import ( + "fmt" + + "github.com/spf13/cobra" +) + +func init() { + rootCmd.AddCommand(tryCmd) +} + +var tryCmd = &cobra.Command{ + Use: "try", + Short: "Try and possibly fail at something", + RunE: func(cmd *cobra.Command, args []string) error { + if err := someFunc(); err != nil { + return err + } + return nil + }, +} +``` + +The error can then be caught at the execute function call. + ## Working with Flags Flags provide modifiers to control how the action command operates. @@ -381,6 +419,12 @@ rootCmd.Flags().StringVarP(&Region, "region", "r", "", "AWS region (required)") rootCmd.MarkFlagRequired("region") ``` +Or, for persistent flags: +```go +rootCmd.PersistentFlags().StringVarP(&Region, "region", "r", "", "AWS region (required)") +rootCmd.MarkPersistentFlagRequired("region") +``` + ## Positional and Custom Arguments Validation of positional arguments can be specified using the `Args` field @@ -459,7 +503,7 @@ For many years people have printed back to the screen.`, Echo works a lot like print, except it has a child command.`, Args: cobra.MinimumNArgs(1), Run: func(cmd *cobra.Command, args []string) { - fmt.Println("Print: " + strings.Join(args, " ")) + fmt.Println("Echo: " + strings.Join(args, " ")) }, } @@ -711,30 +755,11 @@ Run 'kubectl help' for usage. ## Generating documentation for your command -Cobra can generate documentation based on subcommands, flags, etc. in the following formats: - -- [Markdown](doc/md_docs.md) -- [ReStructured Text](doc/rest_docs.md) -- [Man Page](doc/man_docs.md) - -## Generating bash completions - -Cobra can generate a bash-completion file. If you add more information to your command, these completions can be amazingly powerful and flexible. 
Read more about it in [Bash Completions](bash_completions.md). - -## Generating zsh completions - -Cobra can generate zsh-completion file. Read more about it in -[Zsh Completions](zsh_completions.md). +Cobra can generate documentation based on subcommands, flags, etc. Read more about it in the [docs generation documentation](doc/README.md). -# Contributing +## Generating shell completions -1. Fork it -2. Download your fork to your PC (`git clone https://github.com/your_username/cobra && cd cobra`) -3. Create your feature branch (`git checkout -b my-new-feature`) -4. Make changes and add them (`git add .`) -5. Commit your changes (`git commit -m 'Add some feature'`) -6. Push to the branch (`git push origin my-new-feature`) -7. Create new pull request +Cobra can generate a shell-completion file for the following shells: Bash, Zsh, Fish, Powershell. If you add more information to your commands, these completions can be amazingly powerful and flexible. Read more about it in [Shell Completions](shell_completions.md). # License diff --git a/vendor/github.com/spf13/cobra/args.go b/vendor/github.com/spf13/cobra/args.go index c4d820b..70e9b26 100644 --- a/vendor/github.com/spf13/cobra/args.go +++ b/vendor/github.com/spf13/cobra/args.go @@ -2,6 +2,7 @@ package cobra import ( "fmt" + "strings" ) type PositionalArgs func(cmd *Command, args []string) error @@ -34,8 +35,15 @@ func NoArgs(cmd *Command, args []string) error { // OnlyValidArgs returns an error if any args are not in the list of ValidArgs. func OnlyValidArgs(cmd *Command, args []string) error { if len(cmd.ValidArgs) > 0 { + // Remove any description that may be included in ValidArgs. + // A description is following a tab character. + var validArgs []string + for _, v := range cmd.ValidArgs { + validArgs = append(validArgs, strings.Split(v, "\t")[0]) + } + for _, v := range args { - if !stringInSlice(v, cmd.ValidArgs) { + if !stringInSlice(v, validArgs) { return fmt.Errorf("invalid argument %q for %q%s", v, cmd.CommandPath(), cmd.findSuggestions(args[0])) } } diff --git a/vendor/github.com/spf13/cobra/bash_completions.go b/vendor/github.com/spf13/cobra/bash_completions.go index 57bb8e1..846636d 100644 --- a/vendor/github.com/spf13/cobra/bash_completions.go +++ b/vendor/github.com/spf13/cobra/bash_completions.go @@ -58,9 +58,103 @@ __%[1]s_contains_word() return 1 } +__%[1]s_handle_go_custom_completion() +{ + __%[1]s_debug "${FUNCNAME[0]}: cur is ${cur}, words[*] is ${words[*]}, #words[@] is ${#words[@]}" + + local shellCompDirectiveError=%[3]d + local shellCompDirectiveNoSpace=%[4]d + local shellCompDirectiveNoFileComp=%[5]d + local shellCompDirectiveFilterFileExt=%[6]d + local shellCompDirectiveFilterDirs=%[7]d + + local out requestComp lastParam lastChar comp directive args + + # Prepare the command to request completions for the program. + # Calling ${words[0]} instead of directly %[1]s allows to handle aliases + args=("${words[@]:1}") + requestComp="${words[0]} %[2]s ${args[*]}" + + lastParam=${words[$((${#words[@]}-1))]} + lastChar=${lastParam:$((${#lastParam}-1)):1} + __%[1]s_debug "${FUNCNAME[0]}: lastParam ${lastParam}, lastChar ${lastChar}" + + if [ -z "${cur}" ] && [ "${lastChar}" != "=" ]; then + # If the last parameter is complete (there is a space following it) + # We add an extra empty parameter so we can indicate this to the go method. 
+ __%[1]s_debug "${FUNCNAME[0]}: Adding extra empty parameter" + requestComp="${requestComp} \"\"" + fi + + __%[1]s_debug "${FUNCNAME[0]}: calling ${requestComp}" + # Use eval to handle any environment variables and such + out=$(eval "${requestComp}" 2>/dev/null) + + # Extract the directive integer at the very end of the output following a colon (:) + directive=${out##*:} + # Remove the directive + out=${out%%:*} + if [ "${directive}" = "${out}" ]; then + # There is not directive specified + directive=0 + fi + __%[1]s_debug "${FUNCNAME[0]}: the completion directive is: ${directive}" + __%[1]s_debug "${FUNCNAME[0]}: the completions are: ${out[*]}" + + if [ $((directive & shellCompDirectiveError)) -ne 0 ]; then + # Error code. No completion. + __%[1]s_debug "${FUNCNAME[0]}: received error from custom completion go code" + return + else + if [ $((directive & shellCompDirectiveNoSpace)) -ne 0 ]; then + if [[ $(type -t compopt) = "builtin" ]]; then + __%[1]s_debug "${FUNCNAME[0]}: activating no space" + compopt -o nospace + fi + fi + if [ $((directive & shellCompDirectiveNoFileComp)) -ne 0 ]; then + if [[ $(type -t compopt) = "builtin" ]]; then + __%[1]s_debug "${FUNCNAME[0]}: activating no file completion" + compopt +o default + fi + fi + fi + + if [ $((directive & shellCompDirectiveFilterFileExt)) -ne 0 ]; then + # File extension filtering + local fullFilter filter filteringCmd + # Do not use quotes around the $out variable or else newline + # characters will be kept. + for filter in ${out[*]}; do + fullFilter+="$filter|" + done + + filteringCmd="_filedir $fullFilter" + __%[1]s_debug "File filtering command: $filteringCmd" + $filteringCmd + elif [ $((directive & shellCompDirectiveFilterDirs)) -ne 0 ]; then + # File completion for directories only + local subDir + # Use printf to strip any trailing newline + subdir=$(printf "%%s" "${out[0]}") + if [ -n "$subdir" ]; then + __%[1]s_debug "Listing directories in $subdir" + __%[1]s_handle_subdirs_in_dir_flag "$subdir" + else + __%[1]s_debug "Listing directories in ." 
+ _filedir -d + fi + else + while IFS='' read -r comp; do + COMPREPLY+=("$comp") + done < <(compgen -W "${out[*]}" -- "$cur") + fi +} + __%[1]s_handle_reply() { __%[1]s_debug "${FUNCNAME[0]}" + local comp case $cur in -*) if [[ $(type -t compopt) = "builtin" ]]; then @@ -72,7 +166,9 @@ __%[1]s_handle_reply() else allflags=("${flags[*]} ${two_word_flags[*]}") fi - COMPREPLY=( $(compgen -W "${allflags[*]}" -- "$cur") ) + while IFS='' read -r comp; do + COMPREPLY+=("$comp") + done < <(compgen -W "${allflags[*]}" -- "$cur") if [[ $(type -t compopt) = "builtin" ]]; then [[ "${COMPREPLY[0]}" == *= ]] || compopt +o nospace fi @@ -117,15 +213,22 @@ __%[1]s_handle_reply() local completions completions=("${commands[@]}") if [[ ${#must_have_one_noun[@]} -ne 0 ]]; then - completions=("${must_have_one_noun[@]}") + completions+=("${must_have_one_noun[@]}") + elif [[ -n "${has_completion_function}" ]]; then + # if a go completion function is provided, defer to that function + __%[1]s_handle_go_custom_completion fi if [[ ${#must_have_one_flag[@]} -ne 0 ]]; then completions+=("${must_have_one_flag[@]}") fi - COMPREPLY=( $(compgen -W "${completions[*]}" -- "$cur") ) + while IFS='' read -r comp; do + COMPREPLY+=("$comp") + done < <(compgen -W "${completions[*]}" -- "$cur") if [[ ${#COMPREPLY[@]} -eq 0 && ${#noun_aliases[@]} -gt 0 && ${#must_have_one_noun[@]} -ne 0 ]]; then - COMPREPLY=( $(compgen -W "${noun_aliases[*]}" -- "$cur") ) + while IFS='' read -r comp; do + COMPREPLY+=("$comp") + done < <(compgen -W "${noun_aliases[*]}" -- "$cur") fi if [[ ${#COMPREPLY[@]} -eq 0 ]]; then @@ -160,7 +263,7 @@ __%[1]s_handle_filename_extension_flag() __%[1]s_handle_subdirs_in_dir_flag() { local dir="$1" - pushd "${dir}" >/dev/null 2>&1 && _filedir -d && popd >/dev/null 2>&1 + pushd "${dir}" >/dev/null 2>&1 && _filedir -d && popd >/dev/null 2>&1 || return } __%[1]s_handle_flag() @@ -272,7 +375,9 @@ __%[1]s_handle_word() __%[1]s_handle_word } -`, name)) +`, name, ShellCompNoDescRequestCmd, + ShellCompDirectiveError, ShellCompDirectiveNoSpace, ShellCompDirectiveNoFileComp, + ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs)) } func writePostscript(buf *bytes.Buffer, name string) { @@ -297,6 +402,7 @@ func writePostscript(buf *bytes.Buffer, name string) { local commands=("%[1]s") local must_have_one_flag=() local must_have_one_noun=() + local has_completion_function local last_command local nouns=() @@ -317,7 +423,7 @@ fi func writeCommands(buf *bytes.Buffer, cmd *Command) { buf.WriteString(" commands=()\n") for _, c := range cmd.Commands() { - if !c.IsAvailableCommand() || c == cmd.helpCommand { + if !c.IsAvailableCommand() && c != cmd.helpCommand { continue } buf.WriteString(fmt.Sprintf(" commands+=(%q)\n", c.Name())) @@ -389,15 +495,32 @@ func writeFlag(buf *bytes.Buffer, flag *pflag.Flag, cmd *Command) { func writeLocalNonPersistentFlag(buf *bytes.Buffer, flag *pflag.Flag) { name := flag.Name - format := " local_nonpersistent_flags+=(\"--%s" + format := " local_nonpersistent_flags+=(\"--%[1]s\")\n" if len(flag.NoOptDefVal) == 0 { - format += "=" + format += " local_nonpersistent_flags+=(\"--%[1]s=\")\n" } - format += "\")\n" buf.WriteString(fmt.Sprintf(format, name)) + if len(flag.Shorthand) > 0 { + buf.WriteString(fmt.Sprintf(" local_nonpersistent_flags+=(\"-%s\")\n", flag.Shorthand)) + } +} + +// Setup annotations for go completions for registered flags +func prepareCustomAnnotationsForFlags(cmd *Command) { + for flag := range flagCompletionFunctions { + // Make sure the completion script calls the 
__*_go_custom_completion function for + // every registered flag. We need to do this here (and not when the flag was registered + // for completion) so that we can know the root command name for the prefix + // of ___go_custom_completion + if flag.Annotations == nil { + flag.Annotations = map[string][]string{} + } + flag.Annotations[BashCompCustom] = []string{fmt.Sprintf("__%[1]s_handle_go_custom_completion", cmd.Root().Name())} + } } func writeFlags(buf *bytes.Buffer, cmd *Command) { + prepareCustomAnnotationsForFlags(cmd) buf.WriteString(` flags=() two_word_flags=() local_nonpersistent_flags=() @@ -414,7 +537,9 @@ func writeFlags(buf *bytes.Buffer, cmd *Command) { if len(flag.Shorthand) > 0 { writeShortFlag(buf, flag, cmd) } - if localNonPersistentFlags.Lookup(flag.Name) != nil { + // localNonPersistentFlags are used to stop the completion of subcommands when one is set + // if TraverseChildren is true we should allow to complete subcommands + if localNonPersistentFlags.Lookup(flag.Name) != nil && !cmd.Root().TraverseChildren { writeLocalNonPersistentFlag(buf, flag) } }) @@ -460,8 +585,14 @@ func writeRequiredNouns(buf *bytes.Buffer, cmd *Command) { buf.WriteString(" must_have_one_noun=()\n") sort.Sort(sort.StringSlice(cmd.ValidArgs)) for _, value := range cmd.ValidArgs { + // Remove any description that may be included following a tab character. + // Descriptions are not supported by bash completion. + value = strings.Split(value, "\t")[0] buf.WriteString(fmt.Sprintf(" must_have_one_noun+=(%q)\n", value)) } + if cmd.ValidArgsFunction != nil { + buf.WriteString(" has_completion_function=1\n") + } } func writeCmdAliases(buf *bytes.Buffer, cmd *Command) { @@ -489,7 +620,7 @@ func writeArgAliases(buf *bytes.Buffer, cmd *Command) { func gen(buf *bytes.Buffer, cmd *Command) { for _, c := range cmd.Commands() { - if !c.IsAvailableCommand() || c == cmd.helpCommand { + if !c.IsAvailableCommand() && c != cmd.helpCommand { continue } gen(buf, c) diff --git a/vendor/github.com/spf13/cobra/bash_completions.md b/vendor/github.com/spf13/cobra/bash_completions.md index 4ac61ee..a82d5bb 100644 --- a/vendor/github.com/spf13/cobra/bash_completions.md +++ b/vendor/github.com/spf13/cobra/bash_completions.md @@ -1,64 +1,14 @@ -# Generating Bash Completions For Your Own cobra.Command +# Generating Bash Completions For Your cobra.Command -If you are using the generator you can create a completion command by running +Please refer to [Shell Completions](shell_completions.md) for details. -```bash -cobra add completion -``` - -Update the help text show how to install the bash_completion Linux show here [Kubectl docs show mac options](https://kubernetes.io/docs/tasks/tools/install-kubectl/#enabling-shell-autocompletion) - -Writing the shell script to stdout allows the most flexible use. - -```go -// completionCmd represents the completion command -var completionCmd = &cobra.Command{ - Use: "completion", - Short: "Generates bash completion scripts", - Long: `To load completion run - -. <(bitbucket completion) - -To configure your bash shell to load completions for each session add to your bashrc - -# ~/.bashrc or ~/.profile -. <(bitbucket completion) -`, - Run: func(cmd *cobra.Command, args []string) { - rootCmd.GenBashCompletion(os.Stdout); - }, -} -``` - -**Note:** The cobra generator may include messages printed to stdout for example if the config file is loaded, this will break the auto complete script - - -## Example from kubectl - -Generating bash completions from a cobra command is incredibly easy. 
An actual program which does so for the kubernetes kubectl binary is as follows: +## Bash legacy dynamic completions -```go -package main - -import ( - "io/ioutil" - "os" - - "k8s.io/kubernetes/pkg/kubectl/cmd" - "k8s.io/kubernetes/pkg/kubectl/cmd/util" -) +For backwards-compatibility, Cobra still supports its legacy dynamic completion solution (described below). Unlike the `ValidArgsFunction` solution, the legacy solution will only work for Bash shell-completion and not for other shells. This legacy solution can be used along-side `ValidArgsFunction` and `RegisterFlagCompletionFunc()`, as long as both solutions are not used for the same command. This provides a path to gradually migrate from the legacy solution to the new solution. -func main() { - kubectl := cmd.NewKubectlCommand(util.NewFactory(nil), os.Stdin, ioutil.Discard, ioutil.Discard) - kubectl.GenBashCompletionFile("out.sh") -} -``` +The legacy solution allows you to inject bash functions into the bash completion script. Those bash functions are responsible for providing the completion choices for your own completions. -`out.sh` will get you completions of subcommands and flags. Copy it to `/etc/bash_completion.d/` as described [here](https://debian-administration.org/article/316/An_introduction_to_bash_completion_part_1) and reset your terminal to use autocompletion. If you make additional annotations to your code, you can get even more intelligent and flexible behavior. - -## Creating your own custom functions - -Some more actual code that works in kubernetes: +Some code that works in kubernetes: ```bash const ( @@ -111,108 +61,7 @@ Find more information at https://github.com/GoogleCloudPlatform/kubernetes.`, The `BashCompletionFunction` option is really only valid/useful on the root command. Doing the above will cause `__kubectl_custom_func()` (`___custom_func()`) to be called when the built in processor was unable to find a solution. In the case of kubernetes a valid command might look something like `kubectl get pod [mypod]`. If you type `kubectl get pod [tab][tab]` the `__kubectl_customc_func()` will run because the cobra.Command only understood "kubectl" and "get." `__kubectl_custom_func()` will see that the cobra.Command is "kubectl_get" and will thus call another helper `__kubectl_get_resource()`. `__kubectl_get_resource` will look at the 'nouns' collected. In our example the only noun will be `pod`. So it will call `__kubectl_parse_get pod`. `__kubectl_parse_get` will actually call out to kubernetes and get any pods. It will then set `COMPREPLY` to valid pods! -## Have the completions code complete your 'nouns' - -In the above example "pod" was assumed to already be typed. But if you want `kubectl get [tab][tab]` to show a list of valid "nouns" you have to set them. Simplified code from `kubectl get` looks like: - -```go -validArgs []string = { "pod", "node", "service", "replicationcontroller" } - -cmd := &cobra.Command{ - Use: "get [(-o|--output=)json|yaml|template|...] (RESOURCE [NAME] | RESOURCE/NAME ...)", - Short: "Display one or many resources", - Long: get_long, - Example: get_example, - Run: func(cmd *cobra.Command, args []string) { - err := RunGet(f, out, cmd, args) - util.CheckErr(err) - }, - ValidArgs: validArgs, -} -``` - -Notice we put the "ValidArgs" on the "get" subcommand. 
Doing so will give results like - -```bash -# kubectl get [tab][tab] -node pod replicationcontroller service -``` - -## Plural form and shortcuts for nouns - -If your nouns have a number of aliases, you can define them alongside `ValidArgs` using `ArgAliases`: - -```go -argAliases []string = { "pods", "nodes", "services", "svc", "replicationcontrollers", "rc" } - -cmd := &cobra.Command{ - ... - ValidArgs: validArgs, - ArgAliases: argAliases -} -``` - -The aliases are not shown to the user on tab completion, but they are accepted as valid nouns by -the completion algorithm if entered manually, e.g. in: - -```bash -# kubectl get rc [tab][tab] -backend frontend database -``` - -Note that without declaring `rc` as an alias, the completion algorithm would show the list of nouns -in this example again instead of the replication controllers. - -## Mark flags as required - -Most of the time completions will only show subcommands. But if a flag is required to make a subcommand work, you probably want it to show up when the user types [tab][tab]. Marking a flag as 'Required' is incredibly easy. - -```go -cmd.MarkFlagRequired("pod") -cmd.MarkFlagRequired("container") -``` - -and you'll get something like - -```bash -# kubectl exec [tab][tab][tab] --c --container= -p --pod= -``` - -# Specify valid filename extensions for flags that take a filename - -In this example we use --filename= and expect to get a json or yaml file as the argument. To make this easier we annotate the --filename flag with valid filename extensions. - -```go - annotations := []string{"json", "yaml", "yml"} - annotation := make(map[string][]string) - annotation[cobra.BashCompFilenameExt] = annotations - - flag := &pflag.Flag{ - Name: "filename", - Shorthand: "f", - Usage: usage, - Value: value, - DefValue: value.String(), - Annotations: annotation, - } - cmd.Flags().AddFlag(flag) -``` - -Now when you run a command with this filename flag you'll get something like - -```bash -# kubectl create -f -test/ example/ rpmbuild/ -hello.yml test.json -``` - -So while there are many other files in the CWD it only shows me subdirs and those with valid extensions. - -# Specify custom flag completion - -Similar to the filename completion and filtering using cobra.BashCompFilenameExt, you can specify -a custom flag completion function with cobra.BashCompCustom: +Similarly, for flags: ```go annotation := make(map[string][]string) @@ -226,7 +75,7 @@ a custom flag completion function with cobra.BashCompCustom: cmd.Flags().AddFlag(flag) ``` -In addition add the `__handle_namespace_flag` implementation in the `BashCompletionFunction` +In addition add the `__kubectl_get_namespaces` implementation in the `BashCompletionFunction` value, e.g.: ```bash @@ -240,17 +89,3 @@ __kubectl_get_namespaces() fi } ``` -# Using bash aliases for commands - -You can also configure the `bash aliases` for the commands and they will also support completions. - -```bash -alias aliasname=origcommand -complete -o default -F __start_origcommand aliasname - -# and now when you run `aliasname` completion will make -# suggestions as it did for `origcommand`. - -$) aliasname -completion firstcommand secondcommand -``` diff --git a/vendor/github.com/spf13/cobra/cobra.go b/vendor/github.com/spf13/cobra/cobra.go index 6505c07..d01becc 100644 --- a/vendor/github.com/spf13/cobra/cobra.go +++ b/vendor/github.com/spf13/cobra/cobra.go @@ -52,7 +52,7 @@ var EnableCommandSorting = true // if the CLI is started from explorer.exe. 
// To disable the mousetrap, just set this variable to blank string (""). // Works only on Microsoft Windows. -var MousetrapHelpText string = `This is a command line tool. +var MousetrapHelpText = `This is a command line tool. You need to open cmd.exe and run it from there. ` @@ -61,7 +61,7 @@ You need to open cmd.exe and run it from there. // if the CLI is started from explorer.exe. Set to 0 to wait for the return key to be pressed. // To disable the mousetrap, just set MousetrapHelpText to blank string (""). // Works only on Microsoft Windows. -var MousetrapDisplayDuration time.Duration = 5 * time.Second +var MousetrapDisplayDuration = 5 * time.Second // AddTemplateFunc adds a template function that's available to Usage and Help // template generation. diff --git a/vendor/github.com/spf13/cobra/command.go b/vendor/github.com/spf13/cobra/command.go index c7e8983..77b399e 100644 --- a/vendor/github.com/spf13/cobra/command.go +++ b/vendor/github.com/spf13/cobra/command.go @@ -17,6 +17,7 @@ package cobra import ( "bytes" + "context" "fmt" "io" "os" @@ -36,6 +37,14 @@ type FParseErrWhitelist flag.ParseErrorsWhitelist // definition to ensure usability. type Command struct { // Use is the one-line usage message. + // Recommended syntax is as follow: + // [ ] identifies an optional argument. Arguments that are not enclosed in brackets are required. + // ... indicates that you can specify multiple values for the previous argument. + // | indicates mutually exclusive information. You can use the argument to the left of the separator or the + // argument to the right of the separator. You cannot use both arguments in a single use of the command. + // { } delimits a set of mutually exclusive arguments when one of the arguments is required. If the arguments are + // optional, they are enclosed in brackets ([ ]). + // Example: add [-F file | -D dir]... [-f format] profile Use string // Aliases is an array of aliases that can be used instead of the first word in Use. @@ -56,6 +65,10 @@ type Command struct { // ValidArgs is list of all valid non-flag arguments that are accepted in bash completions ValidArgs []string + // ValidArgsFunction is an optional function that provides valid non-flag arguments for bash completion. + // It is a dynamic version of using ValidArgs. + // Only one of ValidArgs and ValidArgsFunction can be used for a command. + ValidArgsFunction func(cmd *Command, args []string, toComplete string) ([]string, ShellCompDirective) // Expected arguments Args PositionalArgs @@ -80,7 +93,8 @@ type Command struct { // Version defines the version for this command. If this value is non-empty and the command does not // define a "version" flag, a "version" boolean flag will be added to the command and, if specified, - // will print content of the "Version" variable. + // will print content of the "Version" variable. A shorthand "v" flag will also be added if the + // command does not define one. Version string // The *Run functions are executed in the following order: @@ -140,9 +154,11 @@ type Command struct { // TraverseChildren parses flags on all parents before executing child command. TraverseChildren bool - //FParseErrWhitelist flag parse errors to be ignored + // FParseErrWhitelist flag parse errors to be ignored FParseErrWhitelist FParseErrWhitelist + ctx context.Context + // commands is the list of commands supported by this program. commands []*Command // parent is a parent command for this command. 
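For orientation on the `ValidArgsFunction` field and the `ShellCompDirective` values this vendored update introduces, here is a minimal sketch of how an application built on this cobra version might use them. The `region` command and its completion choices are hypothetical; only the cobra identifiers come from the code in this diff.

```go
package main

import (
	"github.com/spf13/cobra"
)

func main() {
	rootCmd := &cobra.Command{Use: "app"}

	// ValidArgsFunction supplies completions dynamically instead of the static
	// ValidArgs list; only one of the two may be set on a given command.
	regionCmd := &cobra.Command{
		Use:   "region [name]",
		Short: "Operate on a region",
		ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
			if len(args) != 0 {
				// Only complete the first positional argument.
				return nil, cobra.ShellCompDirectiveNoFileComp
			}
			// Text after a tab is treated as a description by the completion scripts.
			return []string{"us-east-1\tUS East", "eu-west-1\tEU West"}, cobra.ShellCompDirectiveNoFileComp
		},
		Run: func(cmd *cobra.Command, args []string) {},
	}

	rootCmd.AddCommand(regionCmd)
	rootCmd.Execute()
}
```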
@@ -202,6 +218,12 @@ type Command struct { errWriter io.Writer } +// Context returns underlying command context. If command wasn't +// executed with ExecuteContext Context returns Background context. +func (c *Command) Context() context.Context { + return c.ctx +} + // SetArgs sets arguments for the command. It is set to os.Args[1:] by default, if desired, can be overridden // particularly useful when testing. func (c *Command) SetArgs(a []string) { @@ -228,7 +250,7 @@ func (c *Command) SetErr(newErr io.Writer) { c.errWriter = newErr } -// SetOut sets the source for input data +// SetIn sets the source for input data // If newIn is nil, os.Stdin is used. func (c *Command) SetIn(newIn io.Reader) { c.inReader = newIn @@ -297,7 +319,7 @@ func (c *Command) ErrOrStderr() io.Writer { return c.getErr(os.Stderr) } -// ErrOrStderr returns output to stderr +// InOrStdin returns input to stdin func (c *Command) InOrStdin() io.Reader { return c.getIn(os.Stdin) } @@ -345,7 +367,7 @@ func (c *Command) UsageFunc() (f func(*Command) error) { c.mergePersistentFlags() err := tmpl(c.OutOrStderr(), c.UsageTemplate(), c) if err != nil { - c.Println(err) + c.PrintErrln(err) } return err } @@ -369,9 +391,11 @@ func (c *Command) HelpFunc() func(*Command, []string) { } return func(c *Command, a []string) { c.mergePersistentFlags() + // The help should be sent to stdout + // See https://github.com/spf13/cobra/issues/1002 err := tmpl(c.OutOrStdout(), c.HelpTemplate(), c) if err != nil { - c.Println(err) + c.PrintErrln(err) } } } @@ -857,6 +881,13 @@ func (c *Command) preRun() { } } +// ExecuteContext is the same as Execute(), but sets the ctx on the command. +// Retrieve ctx by calling cmd.Context() inside your *Run lifecycle functions. +func (c *Command) ExecuteContext(ctx context.Context) error { + c.ctx = ctx + return c.Execute() +} + // Execute uses the args (os.Args[1:] by default) // and run through the command tree finding appropriate matches // for commands and then corresponding flags. @@ -867,6 +898,10 @@ func (c *Command) Execute() error { // ExecuteC executes the command. func (c *Command) ExecuteC() (cmd *Command, err error) { + if c.ctx == nil { + c.ctx = context.Background() + } + // Regardless of what command execute is called on, run on Root only if c.HasParent() { return c.Root().ExecuteC() @@ -888,6 +923,9 @@ func (c *Command) ExecuteC() (cmd *Command, err error) { args = os.Args[1:] } + // initialize the hidden command to be used for bash completion + c.initCompleteCmd(args) + var flags []string if c.TraverseChildren { cmd, flags, err = c.Traverse(args) @@ -900,8 +938,8 @@ func (c *Command) ExecuteC() (cmd *Command, err error) { c = cmd } if !c.SilenceErrors { - c.Println("Error:", err.Error()) - c.Printf("Run '%v --help' for usage.\n", c.CommandPath()) + c.PrintErrln("Error:", err.Error()) + c.PrintErrf("Run '%v --help' for usage.\n", c.CommandPath()) } return c, err } @@ -911,6 +949,12 @@ func (c *Command) ExecuteC() (cmd *Command, err error) { cmd.commandCalledAs.name = cmd.Name() } + // We have to pass global context to children command + // if context is present on the parent command. 
+ if cmd.ctx == nil { + cmd.ctx = c.ctx + } + err = cmd.execute(flags) if err != nil { // Always show help if requested, even if SilenceErrors is in @@ -923,7 +967,7 @@ func (c *Command) ExecuteC() (cmd *Command, err error) { // If root command has SilentErrors flagged, // all subcommands should respect it if !cmd.SilenceErrors && !c.SilenceErrors { - c.Println("Error:", err.Error()) + c.PrintErrln("Error:", err.Error()) } // If root command has SilentUsage flagged, @@ -943,6 +987,10 @@ func (c *Command) ValidateArgs(args []string) error { } func (c *Command) validateRequiredFlags() error { + if c.DisableFlagParsing { + return nil + } + flags := c.Flags() missingFlagNames := []string{} flags.VisitAll(func(pflag *flag.Flag) { @@ -994,7 +1042,11 @@ func (c *Command) InitDefaultVersionFlag() { } else { usage += c.Name() } - c.Flags().Bool("version", false, usage) + if c.Flags().ShorthandLookup("v") == nil { + c.Flags().BoolP("version", "v", false, usage) + } else { + c.Flags().Bool("version", false, usage) + } } } @@ -1012,7 +1064,25 @@ func (c *Command) InitDefaultHelpCmd() { Short: "Help about any command", Long: `Help provides help for any command in the application. Simply type ` + c.Name() + ` help [path to command] for full details.`, - + ValidArgsFunction: func(c *Command, args []string, toComplete string) ([]string, ShellCompDirective) { + var completions []string + cmd, _, e := c.Root().Find(args) + if e != nil { + return nil, ShellCompDirectiveNoFileComp + } + if cmd == nil { + // Root help command. + cmd = c.Root() + } + for _, subCmd := range cmd.Commands() { + if subCmd.IsAvailableCommand() || subCmd == cmd.helpCommand { + if strings.HasPrefix(subCmd.Name(), toComplete) { + completions = append(completions, fmt.Sprintf("%s\t%s", subCmd.Name(), subCmd.Short)) + } + } + } + return completions, ShellCompDirectiveNoFileComp + }, Run: func(c *Command, args []string) { cmd, _, e := c.Root().Find(args) if cmd == nil || e != nil { @@ -1139,12 +1209,12 @@ func (c *Command) PrintErr(i ...interface{}) { // PrintErrln is a convenience method to Println to the defined Err output, fallback to Stderr if not set. func (c *Command) PrintErrln(i ...interface{}) { - c.Print(fmt.Sprintln(i...)) + c.PrintErr(fmt.Sprintln(i...)) } // PrintErrf is a convenience method to Printf to the defined Err output, fallback to Stderr if not set. func (c *Command) PrintErrf(format string, i ...interface{}) { - c.Print(fmt.Sprintf(format, i...)) + c.PrintErr(fmt.Sprintf(format, i...)) } // CommandPath returns the full path to this command. @@ -1547,7 +1617,7 @@ func (c *Command) ParseFlags(args []string) error { beforeErrorBufLen := c.flagErrorBuf.Len() c.mergePersistentFlags() - //do it here after merging all flags and just before parse + // do it here after merging all flags and just before parse c.Flags().ParseErrorsWhitelist = flag.ParseErrorsWhitelist(c.FParseErrWhitelist) err := c.Flags().Parse(args) diff --git a/vendor/github.com/spf13/cobra/custom_completions.go b/vendor/github.com/spf13/cobra/custom_completions.go new file mode 100644 index 0000000..f9e88e0 --- /dev/null +++ b/vendor/github.com/spf13/cobra/custom_completions.go @@ -0,0 +1,557 @@ +package cobra + +import ( + "fmt" + "os" + "strings" + + "github.com/spf13/pflag" +) + +const ( + // ShellCompRequestCmd is the name of the hidden command that is used to request + // completion results from the program. It is used by the shell completion scripts. 
+ ShellCompRequestCmd = "__complete" + // ShellCompNoDescRequestCmd is the name of the hidden command that is used to request + // completion results without their description. It is used by the shell completion scripts. + ShellCompNoDescRequestCmd = "__completeNoDesc" +) + +// Global map of flag completion functions. +var flagCompletionFunctions = map[*pflag.Flag]func(cmd *Command, args []string, toComplete string) ([]string, ShellCompDirective){} + +// ShellCompDirective is a bit map representing the different behaviors the shell +// can be instructed to have once completions have been provided. +type ShellCompDirective int + +const ( + // ShellCompDirectiveError indicates an error occurred and completions should be ignored. + ShellCompDirectiveError ShellCompDirective = 1 << iota + + // ShellCompDirectiveNoSpace indicates that the shell should not add a space + // after the completion even if there is a single completion provided. + ShellCompDirectiveNoSpace + + // ShellCompDirectiveNoFileComp indicates that the shell should not provide + // file completion even when no completion is provided. + // This currently does not work for zsh or bash < 4 + ShellCompDirectiveNoFileComp + + // ShellCompDirectiveFilterFileExt indicates that the provided completions + // should be used as file extension filters. + // For flags, using Command.MarkFlagFilename() and Command.MarkPersistentFlagFilename() + // is a shortcut to using this directive explicitly. The BashCompFilenameExt + // annotation can also be used to obtain the same behavior for flags. + ShellCompDirectiveFilterFileExt + + // ShellCompDirectiveFilterDirs indicates that only directory names should + // be provided in file completion. To request directory names within another + // directory, the returned completions should specify the directory within + // which to search. The BashCompSubdirsInDir annotation can be used to + // obtain the same behavior but only for flags. + ShellCompDirectiveFilterDirs + + // =========================================================================== + + // All directives using iota should be above this one. + // For internal use. + shellCompDirectiveMaxValue + + // ShellCompDirectiveDefault indicates to let the shell perform its default + // behavior after completions have been provided. + // This one must be last to avoid messing up the iota count. + ShellCompDirectiveDefault ShellCompDirective = 0 +) + +// RegisterFlagCompletionFunc should be called to register a function to provide completion for a flag. 
+func (c *Command) RegisterFlagCompletionFunc(flagName string, f func(cmd *Command, args []string, toComplete string) ([]string, ShellCompDirective)) error { + flag := c.Flag(flagName) + if flag == nil { + return fmt.Errorf("RegisterFlagCompletionFunc: flag '%s' does not exist", flagName) + } + if _, exists := flagCompletionFunctions[flag]; exists { + return fmt.Errorf("RegisterFlagCompletionFunc: flag '%s' already registered", flagName) + } + flagCompletionFunctions[flag] = f + return nil +} + +// Returns a string listing the different directive enabled in the specified parameter +func (d ShellCompDirective) string() string { + var directives []string + if d&ShellCompDirectiveError != 0 { + directives = append(directives, "ShellCompDirectiveError") + } + if d&ShellCompDirectiveNoSpace != 0 { + directives = append(directives, "ShellCompDirectiveNoSpace") + } + if d&ShellCompDirectiveNoFileComp != 0 { + directives = append(directives, "ShellCompDirectiveNoFileComp") + } + if d&ShellCompDirectiveFilterFileExt != 0 { + directives = append(directives, "ShellCompDirectiveFilterFileExt") + } + if d&ShellCompDirectiveFilterDirs != 0 { + directives = append(directives, "ShellCompDirectiveFilterDirs") + } + if len(directives) == 0 { + directives = append(directives, "ShellCompDirectiveDefault") + } + + if d >= shellCompDirectiveMaxValue { + return fmt.Sprintf("ERROR: unexpected ShellCompDirective value: %d", d) + } + return strings.Join(directives, ", ") +} + +// Adds a special hidden command that can be used to request custom completions. +func (c *Command) initCompleteCmd(args []string) { + completeCmd := &Command{ + Use: fmt.Sprintf("%s [command-line]", ShellCompRequestCmd), + Aliases: []string{ShellCompNoDescRequestCmd}, + DisableFlagsInUseLine: true, + Hidden: true, + DisableFlagParsing: true, + Args: MinimumNArgs(1), + Short: "Request shell completion choices for the specified command-line", + Long: fmt.Sprintf("%[2]s is a special command that is used by the shell completion logic\n%[1]s", + "to request completion choices for the specified command-line.", ShellCompRequestCmd), + Run: func(cmd *Command, args []string) { + finalCmd, completions, directive, err := cmd.getCompletions(args) + if err != nil { + CompErrorln(err.Error()) + // Keep going for multiple reasons: + // 1- There could be some valid completions even though there was an error + // 2- Even without completions, we need to print the directive + } + + noDescriptions := (cmd.CalledAs() == ShellCompNoDescRequestCmd) + for _, comp := range completions { + if noDescriptions { + // Remove any description that may be included following a tab character. + comp = strings.Split(comp, "\t")[0] + } + + // Make sure we only write the first line to the output. + // This is needed if a description contains a linebreak. + // Otherwise the shell scripts will interpret the other lines as new flags + // and could therefore provide a wrong completion. + comp = strings.Split(comp, "\n")[0] + + // Finally trim the completion. This is especially important to get rid + // of a trailing tab when there are no description following it. + // For example, a sub-command without a description should not be completed + // with a tab at the end (or else zsh will show a -- following it + // although there is no description). + comp = strings.TrimSpace(comp) + + // Print each possible completion to stdout for the completion script to consume. 
+ fmt.Fprintln(finalCmd.OutOrStdout(), comp) + } + + if directive >= shellCompDirectiveMaxValue { + directive = ShellCompDirectiveDefault + } + + // As the last printout, print the completion directive for the completion script to parse. + // The directive integer must be that last character following a single colon (:). + // The completion script expects : + fmt.Fprintf(finalCmd.OutOrStdout(), ":%d\n", directive) + + // Print some helpful info to stderr for the user to understand. + // Output from stderr must be ignored by the completion script. + fmt.Fprintf(finalCmd.ErrOrStderr(), "Completion ended with directive: %s\n", directive.string()) + }, + } + c.AddCommand(completeCmd) + subCmd, _, err := c.Find(args) + if err != nil || subCmd.Name() != ShellCompRequestCmd { + // Only create this special command if it is actually being called. + // This reduces possible side-effects of creating such a command; + // for example, having this command would cause problems to a + // cobra program that only consists of the root command, since this + // command would cause the root command to suddenly have a subcommand. + c.RemoveCommand(completeCmd) + } +} + +func (c *Command) getCompletions(args []string) (*Command, []string, ShellCompDirective, error) { + // The last argument, which is not completely typed by the user, + // should not be part of the list of arguments + toComplete := args[len(args)-1] + trimmedArgs := args[:len(args)-1] + + var finalCmd *Command + var finalArgs []string + var err error + // Find the real command for which completion must be performed + // check if we need to traverse here to parse local flags on parent commands + if c.Root().TraverseChildren { + finalCmd, finalArgs, err = c.Root().Traverse(trimmedArgs) + } else { + finalCmd, finalArgs, err = c.Root().Find(trimmedArgs) + } + if err != nil { + // Unable to find the real command. E.g., someInvalidCmd + return c, []string{}, ShellCompDirectiveDefault, fmt.Errorf("Unable to find a command for arguments: %v", trimmedArgs) + } + + // Check if we are doing flag value completion before parsing the flags. + // This is important because if we are completing a flag value, we need to also + // remove the flag name argument from the list of finalArgs or else the parsing + // could fail due to an invalid value (incomplete) for the flag. + flag, finalArgs, toComplete, err := checkIfFlagCompletion(finalCmd, finalArgs, toComplete) + if err != nil { + // Error while attempting to parse flags + return finalCmd, []string{}, ShellCompDirectiveDefault, err + } + + // Parse the flags early so we can check if required flags are set + if err = finalCmd.ParseFlags(finalArgs); err != nil { + return finalCmd, []string{}, ShellCompDirectiveDefault, fmt.Errorf("Error while parsing flags from args %v: %s", finalArgs, err.Error()) + } + + if flag != nil { + // Check if we are completing a flag value subject to annotations + if validExts, present := flag.Annotations[BashCompFilenameExt]; present { + if len(validExts) != 0 { + // File completion filtered by extensions + return finalCmd, validExts, ShellCompDirectiveFilterFileExt, nil + } + + // The annotation requests simple file completion. There is no reason to do + // that since it is the default behavior anyway. Let's ignore this annotation + // in case the program also registered a completion function for this flag. + // Even though it is a mistake on the program's side, let's be nice when we can. 
+ } + + if subDir, present := flag.Annotations[BashCompSubdirsInDir]; present { + if len(subDir) == 1 { + // Directory completion from within a directory + return finalCmd, subDir, ShellCompDirectiveFilterDirs, nil + } + // Directory completion + return finalCmd, []string{}, ShellCompDirectiveFilterDirs, nil + } + } + + // When doing completion of a flag name, as soon as an argument starts with + // a '-' we know it is a flag. We cannot use isFlagArg() here as it requires + // the flag name to be complete + if flag == nil && len(toComplete) > 0 && toComplete[0] == '-' && !strings.Contains(toComplete, "=") { + var completions []string + + // First check for required flags + completions = completeRequireFlags(finalCmd, toComplete) + + // If we have not found any required flags, only then can we show regular flags + if len(completions) == 0 { + doCompleteFlags := func(flag *pflag.Flag) { + if !flag.Changed || + strings.Contains(flag.Value.Type(), "Slice") || + strings.Contains(flag.Value.Type(), "Array") { + // If the flag is not already present, or if it can be specified multiple times (Array or Slice) + // we suggest it as a completion + completions = append(completions, getFlagNameCompletions(flag, toComplete)...) + } + } + + // We cannot use finalCmd.Flags() because we may not have called ParsedFlags() for commands + // that have set DisableFlagParsing; it is ParseFlags() that merges the inherited and + // non-inherited flags. + finalCmd.InheritedFlags().VisitAll(func(flag *pflag.Flag) { + doCompleteFlags(flag) + }) + finalCmd.NonInheritedFlags().VisitAll(func(flag *pflag.Flag) { + doCompleteFlags(flag) + }) + } + + directive := ShellCompDirectiveNoFileComp + if len(completions) == 1 && strings.HasSuffix(completions[0], "=") { + // If there is a single completion, the shell usually adds a space + // after the completion. We don't want that if the flag ends with an = + directive = ShellCompDirectiveNoSpace + } + return finalCmd, completions, directive, nil + } + + // We only remove the flags from the arguments if DisableFlagParsing is not set. + // This is important for commands which have requested to do their own flag completion. 
+ if !finalCmd.DisableFlagParsing { + finalArgs = finalCmd.Flags().Args() + } + + var completions []string + directive := ShellCompDirectiveDefault + if flag == nil { + foundLocalNonPersistentFlag := false + // If TraverseChildren is true on the root command we don't check for + // local flags because we can use a local flag on a parent command + if !finalCmd.Root().TraverseChildren { + // Check if there are any local, non-persistent flags on the command-line + localNonPersistentFlags := finalCmd.LocalNonPersistentFlags() + finalCmd.NonInheritedFlags().VisitAll(func(flag *pflag.Flag) { + if localNonPersistentFlags.Lookup(flag.Name) != nil && flag.Changed { + foundLocalNonPersistentFlag = true + } + }) + } + + // Complete subcommand names, including the help command + if len(finalArgs) == 0 && !foundLocalNonPersistentFlag { + // We only complete sub-commands if: + // - there are no arguments on the command-line and + // - there are no local, non-peristent flag on the command-line or TraverseChildren is true + for _, subCmd := range finalCmd.Commands() { + if subCmd.IsAvailableCommand() || subCmd == finalCmd.helpCommand { + if strings.HasPrefix(subCmd.Name(), toComplete) { + completions = append(completions, fmt.Sprintf("%s\t%s", subCmd.Name(), subCmd.Short)) + } + directive = ShellCompDirectiveNoFileComp + } + } + } + + // Complete required flags even without the '-' prefix + completions = append(completions, completeRequireFlags(finalCmd, toComplete)...) + + // Always complete ValidArgs, even if we are completing a subcommand name. + // This is for commands that have both subcommands and ValidArgs. + if len(finalCmd.ValidArgs) > 0 { + if len(finalArgs) == 0 { + // ValidArgs are only for the first argument + for _, validArg := range finalCmd.ValidArgs { + if strings.HasPrefix(validArg, toComplete) { + completions = append(completions, validArg) + } + } + directive = ShellCompDirectiveNoFileComp + + // If no completions were found within commands or ValidArgs, + // see if there are any ArgAliases that should be completed. + if len(completions) == 0 { + for _, argAlias := range finalCmd.ArgAliases { + if strings.HasPrefix(argAlias, toComplete) { + completions = append(completions, argAlias) + } + } + } + } + + // If there are ValidArgs specified (even if they don't match), we stop completion. + // Only one of ValidArgs or ValidArgsFunction can be used for a single command. + return finalCmd, completions, directive, nil + } + + // Let the logic continue so as to add any ValidArgsFunction completions, + // even if we already found sub-commands. + // This is for commands that have subcommands but also specify a ValidArgsFunction. + } + + // Find the completion function for the flag or command + var completionFn func(cmd *Command, args []string, toComplete string) ([]string, ShellCompDirective) + if flag != nil { + completionFn = flagCompletionFunctions[flag] + } else { + completionFn = finalCmd.ValidArgsFunction + } + if completionFn != nil { + // Go custom completion defined for this flag or command. + // Call the registered completion function to get the completions. + var comps []string + comps, directive = completionFn(finalCmd, finalArgs, toComplete) + completions = append(completions, comps...) 
+ } + + return finalCmd, completions, directive, nil +} + +func getFlagNameCompletions(flag *pflag.Flag, toComplete string) []string { + if nonCompletableFlag(flag) { + return []string{} + } + + var completions []string + flagName := "--" + flag.Name + if strings.HasPrefix(flagName, toComplete) { + // Flag without the = + completions = append(completions, fmt.Sprintf("%s\t%s", flagName, flag.Usage)) + + // Why suggest both long forms: --flag and --flag= ? + // This forces the user to *always* have to type either an = or a space after the flag name. + // Let's be nice and avoid making users have to do that. + // Since boolean flags and shortname flags don't show the = form, let's go that route and never show it. + // The = form will still work, we just won't suggest it. + // This also makes the list of suggested flags shorter as we avoid all the = forms. + // + // if len(flag.NoOptDefVal) == 0 { + // // Flag requires a value, so it can be suffixed with = + // flagName += "=" + // completions = append(completions, fmt.Sprintf("%s\t%s", flagName, flag.Usage)) + // } + } + + flagName = "-" + flag.Shorthand + if len(flag.Shorthand) > 0 && strings.HasPrefix(flagName, toComplete) { + completions = append(completions, fmt.Sprintf("%s\t%s", flagName, flag.Usage)) + } + + return completions +} + +func completeRequireFlags(finalCmd *Command, toComplete string) []string { + var completions []string + + doCompleteRequiredFlags := func(flag *pflag.Flag) { + if _, present := flag.Annotations[BashCompOneRequiredFlag]; present { + if !flag.Changed { + // If the flag is not already present, we suggest it as a completion + completions = append(completions, getFlagNameCompletions(flag, toComplete)...) + } + } + } + + // We cannot use finalCmd.Flags() because we may not have called ParsedFlags() for commands + // that have set DisableFlagParsing; it is ParseFlags() that merges the inherited and + // non-inherited flags. + finalCmd.InheritedFlags().VisitAll(func(flag *pflag.Flag) { + doCompleteRequiredFlags(flag) + }) + finalCmd.NonInheritedFlags().VisitAll(func(flag *pflag.Flag) { + doCompleteRequiredFlags(flag) + }) + + return completions +} + +func checkIfFlagCompletion(finalCmd *Command, args []string, lastArg string) (*pflag.Flag, []string, string, error) { + if finalCmd.DisableFlagParsing { + // We only do flag completion if we are allowed to parse flags + // This is important for commands which have requested to do their own flag completion. + return nil, args, lastArg, nil + } + + var flagName string + trimmedArgs := args + flagWithEqual := false + + // When doing completion of a flag name, as soon as an argument starts with + // a '-' we know it is a flag. We cannot use isFlagArg() here as that function + // requires the flag name to be complete + if len(lastArg) > 0 && lastArg[0] == '-' { + if index := strings.Index(lastArg, "="); index >= 0 { + // Flag with an = + flagName = strings.TrimLeft(lastArg[:index], "-") + lastArg = lastArg[index+1:] + flagWithEqual = true + } else { + // Normal flag completion + return nil, args, lastArg, nil + } + } + + if len(flagName) == 0 { + if len(args) > 0 { + prevArg := args[len(args)-1] + if isFlagArg(prevArg) { + // Only consider the case where the flag does not contain an =. + // If the flag contains an = it means it has already been fully processed, + // so we don't need to deal with it here. 
+ if index := strings.Index(prevArg, "="); index < 0 { + flagName = strings.TrimLeft(prevArg, "-") + + // Remove the uncompleted flag or else there could be an error created + // for an invalid value for that flag + trimmedArgs = args[:len(args)-1] + } + } + } + } + + if len(flagName) == 0 { + // Not doing flag completion + return nil, trimmedArgs, lastArg, nil + } + + flag := findFlag(finalCmd, flagName) + if flag == nil { + // Flag not supported by this command, nothing to complete + err := fmt.Errorf("Subcommand '%s' does not support flag '%s'", finalCmd.Name(), flagName) + return nil, nil, "", err + } + + if !flagWithEqual { + if len(flag.NoOptDefVal) != 0 { + // We had assumed dealing with a two-word flag but the flag is a boolean flag. + // In that case, there is no value following it, so we are not really doing flag completion. + // Reset everything to do noun completion. + trimmedArgs = args + flag = nil + } + } + + return flag, trimmedArgs, lastArg, nil +} + +func findFlag(cmd *Command, name string) *pflag.Flag { + flagSet := cmd.Flags() + if len(name) == 1 { + // First convert the short flag into a long flag + // as the cmd.Flag() search only accepts long flags + if short := flagSet.ShorthandLookup(name); short != nil { + name = short.Name + } else { + set := cmd.InheritedFlags() + if short = set.ShorthandLookup(name); short != nil { + name = short.Name + } else { + return nil + } + } + } + return cmd.Flag(name) +} + +// CompDebug prints the specified string to the same file as where the +// completion script prints its logs. +// Note that completion printouts should never be on stdout as they would +// be wrongly interpreted as actual completion choices by the completion script. +func CompDebug(msg string, printToStdErr bool) { + msg = fmt.Sprintf("[Debug] %s", msg) + + // Such logs are only printed when the user has set the environment + // variable BASH_COMP_DEBUG_FILE to the path of some file to be used. + if path := os.Getenv("BASH_COMP_DEBUG_FILE"); path != "" { + f, err := os.OpenFile(path, + os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) + if err == nil { + defer f.Close() + f.WriteString(msg) + } + } + + if printToStdErr { + // Must print to stderr for this not to be read by the completion script. + fmt.Fprintf(os.Stderr, msg) + } +} + +// CompDebugln prints the specified string with a newline at the end +// to the same file as where the completion script prints its logs. +// Such logs are only printed when the user has set the environment +// variable BASH_COMP_DEBUG_FILE to the path of some file to be used. +func CompDebugln(msg string, printToStdErr bool) { + CompDebug(fmt.Sprintf("%s\n", msg), printToStdErr) +} + +// CompError prints the specified completion message to stderr. +func CompError(msg string) { + msg = fmt.Sprintf("[Error] %s", msg) + CompDebug(msg, true) +} + +// CompErrorln prints the specified completion message to stderr with a newline at the end. 
+func CompErrorln(msg string) { + CompError(fmt.Sprintf("%s\n", msg)) +} diff --git a/vendor/github.com/spf13/cobra/fish_completions.go b/vendor/github.com/spf13/cobra/fish_completions.go new file mode 100644 index 0000000..eaae9bc --- /dev/null +++ b/vendor/github.com/spf13/cobra/fish_completions.go @@ -0,0 +1,207 @@ +package cobra + +import ( + "bytes" + "fmt" + "io" + "os" + "strings" +) + +func genFishComp(buf *bytes.Buffer, name string, includeDesc bool) { + // Variables should not contain a '-' or ':' character + nameForVar := name + nameForVar = strings.Replace(nameForVar, "-", "_", -1) + nameForVar = strings.Replace(nameForVar, ":", "_", -1) + + compCmd := ShellCompRequestCmd + if !includeDesc { + compCmd = ShellCompNoDescRequestCmd + } + buf.WriteString(fmt.Sprintf("# fish completion for %-36s -*- shell-script -*-\n", name)) + buf.WriteString(fmt.Sprintf(` +function __%[1]s_debug + set file "$BASH_COMP_DEBUG_FILE" + if test -n "$file" + echo "$argv" >> $file + end +end + +function __%[1]s_perform_completion + __%[1]s_debug "Starting __%[1]s_perform_completion with: $argv" + + set args (string split -- " " "$argv") + set lastArg "$args[-1]" + + __%[1]s_debug "args: $args" + __%[1]s_debug "last arg: $lastArg" + + set emptyArg "" + if test -z "$lastArg" + __%[1]s_debug "Setting emptyArg" + set emptyArg \"\" + end + __%[1]s_debug "emptyArg: $emptyArg" + + if not type -q "$args[1]" + # This can happen when "complete --do-complete %[2]s" is called when running this script. + __%[1]s_debug "Cannot find $args[1]. No completions." + return + end + + set requestComp "$args[1] %[3]s $args[2..-1] $emptyArg" + __%[1]s_debug "Calling $requestComp" + + set results (eval $requestComp 2> /dev/null) + set comps $results[1..-2] + set directiveLine $results[-1] + + # For Fish, when completing a flag with an = (e.g., -n=) + # completions must be prefixed with the flag + set flagPrefix (string match -r -- '-.*=' "$lastArg") + + __%[1]s_debug "Comps: $comps" + __%[1]s_debug "DirectiveLine: $directiveLine" + __%[1]s_debug "flagPrefix: $flagPrefix" + + for comp in $comps + printf "%%s%%s\n" "$flagPrefix" "$comp" + end + + printf "%%s\n" "$directiveLine" +end + +# This function does three things: +# 1- Obtain the completions and store them in the global __%[1]s_comp_results +# 2- Set the __%[1]s_comp_do_file_comp flag if file completion should be performed +# and unset it otherwise +# 3- Return true if the completion results are not empty +function __%[1]s_prepare_completions + # Start fresh + set --erase __%[1]s_comp_do_file_comp + set --erase __%[1]s_comp_results + + # Check if the command-line is already provided. This is useful for testing. 
+ if not set --query __%[1]s_comp_commandLine + # Use the -c flag to allow for completion in the middle of the line + set __%[1]s_comp_commandLine (commandline -c) + end + __%[1]s_debug "commandLine is: $__%[1]s_comp_commandLine" + + set results (__%[1]s_perform_completion "$__%[1]s_comp_commandLine") + set --erase __%[1]s_comp_commandLine + __%[1]s_debug "Completion results: $results" + + if test -z "$results" + __%[1]s_debug "No completion, probably due to a failure" + # Might as well do file completion, in case it helps + set --global __%[1]s_comp_do_file_comp 1 + return 1 + end + + set directive (string sub --start 2 $results[-1]) + set --global __%[1]s_comp_results $results[1..-2] + + __%[1]s_debug "Completions are: $__%[1]s_comp_results" + __%[1]s_debug "Directive is: $directive" + + set shellCompDirectiveError %[4]d + set shellCompDirectiveNoSpace %[5]d + set shellCompDirectiveNoFileComp %[6]d + set shellCompDirectiveFilterFileExt %[7]d + set shellCompDirectiveFilterDirs %[8]d + + if test -z "$directive" + set directive 0 + end + + set compErr (math (math --scale 0 $directive / $shellCompDirectiveError) %% 2) + if test $compErr -eq 1 + __%[1]s_debug "Received error directive: aborting." + # Might as well do file completion, in case it helps + set --global __%[1]s_comp_do_file_comp 1 + return 1 + end + + set filefilter (math (math --scale 0 $directive / $shellCompDirectiveFilterFileExt) %% 2) + set dirfilter (math (math --scale 0 $directive / $shellCompDirectiveFilterDirs) %% 2) + if test $filefilter -eq 1; or test $dirfilter -eq 1 + __%[1]s_debug "File extension filtering or directory filtering not supported" + # Do full file completion instead + set --global __%[1]s_comp_do_file_comp 1 + return 1 + end + + set nospace (math (math --scale 0 $directive / $shellCompDirectiveNoSpace) %% 2) + set nofiles (math (math --scale 0 $directive / $shellCompDirectiveNoFileComp) %% 2) + + __%[1]s_debug "nospace: $nospace, nofiles: $nofiles" + + # Important not to quote the variable for count to work + set numComps (count $__%[1]s_comp_results) + __%[1]s_debug "numComps: $numComps" + + if test $numComps -eq 1; and test $nospace -ne 0 + # To support the "nospace" directive we trick the shell + # by outputting an extra, longer completion. + __%[1]s_debug "Adding second completion to perform nospace directive" + set --append __%[1]s_comp_results $__%[1]s_comp_results[1]. + end + + if test $numComps -eq 0; and test $nofiles -eq 0 + __%[1]s_debug "Requesting file completion" + set --global __%[1]s_comp_do_file_comp 1 + end + + # If we don't want file completion, we must return true even if there + # are no completions found. This is because fish will perform the last + # completion command, even if its condition is false, if no other + # completion command was triggered + return (not set --query __%[1]s_comp_do_file_comp) +end + +# Since Fish completions are only loaded once the user triggers them, we trigger them ourselves +# so we can properly delete any completions provided by another script. +# The space after the the program name is essential to trigger completion for the program +# and not completion of the program name itself. +complete --do-complete "%[2]s " > /dev/null 2>&1 +# Using '> /dev/null 2>&1' since '&>' is not supported in older versions of fish. + +# Remove any pre-existing completions for the program since we will be handling all of them. 
+complete -c %[2]s -e + +# The order in which the below two lines are defined is very important so that __%[1]s_prepare_completions +# is called first. It is __%[1]s_prepare_completions that sets up the __%[1]s_comp_do_file_comp variable. +# +# This completion will be run second as complete commands are added FILO. +# It triggers file completion choices when __%[1]s_comp_do_file_comp is set. +complete -c %[2]s -n 'set --query __%[1]s_comp_do_file_comp' + +# This completion will be run first as complete commands are added FILO. +# The call to __%[1]s_prepare_completions will setup both __%[1]s_comp_results and __%[1]s_comp_do_file_comp. +# It provides the program's completion choices. +complete -c %[2]s -n '__%[1]s_prepare_completions' -f -a '$__%[1]s_comp_results' + +`, nameForVar, name, compCmd, + ShellCompDirectiveError, ShellCompDirectiveNoSpace, ShellCompDirectiveNoFileComp, + ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs)) +} + +// GenFishCompletion generates fish completion file and writes to the passed writer. +func (c *Command) GenFishCompletion(w io.Writer, includeDesc bool) error { + buf := new(bytes.Buffer) + genFishComp(buf, c.Name(), includeDesc) + _, err := buf.WriteTo(w) + return err +} + +// GenFishCompletionFile generates fish completion file. +func (c *Command) GenFishCompletionFile(filename string, includeDesc bool) error { + outFile, err := os.Create(filename) + if err != nil { + return err + } + defer outFile.Close() + + return c.GenFishCompletion(outFile, includeDesc) +} diff --git a/vendor/github.com/spf13/cobra/fish_completions.md b/vendor/github.com/spf13/cobra/fish_completions.md new file mode 100644 index 0000000..19b2ed1 --- /dev/null +++ b/vendor/github.com/spf13/cobra/fish_completions.md @@ -0,0 +1,4 @@ +## Generating Fish Completions For Your cobra.Command + +Please refer to [Shell Completions](shell_completions.md) for details. 
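+
+A minimal usage sketch of the two generator functions above, assuming a program named `myprogram` whose root `*cobra.Command` is a hypothetical variable `rootCmd`:
+
+```go
+package main
+
+import (
+    "log"
+    "os"
+
+    "github.com/spf13/cobra"
+)
+
+func main() {
+    // Hypothetical root command; in a real program this is your application's root command.
+    rootCmd := &cobra.Command{Use: "myprogram"}
+
+    // Stream the fish completion script to stdout; the boolean enables per-completion descriptions.
+    if err := rootCmd.GenFishCompletion(os.Stdout, true); err != nil {
+        log.Fatal(err)
+    }
+
+    // Or write it to a file that fish can load, e.g. from ~/.config/fish/completions/.
+    if err := rootCmd.GenFishCompletionFile("myprogram.fish", true); err != nil {
+        log.Fatal(err)
+    }
+}
+```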
+ diff --git a/vendor/github.com/spf13/cobra/go.mod b/vendor/github.com/spf13/cobra/go.mod index 9a9eb65..57e3244 100644 --- a/vendor/github.com/spf13/cobra/go.mod +++ b/vendor/github.com/spf13/cobra/go.mod @@ -3,11 +3,10 @@ module github.com/spf13/cobra go 1.12 require ( - github.com/BurntSushi/toml v0.3.1 // indirect - github.com/cpuguy83/go-md2man v1.0.10 + github.com/cpuguy83/go-md2man/v2 v2.0.0 github.com/inconshreveable/mousetrap v1.0.0 github.com/mitchellh/go-homedir v1.1.0 - github.com/spf13/pflag v1.0.3 - github.com/spf13/viper v1.3.2 - gopkg.in/yaml.v2 v2.2.2 + github.com/spf13/pflag v1.0.5 + github.com/spf13/viper v1.7.0 + gopkg.in/yaml.v2 v2.2.8 ) diff --git a/vendor/github.com/spf13/cobra/go.sum b/vendor/github.com/spf13/cobra/go.sum index 9761f4d..0aae738 100644 --- a/vendor/github.com/spf13/cobra/go.sum +++ b/vendor/github.com/spf13/cobra/go.sum @@ -1,51 +1,313 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= -github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= -github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= -github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= -github.com/cpuguy83/go-md2man v1.0.10 h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk= -github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= +github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-radix 
v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84= +github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= +github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= +github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= +github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM= +github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= +github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= 
+github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= +github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= +github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= +github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= +github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= +github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= +github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= +github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= +github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= +github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= +github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= +github.com/hashicorp/memberlist v0.1.3/go.mod 
h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= +github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= -github.com/magiconair/properties v1.8.0 h1:LLgXmsheXeRoUOBOjtwPQCWIYqM/LU1ayDtDePerRcY= -github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= +github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/magiconair/properties v1.8.1 h1:ZC2Vc7/ZFkGmsVC9KvOjumD+G5lXy2RtTKyzRKO2BQ4= +github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= +github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= +github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= +github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= +github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= +github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= +github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod 
h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= +github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo= -github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= +github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= +github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= +github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= +github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= 
+github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= +github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.1.2 h1:m8/z1t7/fwjysjQRYbP0RD+bUIF/8tJwPdEZsI83ACI= github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= github.com/spf13/cast v1.3.0 h1:oget//CVOEoFewqQxwr0Ej5yjygnqGkvggSE/gB35Q8= github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk= github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= -github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/spf13/viper v1.3.2 h1:VUFqw5KcqRf7i70GOzW7N+Q7+gxVBkSSqiXB12+JQ4M= -github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= -github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.7.0 h1:xVKxvI7ouOI5I+U9s2eeiUfMaWBVoXA3AWskkrqK0VM= +github.com/spf13/viper v1.7.0/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= -github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= -golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a h1:1n5lsVfiQW3yfsRGu98756EH1YthsFqr/5mxHduZW2A= -golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= +github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s= +github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= +github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= +go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= 
+golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod 
h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0 h1:HyfiK1WMnHj5FXFXatD+Qs1A/xC2Run6RzeW1SyHxpc= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools 
v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod 
h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/ini.v1 v1.51.0 h1:AQvPpx3LzTDM0AjnIRlVFwFFGC+npRopjZxLJj6gdno= +gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= +gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= diff --git a/vendor/github.com/spf13/cobra/powershell_completions.md b/vendor/github.com/spf13/cobra/powershell_completions.md index afed802..55f154a 100644 --- a/vendor/github.com/spf13/cobra/powershell_completions.md +++ b/vendor/github.com/spf13/cobra/powershell_completions.md @@ -2,6 +2,8 @@ Cobra can generate PowerShell completion scripts. Users need PowerShell version 5.0 or above, which comes with Windows 10 and can be downloaded separately for Windows 7 or 8.1. They can then write the completions to a file and source this file from their PowerShell profile, which is referenced by the `$Profile` environment variable. See `Get-Help about_Profiles` for more info about PowerShell profiles. +*Note*: PowerShell completions have not (yet?) been aligned to Cobra's generic shell completion support. This implies the PowerShell completions are not as rich as for other shells (see [What's not yet supported](#whats-not-yet-supported)), and may behave slightly differently. They are still very useful for PowerShell users. 
+ # What's supported - Completion for subcommands using their `.Short` description diff --git a/vendor/github.com/spf13/cobra/projects_using_cobra.md b/vendor/github.com/spf13/cobra/projects_using_cobra.md new file mode 100644 index 0000000..31c2720 --- /dev/null +++ b/vendor/github.com/spf13/cobra/projects_using_cobra.md @@ -0,0 +1,35 @@ +## Projects using Cobra + +- [Arduino CLI](https://github.com/arduino/arduino-cli) +- [Bleve](http://www.blevesearch.com/) +- [CockroachDB](http://www.cockroachlabs.com/) +- [Cosmos SDK](https://github.com/cosmos/cosmos-sdk) +- [Delve](https://github.com/derekparker/delve) +- [Docker (distribution)](https://github.com/docker/distribution) +- [Etcd](https://etcd.io/) +- [Gardener](https://github.com/gardener/gardenctl) +- [Giant Swarm's gsctl](https://github.com/giantswarm/gsctl) +- [Git Bump](https://github.com/erdaltsksn/git-bump) +- [Github CLI](https://github.com/cli/cli) +- [GitHub Labeler](https://github.com/erdaltsksn/gh-label) +- [Golangci-lint](https://golangci-lint.run) +- [GopherJS](http://www.gopherjs.org/) +- [Helm](https://helm.sh) +- [Hugo](https://gohugo.io) +- [Istio](https://istio.io) +- [Kool](https://github.com/kool-dev/kool) +- [Kubernetes](http://kubernetes.io/) +- [Linkerd](https://linkerd.io/) +- [Mattermost-server](https://github.com/mattermost/mattermost-server) +- [Metal Stack CLI](https://github.com/metal-stack/metalctl) +- [Moby (former Docker)](https://github.com/moby/moby) +- [Nanobox](https://github.com/nanobox-io/nanobox)/[Nanopack](https://github.com/nanopack) +- [OpenShift](https://www.openshift.com/) +- [Pouch](https://github.com/alibaba/pouch) +- [ProjectAtomic (enterprise)](http://www.projectatomic.io/) +- [Prototool](https://github.com/uber/prototool) +- [Random](https://github.com/erdaltsksn/random) +- [Rclone](https://rclone.org/) +- [Skaffold](https://skaffold.dev/) +- [Tendermint](https://github.com/tendermint/tendermint) +- [Werf](https://werf.io/) diff --git a/vendor/github.com/spf13/cobra/shell_completions.go b/vendor/github.com/spf13/cobra/shell_completions.go index ba0af9c..d99bf91 100644 --- a/vendor/github.com/spf13/cobra/shell_completions.go +++ b/vendor/github.com/spf13/cobra/shell_completions.go @@ -4,82 +4,81 @@ import ( "github.com/spf13/pflag" ) -// MarkFlagRequired adds the BashCompOneRequiredFlag annotation to the named flag if it exists, +// MarkFlagRequired instructs the various shell completion implementations to +// prioritize the named flag when performing completion, // and causes your command to report an error if invoked without the flag. func (c *Command) MarkFlagRequired(name string) error { return MarkFlagRequired(c.Flags(), name) } -// MarkPersistentFlagRequired adds the BashCompOneRequiredFlag annotation to the named persistent flag if it exists, +// MarkPersistentFlagRequired instructs the various shell completion implementations to +// prioritize the named persistent flag when performing completion, // and causes your command to report an error if invoked without the flag. func (c *Command) MarkPersistentFlagRequired(name string) error { return MarkFlagRequired(c.PersistentFlags(), name) } -// MarkFlagRequired adds the BashCompOneRequiredFlag annotation to the named flag if it exists, +// MarkFlagRequired instructs the various shell completion implementations to +// prioritize the named flag when performing completion, // and causes your command to report an error if invoked without the flag. 
func MarkFlagRequired(flags *pflag.FlagSet, name string) error { return flags.SetAnnotation(name, BashCompOneRequiredFlag, []string{"true"}) } -// MarkFlagFilename adds the BashCompFilenameExt annotation to the named flag, if it exists. -// Generated bash autocompletion will select filenames for the flag, limiting to named extensions if provided. +// MarkFlagFilename instructs the various shell completion implementations to +// limit completions for the named flag to the specified file extensions. func (c *Command) MarkFlagFilename(name string, extensions ...string) error { return MarkFlagFilename(c.Flags(), name, extensions...) } // MarkFlagCustom adds the BashCompCustom annotation to the named flag, if it exists. -// Generated bash autocompletion will call the bash function f for the flag. +// The bash completion script will call the bash function f for the flag. +// +// This will only work for bash completion. +// It is recommended to instead use c.RegisterFlagCompletionFunc(...) which allows +// to register a Go function which will work across all shells. func (c *Command) MarkFlagCustom(name string, f string) error { return MarkFlagCustom(c.Flags(), name, f) } // MarkPersistentFlagFilename instructs the various shell completion -// implementations to limit completions for this persistent flag to the -// specified extensions (patterns). -// -// Shell Completion compatibility matrix: bash, zsh +// implementations to limit completions for the named persistent flag to the +// specified file extensions. func (c *Command) MarkPersistentFlagFilename(name string, extensions ...string) error { return MarkFlagFilename(c.PersistentFlags(), name, extensions...) } // MarkFlagFilename instructs the various shell completion implementations to -// limit completions for this flag to the specified extensions (patterns). -// -// Shell Completion compatibility matrix: bash, zsh +// limit completions for the named flag to the specified file extensions. func MarkFlagFilename(flags *pflag.FlagSet, name string, extensions ...string) error { return flags.SetAnnotation(name, BashCompFilenameExt, extensions) } -// MarkFlagCustom instructs the various shell completion implementations to -// limit completions for this flag to the specified extensions (patterns). +// MarkFlagCustom adds the BashCompCustom annotation to the named flag, if it exists. +// The bash completion script will call the bash function f for the flag. // -// Shell Completion compatibility matrix: bash, zsh +// This will only work for bash completion. +// It is recommended to instead use c.RegisterFlagCompletionFunc(...) which allows +// to register a Go function which will work across all shells. func MarkFlagCustom(flags *pflag.FlagSet, name string, f string) error { return flags.SetAnnotation(name, BashCompCustom, []string{f}) } // MarkFlagDirname instructs the various shell completion implementations to -// complete only directories with this named flag. -// -// Shell Completion compatibility matrix: zsh +// limit completions for the named flag to directory names. func (c *Command) MarkFlagDirname(name string) error { return MarkFlagDirname(c.Flags(), name) } // MarkPersistentFlagDirname instructs the various shell completion -// implementations to complete only directories with this persistent named flag. -// -// Shell Completion compatibility matrix: zsh +// implementations to limit completions for the named persistent flag to +// directory names. 
func (c *Command) MarkPersistentFlagDirname(name string) error { return MarkFlagDirname(c.PersistentFlags(), name) } // MarkFlagDirname instructs the various shell completion implementations to -// complete only directories with this specified flag. -// -// Shell Completion compatibility matrix: zsh +// limit completions for the named flag to directory names. func MarkFlagDirname(flags *pflag.FlagSet, name string) error { - zshPattern := "-(/)" - return flags.SetAnnotation(name, zshCompDirname, []string{zshPattern}) + return flags.SetAnnotation(name, BashCompSubdirsInDir, []string{}) } diff --git a/vendor/github.com/spf13/cobra/shell_completions.md b/vendor/github.com/spf13/cobra/shell_completions.md new file mode 100644 index 0000000..d8416ab --- /dev/null +++ b/vendor/github.com/spf13/cobra/shell_completions.md @@ -0,0 +1,434 @@ +# Generating shell completions + +Cobra can generate shell completions for multiple shells. +The currently supported shells are: +- Bash +- Zsh +- Fish +- PowerShell + +If you are using the generator you can create a completion command by running + +```bash +cobra add completion +``` +and then modifying the generated `cmd/completion.go` file to look something like this +(writing the shell script to stdout allows the most flexible use): + +```go +var completionCmd = &cobra.Command{ + Use: "completion [bash|zsh|fish|powershell]", + Short: "Generate completion script", + Long: `To load completions: + +Bash: + +$ source <(yourprogram completion bash) + +# To load completions for each session, execute once: +Linux: + $ yourprogram completion bash > /etc/bash_completion.d/yourprogram +MacOS: + $ yourprogram completion bash > /usr/local/etc/bash_completion.d/yourprogram + +Zsh: + +# If shell completion is not already enabled in your environment you will need +# to enable it. You can execute the following once: + +$ echo "autoload -U compinit; compinit" >> ~/.zshrc + +# To load completions for each session, execute once: +$ yourprogram completion zsh > "${fpath[1]}/_yourprogram" + +# You will need to start a new shell for this setup to take effect. + +Fish: + +$ yourprogram completion fish | source + +# To load completions for each session, execute once: +$ yourprogram completion fish > ~/.config/fish/completions/yourprogram.fish +`, + DisableFlagsInUseLine: true, + ValidArgs: []string{"bash", "zsh", "fish", "powershell"}, + Args: cobra.ExactValidArgs(1), + Run: func(cmd *cobra.Command, args []string) { + switch args[0] { + case "bash": + cmd.Root().GenBashCompletion(os.Stdout) + case "zsh": + cmd.Root().GenZshCompletion(os.Stdout) + case "fish": + cmd.Root().GenFishCompletion(os.Stdout, true) + case "powershell": + cmd.Root().GenPowerShellCompletion(os.Stdout) + } + }, +} +``` + +**Note:** The cobra generator may include messages printed to stdout for example if the config file is loaded, this will break the auto complete script so must be removed. + +# Customizing completions + +The generated completion scripts will automatically handle completing commands and flags. However, you can make your completions much more powerful by providing information to complete your program's nouns and flag values. + +## Completion of nouns + +### Static completion of nouns + +Cobra allows you to provide a pre-defined list of completion choices for your nouns using the `ValidArgs` field. +For example, if you want `kubectl get [tab][tab]` to show a list of valid "nouns" you have to set them. 
+Some simplified code from `kubectl get` looks like:
+
+```go
+validArgs := []string{"pod", "node", "service", "replicationcontroller"}
+
+cmd := &cobra.Command{
+    Use:     "get [(-o|--output=)json|yaml|template|...] (RESOURCE [NAME] | RESOURCE/NAME ...)",
+    Short:   "Display one or many resources",
+    Long:    get_long,
+    Example: get_example,
+    Run: func(cmd *cobra.Command, args []string) {
+        err := RunGet(f, out, cmd, args)
+        util.CheckErr(err)
+    },
+    ValidArgs: validArgs,
+}
+```
+
+Notice we put the `ValidArgs` field on the `get` sub-command. Doing so will give results like:
+
+```bash
+$ kubectl get [tab][tab]
+node pod replicationcontroller service
+```
+
+#### Aliases for nouns
+
+If your nouns have aliases, you can define them alongside `ValidArgs` using `ArgAliases`:
+
+```go
+argAliases := []string{"pods", "nodes", "services", "svc", "replicationcontrollers", "rc"}
+
+cmd := &cobra.Command{
+    ...
+    ValidArgs:  validArgs,
+    ArgAliases: argAliases,
+}
+```
+
+The aliases are not shown to the user on tab completion, but they are accepted as valid nouns by
+the completion algorithm if entered manually, e.g. in:
+
+```bash
+$ kubectl get rc [tab][tab]
+backend frontend database
+```
+
+Note that without declaring `rc` as an alias, the completion algorithm would not know to show the list of
+replication controllers following `rc`.
+
+### Dynamic completion of nouns
+
+In some cases it is not possible to provide a list of completions in advance. Instead, the list of completions must be determined at execution time. In a similar fashion as for static completions, you can use the `ValidArgsFunction` field to provide a Go function that Cobra will execute when it needs the list of completion choices for the nouns of a command. Note that either `ValidArgs` or `ValidArgsFunction` can be used for a single cobra command, but not both.
+Simplified code from `helm status` looks like:
+
+```go
+cmd := &cobra.Command{
+    Use:   "status RELEASE_NAME",
+    Short: "Display the status of the named release",
+    Long:  status_long,
+    RunE: func(cmd *cobra.Command, args []string) error {
+        return RunGet(args[0])
+    },
+    ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+        if len(args) != 0 {
+            return nil, cobra.ShellCompDirectiveNoFileComp
+        }
+        return getReleasesFromCluster(toComplete), cobra.ShellCompDirectiveNoFileComp
+    },
+}
+```
+Where `getReleasesFromCluster()` is a Go function that obtains the list of current Helm releases running on the Kubernetes cluster.
+Notice we put the `ValidArgsFunction` on the `status` sub-command. Let's assume the Helm releases on the cluster are `harbor`, `notary`, `rook` and `thanos`; this dynamic completion will then give results like:
+
+```bash
+$ helm status [tab][tab]
+harbor notary rook thanos
+```
+You may have noticed the use of `cobra.ShellCompDirective`. These directives are bit fields that let you control certain shell completion behaviors for your particular completion. You can combine them with the bit-or operator, e.g. `cobra.ShellCompDirectiveNoSpace | cobra.ShellCompDirectiveNoFileComp`.
+```go
+// Indicates that the shell will perform its default behavior after completions
+// have been provided (this implies none of the other directives).
+ShellCompDirectiveDefault
+
+// Indicates an error occurred and completions should be ignored.
+ShellCompDirectiveError
+
+// Indicates that the shell should not add a space after the completion,
+// even if there is a single completion provided.
+ShellCompDirectiveNoSpace
+
+// Indicates that the shell should not provide file completion even when
+// no completion is provided.
+ShellCompDirectiveNoFileComp
+
+// Indicates that the returned completions should be used as file extension filters.
+// For example, to complete only files of the form *.json or *.yaml:
+// return []string{"yaml", "json"}, ShellCompDirectiveFilterFileExt
+// For flags, using MarkFlagFilename() and MarkPersistentFlagFilename()
+// is a shortcut to using this directive explicitly.
+//
+ShellCompDirectiveFilterFileExt
+
+// Indicates that only directory names should be provided in file completion.
+// For example:
+// return nil, ShellCompDirectiveFilterDirs
+// For flags, using MarkFlagDirname() is a shortcut to using this directive explicitly.
+//
+// To request directory names within another directory, the returned completions
+// should specify a single directory name within which to search. For example,
+// to complete directories within "themes/":
+// return []string{"themes"}, ShellCompDirectiveFilterDirs
+//
+ShellCompDirectiveFilterDirs
+```
+
+***Note***: When using the `ValidArgsFunction`, Cobra will call your registered function after having parsed all flags and arguments provided in the command-line. You therefore don't need to do this parsing yourself. For example, when a user calls `helm status --namespace my-rook-ns [tab][tab]`, Cobra will call your registered `ValidArgsFunction` after having parsed the `--namespace` flag, as it would have done when calling the `RunE` function.
+
+#### Debugging
+
+Cobra achieves dynamic completion through the use of a hidden command called by the completion script. To debug your Go completion code, you can call this hidden command directly:
+```bash
+$ helm __complete status har
+harbor
+:4
+Completion ended with directive: ShellCompDirectiveNoFileComp # This is on stderr
+```
+***Important:*** If the noun to complete is empty (when the user has not yet typed any letters of that noun), you must pass an empty parameter to the `__complete` command:
+```bash
+$ helm __complete status ""
+harbor
+notary
+rook
+thanos
+:4
+Completion ended with directive: ShellCompDirectiveNoFileComp # This is on stderr
+```
+Calling the `__complete` command directly allows you to run the Go debugger to troubleshoot your code. You can also add printouts to your code; Cobra provides the following functions to use for printouts in Go completion code:
+```go
+// Prints to the completion script debug file (if BASH_COMP_DEBUG_FILE
+// is set to a file path) and optionally prints to stderr.
+cobra.CompDebug(msg string, printToStdErr bool)
+cobra.CompDebugln(msg string, printToStdErr bool)
+
+// Prints to the completion script debug file (if BASH_COMP_DEBUG_FILE
+// is set to a file path) and to stderr.
+cobra.CompError(msg string)
+cobra.CompErrorln(msg string)
+```
+***Important:*** You should **not** leave traces that print directly to stdout in your completion code as they will be interpreted as completion choices by the completion script. Instead, use the cobra-provided debugging functions mentioned above.
+
+## Completions for flags
+
+### Mark flags as required
+
+Most of the time completions will only show sub-commands. But if a flag is required to make a sub-command work, you probably want it to show up when the user types [tab][tab]. 
You can mark a flag as 'Required' like so:
+
+```go
+cmd.MarkFlagRequired("pod")
+cmd.MarkFlagRequired("container")
+```
+
+and you'll get something like
+
+```bash
+$ kubectl exec [tab][tab]
+-c --container= -p --pod=
+```
+
+### Specify dynamic flag completion
+
+As for nouns, Cobra provides a way of defining dynamic completion of flags. To provide a Go function that Cobra will execute when it needs the list of completion choices for a flag, you must register the function using the `command.RegisterFlagCompletionFunc()` function.
+
+```go
+flagName := "output"
+cmd.RegisterFlagCompletionFunc(flagName, func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+    return []string{"json", "table", "yaml"}, cobra.ShellCompDirectiveDefault
+})
+```
+Notice that calling `RegisterFlagCompletionFunc()` is done through the `command` with which the flag is associated. In our example this dynamic completion will give results like so:
+
+```bash
+$ helm status --output [tab][tab]
+json table yaml
+```
+
+#### Debugging
+
+You can also easily debug your Go completion code for flags:
+```bash
+$ helm __complete status --output ""
+json
+table
+yaml
+:4
+Completion ended with directive: ShellCompDirectiveNoFileComp # This is on stderr
+```
+***Important:*** You should **not** leave traces that print to stdout in your completion code as they will be interpreted as completion choices by the completion script. Instead, use the cobra-provided debugging functions mentioned further above.
+
+### Specify valid filename extensions for flags that take a filename
+
+To limit completions of flag values to file names with certain extensions you can either use the different `MarkFlagFilename()` functions or a combination of `RegisterFlagCompletionFunc()` and `ShellCompDirectiveFilterFileExt`, like so:
+```go
+flagName := "output"
+cmd.MarkFlagFilename(flagName, "yaml", "json")
+```
+or
+```go
+flagName := "output"
+cmd.RegisterFlagCompletionFunc(flagName, func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+    return []string{"yaml", "json"}, cobra.ShellCompDirectiveFilterFileExt
+})
+```
+
+### Limit flag completions to directory names
+
+To limit completions of flag values to directory names you can either use the `MarkFlagDirname()` functions or a combination of `RegisterFlagCompletionFunc()` and `ShellCompDirectiveFilterDirs`, like so:
+```go
+flagName := "output"
+cmd.MarkFlagDirname(flagName)
+```
+or
+```go
+flagName := "output"
+cmd.RegisterFlagCompletionFunc(flagName, func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+    return nil, cobra.ShellCompDirectiveFilterDirs
+})
+```
+To limit completions of flag values to directory names *within another directory* you can use a combination of `RegisterFlagCompletionFunc()` and `ShellCompDirectiveFilterDirs` like so:
+```go
+flagName := "output"
+cmd.RegisterFlagCompletionFunc(flagName, func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+    return []string{"themes"}, cobra.ShellCompDirectiveFilterDirs
+})
+```
+### Descriptions for completions
+
+Both `zsh` and `fish` allow for descriptions to annotate completion choices. For commands and flags, Cobra will provide the descriptions automatically, based on usage information. 
+```
+$ helm s[tab]
+search -- search for a keyword in charts
+show -- show information of a chart
+status -- displays the status of the named release
+```
+while using fish:
+```
+$ helm s[tab]
+search (search for a keyword in charts) show (show information of a chart) status (displays the status of the named release)
+```
+
+Cobra allows you to add annotations to your own completions. Simply add the annotation text after each completion, following a `\t` separator. This technique applies to completions returned by `ValidArgs`, `ValidArgsFunction` and `RegisterFlagCompletionFunc()`. For example:
+```go
+ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+	return []string{"harbor\tAn image registry", "thanos\tLong-term metrics"}, cobra.ShellCompDirectiveNoFileComp
+}}
+```
+or
+```go
+ValidArgs: []string{"bash\tCompletions for bash", "zsh\tCompletions for zsh"}
+```
+## Bash completions
+
+### Dependencies
+
+The bash completion script generated by Cobra requires the `bash_completion` package. You should update the help text of your completion command to show how to install the `bash_completion` package ([Kubectl docs](https://kubernetes.io/docs/tasks/tools/install-kubectl/#enabling-shell-autocompletion))
+
+### Aliases
+
+You can also configure `bash` aliases for your program and they will also support completions.
+
+```bash
+alias aliasname=origcommand
+complete -o default -F __start_origcommand aliasname
+
+# and now when you run `aliasname` completion will make
+# suggestions as it did for `origcommand`.
+
+$ aliasname [tab][tab]
+completion firstcommand secondcommand
+```
+### Bash legacy dynamic completions
+
+For backwards-compatibility, Cobra still supports its bash legacy dynamic completion solution.
+Please refer to [Bash Completions](bash_completions.md) for details.
+
+## Zsh completions
+
+Cobra supports native Zsh completion generated from the root `cobra.Command`.
+The generated completion script should be put somewhere in your `$fpath` and be named
+`_<yourProgram>`. You will need to start a new shell for the completions to become available.
+
+Zsh supports descriptions for completions. Cobra will provide the description automatically,
+based on usage information. Cobra provides a way to completely disable such descriptions by
+using `GenZshCompletionNoDesc()` or `GenZshCompletionFileNoDesc()`. You can choose to make
+this a configurable option for your users.
+```
+# With descriptions
+$ helm s[tab]
+search -- search for a keyword in charts
+show -- show information of a chart
+status -- displays the status of the named release
+
+# Without descriptions
+$ helm s[tab]
+search show status
+```
+*Note*: Because of backwards-compatibility requirements, we were forced to have a different API to disable completion descriptions between `Zsh` and `Fish`.
+
+### Limitations
+
+* Custom completions implemented in Bash scripting (legacy) are not supported and will be ignored for `zsh` (including the use of the `BashCompCustom` flag annotation).
+  * You should instead use `ValidArgsFunction` and `RegisterFlagCompletionFunc()` which are portable to the different shells (`bash`, `zsh`, `fish`).
+* The function `MarkFlagCustom()` is not supported and will be ignored for `zsh`.
+  * You should instead use `RegisterFlagCompletionFunc()`.
+
+### Zsh completions standardization
+
+Cobra 1.1 standardized its zsh completion support to align it with its other shell completions.
Although the API was kept backwards-compatible, some small changes in behavior were introduced. +Please refer to [Zsh Completions](zsh_completions.md) for details. + +## Fish completions + +Cobra supports native Fish completions generated from the root `cobra.Command`. You can use the `command.GenFishCompletion()` or `command.GenFishCompletionFile()` functions. You must provide these functions with a parameter indicating if the completions should be annotated with a description; Cobra will provide the description automatically based on usage information. You can choose to make this option configurable by your users. +``` +# With descriptions +$ helm s[tab] +search (search for a keyword in charts) show (show information of a chart) status (displays the status of the named release) + +# Without descriptions +$ helm s[tab] +search show status +``` +*Note*: Because of backwards-compatibility requirements, we were forced to have a different API to disable completion descriptions between `Zsh` and `Fish`. + +### Limitations + +* Custom completions implemented in Bash scripting (legacy) are not supported and will be ignored for `fish` (including the use of the `BashCompCustom` flag annotation). + * You should instead use `ValidArgsFunction` and `RegisterFlagCompletionFunc()` which are portable to the different shells (`bash`, `zsh`, `fish`). +* The function `MarkFlagCustom()` is not supported and will be ignored for `fish`. + * You should instead use `RegisterFlagCompletionFunc()`. +* The following flag completion annotations are not supported and will be ignored for `fish`: + * `BashCompFilenameExt` (filtering by file extension) + * `BashCompSubdirsInDir` (filtering by directory) +* The functions corresponding to the above annotations are consequently not supported and will be ignored for `fish`: + * `MarkFlagFilename()` and `MarkPersistentFlagFilename()` (filtering by file extension) + * `MarkFlagDirname()` and `MarkPersistentFlagDirname()` (filtering by directory) +* Similarly, the following completion directives are not supported and will be ignored for `fish`: + * `ShellCompDirectiveFilterFileExt` (filtering by file extension) + * `ShellCompDirectiveFilterDirs` (filtering by directory) + +## PowerShell completions + +Please refer to [PowerShell Completions](powershell_completions.md) for details. 
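+
+## Putting it together
+
+The sketch below is purely illustrative and is not taken from any real project: the command name, flag names, release names and descriptions are invented for the example. It shows how the pieces described above combine in practice: a `ValidArgsFunction` that can rely on flags already having been parsed and that returns completions annotated with descriptions, plus a `RegisterFlagCompletionFunc()` for a flag value.
+
+```go
+package main
+
+import (
+	"strings"
+
+	"github.com/spf13/cobra"
+)
+
+func main() {
+	statusCmd := &cobra.Command{
+		Use:   "status RELEASE_NAME",
+		Short: "Display the status of the named release",
+		ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+			if len(args) != 0 {
+				// Only the first positional argument gets completions.
+				return nil, cobra.ShellCompDirectiveNoFileComp
+			}
+			// Flags are already parsed at this point, so their values can
+			// be used to narrow down the suggestions.
+			namespace, _ := cmd.Flags().GetString("namespace")
+			_ = namespace // e.g. only look up releases in this namespace
+
+			// Completions annotated with a description after a \t separator.
+			releases := []string{
+				"harbor\tAn image registry",
+				"thanos\tLong-term metrics",
+			}
+			var comps []string
+			for _, r := range releases {
+				if strings.HasPrefix(r, toComplete) {
+					comps = append(comps, r)
+				}
+			}
+			return comps, cobra.ShellCompDirectiveNoFileComp
+		},
+		RunE: func(cmd *cobra.Command, args []string) error { return nil },
+	}
+	statusCmd.Flags().String("namespace", "default", "namespace to search")
+	statusCmd.Flags().StringP("output", "o", "table", "output format")
+
+	// Dynamic completion of the --output flag value.
+	_ = statusCmd.RegisterFlagCompletionFunc("output",
+		func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+			return []string{"json", "table", "yaml"}, cobra.ShellCompDirectiveNoFileComp
+		})
+
+	rootCmd := &cobra.Command{Use: "app"}
+	rootCmd.AddCommand(statusCmd)
+	_ = rootCmd.Execute()
+}
+```
+
+Once a completion script is generated for any of the supported shells (for example with `GenZshCompletion()` or `GenFishCompletion()`), both the positional-argument completions and the `--output` flag completions become available.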
diff --git a/vendor/github.com/spf13/cobra/zsh_completions.go b/vendor/github.com/spf13/cobra/zsh_completions.go index 1275548..92a7039 100644 --- a/vendor/github.com/spf13/cobra/zsh_completions.go +++ b/vendor/github.com/spf13/cobra/zsh_completions.go @@ -1,336 +1,240 @@ package cobra import ( - "encoding/json" + "bytes" "fmt" "io" "os" - "sort" - "strings" - "text/template" - - "github.com/spf13/pflag" -) - -const ( - zshCompArgumentAnnotation = "cobra_annotations_zsh_completion_argument_annotation" - zshCompArgumentFilenameComp = "cobra_annotations_zsh_completion_argument_file_completion" - zshCompArgumentWordComp = "cobra_annotations_zsh_completion_argument_word_completion" - zshCompDirname = "cobra_annotations_zsh_dirname" ) -var ( - zshCompFuncMap = template.FuncMap{ - "genZshFuncName": zshCompGenFuncName, - "extractFlags": zshCompExtractFlag, - "genFlagEntryForZshArguments": zshCompGenFlagEntryForArguments, - "extractArgsCompletions": zshCompExtractArgumentCompletionHintsForRendering, - } - zshCompletionText = ` -{{/* should accept Command (that contains subcommands) as parameter */}} -{{define "argumentsC" -}} -{{ $cmdPath := genZshFuncName .}} -function {{$cmdPath}} { - local -a commands - - _arguments -C \{{- range extractFlags .}} - {{genFlagEntryForZshArguments .}} \{{- end}} - "1: :->cmnds" \ - "*::arg:->args" - - case $state in - cmnds) - commands=({{range .Commands}}{{if not .Hidden}} - "{{.Name}}:{{.Short}}"{{end}}{{end}} - ) - _describe "command" commands - ;; - esac - - case "$words[1]" in {{- range .Commands}}{{if not .Hidden}} - {{.Name}}) - {{$cmdPath}}_{{.Name}} - ;;{{end}}{{end}} - esac -} -{{range .Commands}}{{if not .Hidden}} -{{template "selectCmdTemplate" .}} -{{- end}}{{end}} -{{- end}} - -{{/* should accept Command without subcommands as parameter */}} -{{define "arguments" -}} -function {{genZshFuncName .}} { -{{" _arguments"}}{{range extractFlags .}} \ - {{genFlagEntryForZshArguments . -}} -{{end}}{{range extractArgsCompletions .}} \ - {{.}}{{end}} -} -{{end}} - -{{/* dispatcher for commands with or without subcommands */}} -{{define "selectCmdTemplate" -}} -{{if .Hidden}}{{/* ignore hidden*/}}{{else -}} -{{if .Commands}}{{template "argumentsC" .}}{{else}}{{template "arguments" .}}{{end}} -{{- end}} -{{- end}} - -{{/* template entry point */}} -{{define "Main" -}} -#compdef _{{.Name}} {{.Name}} - -{{template "selectCmdTemplate" .}} -{{end}} -` -) - -// zshCompArgsAnnotation is used to encode/decode zsh completion for -// arguments to/from Command.Annotations. -type zshCompArgsAnnotation map[int]zshCompArgHint - -type zshCompArgHint struct { - // Indicates the type of the completion to use. One of: - // zshCompArgumentFilenameComp or zshCompArgumentWordComp - Tipe string `json:"type"` - - // A value for the type above (globs for file completion or words) - Options []string `json:"options"` -} - -// GenZshCompletionFile generates zsh completion file. +// GenZshCompletionFile generates zsh completion file including descriptions. func (c *Command) GenZshCompletionFile(filename string) error { - outFile, err := os.Create(filename) - if err != nil { - return err - } - defer outFile.Close() - - return c.GenZshCompletion(outFile) + return c.genZshCompletionFile(filename, true) } -// GenZshCompletion generates a zsh completion file and writes to the passed -// writer. The completion always run on the root command regardless of the -// command it was called from. 
+// GenZshCompletion generates zsh completion file including descriptions +// and writes it to the passed writer. func (c *Command) GenZshCompletion(w io.Writer) error { - tmpl, err := template.New("Main").Funcs(zshCompFuncMap).Parse(zshCompletionText) - if err != nil { - return fmt.Errorf("error creating zsh completion template: %v", err) - } - return tmpl.Execute(w, c.Root()) -} - -// MarkZshCompPositionalArgumentFile marks the specified argument (first -// argument is 1) as completed by file selection. patterns (e.g. "*.txt") are -// optional - if not provided the completion will search for all files. -func (c *Command) MarkZshCompPositionalArgumentFile(argPosition int, patterns ...string) error { - if argPosition < 1 { - return fmt.Errorf("Invalid argument position (%d)", argPosition) - } - annotation, err := c.zshCompGetArgsAnnotations() - if err != nil { - return err - } - if c.zshcompArgsAnnotationnIsDuplicatePosition(annotation, argPosition) { - return fmt.Errorf("Duplicate annotation for positional argument at index %d", argPosition) - } - annotation[argPosition] = zshCompArgHint{ - Tipe: zshCompArgumentFilenameComp, - Options: patterns, - } - return c.zshCompSetArgsAnnotations(annotation) -} - -// MarkZshCompPositionalArgumentWords marks the specified positional argument -// (first argument is 1) as completed by the provided words. At east one word -// must be provided, spaces within words will be offered completion with -// "word\ word". -func (c *Command) MarkZshCompPositionalArgumentWords(argPosition int, words ...string) error { - if argPosition < 1 { - return fmt.Errorf("Invalid argument position (%d)", argPosition) - } - if len(words) == 0 { - return fmt.Errorf("Trying to set empty word list for positional argument %d", argPosition) - } - annotation, err := c.zshCompGetArgsAnnotations() - if err != nil { - return err - } - if c.zshcompArgsAnnotationnIsDuplicatePosition(annotation, argPosition) { - return fmt.Errorf("Duplicate annotation for positional argument at index %d", argPosition) - } - annotation[argPosition] = zshCompArgHint{ - Tipe: zshCompArgumentWordComp, - Options: words, - } - return c.zshCompSetArgsAnnotations(annotation) -} - -func zshCompExtractArgumentCompletionHintsForRendering(c *Command) ([]string, error) { - var result []string - annotation, err := c.zshCompGetArgsAnnotations() - if err != nil { - return nil, err - } - for k, v := range annotation { - s, err := zshCompRenderZshCompArgHint(k, v) - if err != nil { - return nil, err - } - result = append(result, s) - } - if len(c.ValidArgs) > 0 { - if _, positionOneExists := annotation[1]; !positionOneExists { - s, err := zshCompRenderZshCompArgHint(1, zshCompArgHint{ - Tipe: zshCompArgumentWordComp, - Options: c.ValidArgs, - }) - if err != nil { - return nil, err - } - result = append(result, s) - } - } - sort.Strings(result) - return result, nil + return c.genZshCompletion(w, true) } -func zshCompRenderZshCompArgHint(i int, z zshCompArgHint) (string, error) { - switch t := z.Tipe; t { - case zshCompArgumentFilenameComp: - var globs []string - for _, g := range z.Options { - globs = append(globs, fmt.Sprintf(`-g "%s"`, g)) - } - return fmt.Sprintf(`'%d: :_files %s'`, i, strings.Join(globs, " ")), nil - case zshCompArgumentWordComp: - var words []string - for _, w := range z.Options { - words = append(words, fmt.Sprintf("%q", w)) - } - return fmt.Sprintf(`'%d: :(%s)'`, i, strings.Join(words, " ")), nil - default: - return "", fmt.Errorf("Invalid zsh argument completion annotation: %s", t) - } +// 
GenZshCompletionFileNoDesc generates zsh completion file without descriptions. +func (c *Command) GenZshCompletionFileNoDesc(filename string) error { + return c.genZshCompletionFile(filename, false) } -func (c *Command) zshcompArgsAnnotationnIsDuplicatePosition(annotation zshCompArgsAnnotation, position int) bool { - _, dup := annotation[position] - return dup +// GenZshCompletionNoDesc generates zsh completion file without descriptions +// and writes it to the passed writer. +func (c *Command) GenZshCompletionNoDesc(w io.Writer) error { + return c.genZshCompletion(w, false) } -func (c *Command) zshCompGetArgsAnnotations() (zshCompArgsAnnotation, error) { - annotation := make(zshCompArgsAnnotation) - annotationString, ok := c.Annotations[zshCompArgumentAnnotation] - if !ok { - return annotation, nil - } - err := json.Unmarshal([]byte(annotationString), &annotation) - if err != nil { - return annotation, fmt.Errorf("Error unmarshaling zsh argument annotation: %v", err) - } - return annotation, nil -} - -func (c *Command) zshCompSetArgsAnnotations(annotation zshCompArgsAnnotation) error { - jsn, err := json.Marshal(annotation) - if err != nil { - return fmt.Errorf("Error marshaling zsh argument annotation: %v", err) - } - if c.Annotations == nil { - c.Annotations = make(map[string]string) - } - c.Annotations[zshCompArgumentAnnotation] = string(jsn) +// MarkZshCompPositionalArgumentFile only worked for zsh and its behavior was +// not consistent with Bash completion. It has therefore been disabled. +// Instead, when no other completion is specified, file completion is done by +// default for every argument. One can disable file completion on a per-argument +// basis by using ValidArgsFunction and ShellCompDirectiveNoFileComp. +// To achieve file extension filtering, one can use ValidArgsFunction and +// ShellCompDirectiveFilterFileExt. +// +// Deprecated +func (c *Command) MarkZshCompPositionalArgumentFile(argPosition int, patterns ...string) error { return nil } -func zshCompGenFuncName(c *Command) string { - if c.HasParent() { - return zshCompGenFuncName(c.Parent()) + "_" + c.Name() - } - return "_" + c.Name() -} - -func zshCompExtractFlag(c *Command) []*pflag.Flag { - var flags []*pflag.Flag - c.LocalFlags().VisitAll(func(f *pflag.Flag) { - if !f.Hidden { - flags = append(flags, f) - } - }) - c.InheritedFlags().VisitAll(func(f *pflag.Flag) { - if !f.Hidden { - flags = append(flags, f) - } - }) - return flags -} - -// zshCompGenFlagEntryForArguments returns an entry that matches _arguments -// zsh-completion parameters. It's too complicated to generate in a template. 
-func zshCompGenFlagEntryForArguments(f *pflag.Flag) string { - if f.Name == "" || f.Shorthand == "" { - return zshCompGenFlagEntryForSingleOptionFlag(f) - } - return zshCompGenFlagEntryForMultiOptionFlag(f) -} - -func zshCompGenFlagEntryForSingleOptionFlag(f *pflag.Flag) string { - var option, multiMark, extras string - - if zshCompFlagCouldBeSpecifiedMoreThenOnce(f) { - multiMark = "*" - } - - option = "--" + f.Name - if option == "--" { - option = "-" + f.Shorthand - } - extras = zshCompGenFlagEntryExtras(f) - - return fmt.Sprintf(`'%s%s[%s]%s'`, multiMark, option, zshCompQuoteFlagDescription(f.Usage), extras) -} - -func zshCompGenFlagEntryForMultiOptionFlag(f *pflag.Flag) string { - var options, parenMultiMark, curlyMultiMark, extras string - - if zshCompFlagCouldBeSpecifiedMoreThenOnce(f) { - parenMultiMark = "*" - curlyMultiMark = "\\*" - } - - options = fmt.Sprintf(`'(%s-%s %s--%s)'{%s-%s,%s--%s}`, - parenMultiMark, f.Shorthand, parenMultiMark, f.Name, curlyMultiMark, f.Shorthand, curlyMultiMark, f.Name) - extras = zshCompGenFlagEntryExtras(f) - - return fmt.Sprintf(`%s'[%s]%s'`, options, zshCompQuoteFlagDescription(f.Usage), extras) +// MarkZshCompPositionalArgumentWords only worked for zsh. It has therefore +// been disabled. +// To achieve the same behavior across all shells, one can use +// ValidArgs (for the first argument only) or ValidArgsFunction for +// any argument (can include the first one also). +// +// Deprecated +func (c *Command) MarkZshCompPositionalArgumentWords(argPosition int, words ...string) error { + return nil } -func zshCompGenFlagEntryExtras(f *pflag.Flag) string { - if f.NoOptDefVal != "" { - return "" - } - - extras := ":" // allow options for flag (even without assistance) - for key, values := range f.Annotations { - switch key { - case zshCompDirname: - extras = fmt.Sprintf(":filename:_files -g %q", values[0]) - case BashCompFilenameExt: - extras = ":filename:_files" - for _, pattern := range values { - extras = extras + fmt.Sprintf(` -g "%s"`, pattern) - } - } +func (c *Command) genZshCompletionFile(filename string, includeDesc bool) error { + outFile, err := os.Create(filename) + if err != nil { + return err } + defer outFile.Close() - return extras -} - -func zshCompFlagCouldBeSpecifiedMoreThenOnce(f *pflag.Flag) bool { - return strings.Contains(f.Value.Type(), "Slice") || - strings.Contains(f.Value.Type(), "Array") -} - -func zshCompQuoteFlagDescription(s string) string { - return strings.Replace(s, "'", `'\''`, -1) + return c.genZshCompletion(outFile, includeDesc) +} + +func (c *Command) genZshCompletion(w io.Writer, includeDesc bool) error { + buf := new(bytes.Buffer) + genZshComp(buf, c.Name(), includeDesc) + _, err := buf.WriteTo(w) + return err +} + +func genZshComp(buf *bytes.Buffer, name string, includeDesc bool) { + compCmd := ShellCompRequestCmd + if !includeDesc { + compCmd = ShellCompNoDescRequestCmd + } + buf.WriteString(fmt.Sprintf(`#compdef _%[1]s %[1]s + +# zsh completion for %-36[1]s -*- shell-script -*- + +__%[1]s_debug() +{ + local file="$BASH_COMP_DEBUG_FILE" + if [[ -n ${file} ]]; then + echo "$*" >> "${file}" + fi +} + +_%[1]s() +{ + local shellCompDirectiveError=%[3]d + local shellCompDirectiveNoSpace=%[4]d + local shellCompDirectiveNoFileComp=%[5]d + local shellCompDirectiveFilterFileExt=%[6]d + local shellCompDirectiveFilterDirs=%[7]d + + local lastParam lastChar flagPrefix requestComp out directive compCount comp lastComp + local -a completions + + __%[1]s_debug "\n========= starting completion logic ==========" + 
__%[1]s_debug "CURRENT: ${CURRENT}, words[*]: ${words[*]}" + + # The user could have moved the cursor backwards on the command-line. + # We need to trigger completion from the $CURRENT location, so we need + # to truncate the command-line ($words) up to the $CURRENT location. + # (We cannot use $CURSOR as its value does not work when a command is an alias.) + words=("${=words[1,CURRENT]}") + __%[1]s_debug "Truncated words[*]: ${words[*]}," + + lastParam=${words[-1]} + lastChar=${lastParam[-1]} + __%[1]s_debug "lastParam: ${lastParam}, lastChar: ${lastChar}" + + # For zsh, when completing a flag with an = (e.g., %[1]s -n=) + # completions must be prefixed with the flag + setopt local_options BASH_REMATCH + if [[ "${lastParam}" =~ '-.*=' ]]; then + # We are dealing with a flag with an = + flagPrefix="-P ${BASH_REMATCH}" + fi + + # Prepare the command to obtain completions + requestComp="${words[1]} %[2]s ${words[2,-1]}" + if [ "${lastChar}" = "" ]; then + # If the last parameter is complete (there is a space following it) + # We add an extra empty parameter so we can indicate this to the go completion code. + __%[1]s_debug "Adding extra empty parameter" + requestComp="${requestComp} \"\"" + fi + + __%[1]s_debug "About to call: eval ${requestComp}" + + # Use eval to handle any environment variables and such + out=$(eval ${requestComp} 2>/dev/null) + __%[1]s_debug "completion output: ${out}" + + # Extract the directive integer following a : from the last line + local lastLine + while IFS='\n' read -r line; do + lastLine=${line} + done < <(printf "%%s\n" "${out[@]}") + __%[1]s_debug "last line: ${lastLine}" + + if [ "${lastLine[1]}" = : ]; then + directive=${lastLine[2,-1]} + # Remove the directive including the : and the newline + local suffix + (( suffix=${#lastLine}+2)) + out=${out[1,-$suffix]} + else + # There is no directive specified. Leave $out as is. + __%[1]s_debug "No directive found. Setting do default" + directive=0 + fi + + __%[1]s_debug "directive: ${directive}" + __%[1]s_debug "completions: ${out}" + __%[1]s_debug "flagPrefix: ${flagPrefix}" + + if [ $((directive & shellCompDirectiveError)) -ne 0 ]; then + __%[1]s_debug "Completion received error. Ignoring completions." + return + fi + + compCount=0 + while IFS='\n' read -r comp; do + if [ -n "$comp" ]; then + # If requested, completions are returned with a description. + # The description is preceded by a TAB character. + # For zsh's _describe, we need to use a : instead of a TAB. + # We first need to escape any : as part of the completion itself. 
+ comp=${comp//:/\\:} + + local tab=$(printf '\t') + comp=${comp//$tab/:} + + ((compCount++)) + __%[1]s_debug "Adding completion: ${comp}" + completions+=${comp} + lastComp=$comp + fi + done < <(printf "%%s\n" "${out[@]}") + + if [ $((directive & shellCompDirectiveFilterFileExt)) -ne 0 ]; then + # File extension filtering + local filteringCmd + filteringCmd='_files' + for filter in ${completions[@]}; do + if [ ${filter[1]} != '*' ]; then + # zsh requires a glob pattern to do file filtering + filter="\*.$filter" + fi + filteringCmd+=" -g $filter" + done + filteringCmd+=" ${flagPrefix}" + + __%[1]s_debug "File filtering command: $filteringCmd" + _arguments '*:filename:'"$filteringCmd" + elif [ $((directive & shellCompDirectiveFilterDirs)) -ne 0 ]; then + # File completion for directories only + local subDir + subdir="${completions[1]}" + if [ -n "$subdir" ]; then + __%[1]s_debug "Listing directories in $subdir" + pushd "${subdir}" >/dev/null 2>&1 + else + __%[1]s_debug "Listing directories in ." + fi + + _arguments '*:dirname:_files -/'" ${flagPrefix}" + if [ -n "$subdir" ]; then + popd >/dev/null 2>&1 + fi + elif [ $((directive & shellCompDirectiveNoSpace)) -ne 0 ] && [ ${compCount} -eq 1 ]; then + __%[1]s_debug "Activating nospace." + # We can use compadd here as there is no description when + # there is only one completion. + compadd -S '' "${lastComp}" + elif [ ${compCount} -eq 0 ]; then + if [ $((directive & shellCompDirectiveNoFileComp)) -ne 0 ]; then + __%[1]s_debug "deactivating file completion" + else + # Perform file completion + __%[1]s_debug "activating file completion" + _arguments '*:filename:_files'" ${flagPrefix}" + fi + else + _describe "completions" completions $(echo $flagPrefix) + fi +} + +# don't run the completion function when being source-ed or eval-ed +if [ "$funcstack[1]" = "_%[1]s" ]; then + _%[1]s +fi +`, name, compCmd, + ShellCompDirectiveError, ShellCompDirectiveNoSpace, ShellCompDirectiveNoFileComp, + ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs)) } diff --git a/vendor/github.com/spf13/cobra/zsh_completions.md b/vendor/github.com/spf13/cobra/zsh_completions.md index df9c2ea..7cff617 100644 --- a/vendor/github.com/spf13/cobra/zsh_completions.md +++ b/vendor/github.com/spf13/cobra/zsh_completions.md @@ -1,39 +1,48 @@ -## Generating Zsh Completion for your cobra.Command - -Cobra supports native Zsh completion generated from the root `cobra.Command`. -The generated completion script should be put somewhere in your `$fpath` named -`_`. - -### What's Supported - -* Completion for all non-hidden subcommands using their `.Short` description. -* Completion for all non-hidden flags using the following rules: - * Filename completion works by marking the flag with `cmd.MarkFlagFilename...` - family of commands. - * The requirement for argument to the flag is decided by the `.NoOptDefVal` - flag value - if it's empty then completion will expect an argument. - * Flags of one of the various `*Array` and `*Slice` types supports multiple - specifications (with or without argument depending on the specific type). -* Completion of positional arguments using the following rules: - * Argument position for all options below starts at `1`. If argument position - `0` is requested it will raise an error. - * Use `command.MarkZshCompPositionalArgumentFile` to complete filenames. Glob - patterns (e.g. `"*.log"`) are optional - if not specified it will offer to - complete all file types. 
- * Use `command.MarkZshCompPositionalArgumentWords` to offer specific words for - completion. At least one word is required. - * It's possible to specify completion for some arguments and leave some - unspecified (e.g. offer words for second argument but nothing for first - argument). This will cause no completion for first argument but words - completion for second argument. - * If no argument completion was specified for 1st argument (but optionally was - specified for 2nd) and the command has `ValidArgs` it will be used as - completion options for 1st argument. - * Argument completions only offered for commands with no subcommands. - -### What's not yet Supported - -* Custom completion scripts are not supported yet (We should probably create zsh - specific one, doesn't make sense to re-use the bash one as the functions will - be different). -* Whatever other feature you're looking for and doesn't exist :) +## Generating Zsh Completion For Your cobra.Command + +Please refer to [Shell Completions](shell_completions.md) for details. + +## Zsh completions standardization + +Cobra 1.1 standardized its zsh completion support to align it with its other shell completions. Although the API was kept backwards-compatible, some small changes in behavior were introduced. + +### Deprecation summary + +See further below for more details on these deprecations. + +* `cmd.MarkZshCompPositionalArgumentFile(pos, []string{})` is no longer needed. It is therefore **deprecated** and silently ignored. +* `cmd.MarkZshCompPositionalArgumentFile(pos, glob[])` is **deprecated** and silently ignored. + * Instead use `ValidArgsFunction` with `ShellCompDirectiveFilterFileExt`. +* `cmd.MarkZshCompPositionalArgumentWords()` is **deprecated** and silently ignored. + * Instead use `ValidArgsFunction`. 
+ +### Behavioral changes + +**Noun completion** +|Old behavior|New behavior| +|---|---| +|No file completion by default (opposite of bash)|File completion by default; use `ValidArgsFunction` with `ShellCompDirectiveNoFileComp` to turn off file completion on a per-argument basis| +|Completion of flag names without the `-` prefix having been typed|Flag names are only completed if the user has typed the first `-`| +`cmd.MarkZshCompPositionalArgumentFile(pos, []string{})` used to turn on file completion on a per-argument position basis|File completion for all arguments by default; `cmd.MarkZshCompPositionalArgumentFile()` is **deprecated** and silently ignored| +|`cmd.MarkZshCompPositionalArgumentFile(pos, glob[])` used to turn on file completion **with glob filtering** on a per-argument position basis (zsh-specific)|`cmd.MarkZshCompPositionalArgumentFile()` is **deprecated** and silently ignored; use `ValidArgsFunction` with `ShellCompDirectiveFilterFileExt` for file **extension** filtering (not full glob filtering)| +|`cmd.MarkZshCompPositionalArgumentWords(pos, words[])` used to provide completion choices on a per-argument position basis (zsh-specific)|`cmd.MarkZshCompPositionalArgumentWords()` is **deprecated** and silently ignored; use `ValidArgsFunction` to achieve the same behavior| + +**Flag-value completion** + +|Old behavior|New behavior| +|---|---| +|No file completion by default (opposite of bash)|File completion by default; use `RegisterFlagCompletionFunc()` with `ShellCompDirectiveNoFileComp` to turn off file completion| +|`cmd.MarkFlagFilename(flag, []string{})` and similar used to turn on file completion|File completion by default; `cmd.MarkFlagFilename(flag, []string{})` no longer needed in this context and silently ignored| +|`cmd.MarkFlagFilename(flag, glob[])` used to turn on file completion **with glob filtering** (syntax of `[]string{"*.yaml", "*.yml"}` incompatible with bash)|Will continue to work, however, support for bash syntax is added and should be used instead so as to work for all shells (`[]string{"yaml", "yml"}`)| +|`cmd.MarkFlagDirname(flag)` only completes directories (zsh-specific)|Has been added for all shells| +|Completion of a flag name does not repeat, unless flag is of type `*Array` or `*Slice` (not supported by bash)|Retained for `zsh` and added to `fish`| +|Completion of a flag name does not provide the `=` form (unlike bash)|Retained for `zsh` and added to `fish`| + +**Improvements** + +* Custom completion support (`ValidArgsFunction` and `RegisterFlagCompletionFunc()`) +* File completion by default if no other completions found +* Handling of required flags +* File extension filtering no longer mutually exclusive with bash usage +* Completion of directory names *within* another directory +* Support for `=` form of flags diff --git a/vendor/github.com/spf13/viper/util.go b/vendor/github.com/spf13/viper/util.go index b788969..cee6b24 100644 --- a/vendor/github.com/spf13/viper/util.go +++ b/vendor/github.com/spf13/viper/util.go @@ -91,13 +91,22 @@ func insensitiviseMap(m map[string]interface{}) { func absPathify(inPath string) string { jww.INFO.Println("Trying to resolve absolute path to", inPath) - if strings.HasPrefix(inPath, "$HOME") { + if inPath == "$HOME" || strings.HasPrefix(inPath, "$HOME"+string(os.PathSeparator)) { inPath = userHomeDir() + inPath[5:] } if strings.HasPrefix(inPath, "$") { end := strings.Index(inPath, string(os.PathSeparator)) - inPath = os.Getenv(inPath[1:end]) + inPath[end:] + + var value, suffix string + if end == -1 { + 
value = os.Getenv(inPath[1:]) + } else { + value = os.Getenv(inPath[1:end]) + suffix = inPath[end:] + } + + inPath = value + suffix } if filepath.IsAbs(inPath) { diff --git a/vendor/github.com/spf13/viper/viper.go b/vendor/github.com/spf13/viper/viper.go index f61f4ed..405dc20 100644 --- a/vendor/github.com/spf13/viper/viper.go +++ b/vendor/github.com/spf13/viper/viper.go @@ -896,13 +896,7 @@ func UnmarshalKey(key string, rawVal interface{}, opts ...DecoderConfigOption) e return v.UnmarshalKey(key, rawVal, opts...) } func (v *Viper) UnmarshalKey(key string, rawVal interface{}, opts ...DecoderConfigOption) error { - err := decode(v.Get(key), defaultDecoderConfig(rawVal, opts...)) - - if err != nil { - return err - } - - return nil + return decode(v.Get(key), defaultDecoderConfig(rawVal, opts...)) } // Unmarshal unmarshals the config into a Struct. Make sure that the tags @@ -911,13 +905,7 @@ func Unmarshal(rawVal interface{}, opts ...DecoderConfigOption) error { return v.Unmarshal(rawVal, opts...) } func (v *Viper) Unmarshal(rawVal interface{}, opts ...DecoderConfigOption) error { - err := decode(v.AllSettings(), defaultDecoderConfig(rawVal, opts...)) - - if err != nil { - return err - } - - return nil + return decode(v.AllSettings(), defaultDecoderConfig(rawVal, opts...)) } // defaultDecoderConfig returns default mapsstructure.DecoderConfig with suppot @@ -956,13 +944,7 @@ func (v *Viper) UnmarshalExact(rawVal interface{}, opts ...DecoderConfigOption) config := defaultDecoderConfig(rawVal, opts...) config.ErrorUnused = true - err := decode(v.AllSettings(), config) - - if err != nil { - return err - } - - return nil + return decode(v.AllSettings(), config) } // BindPFlags binds a full flag set to the configuration, using each flag's long diff --git a/vendor/github.com/ssgreg/nlreturn/v2/LICENSE b/vendor/github.com/ssgreg/nlreturn/v2/LICENSE new file mode 100644 index 0000000..0a5b4d1 --- /dev/null +++ b/vendor/github.com/ssgreg/nlreturn/v2/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 Grigory Zubankov + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/vendor/github.com/ssgreg/nlreturn/v2/pkg/nlreturn/nlreturn.go b/vendor/github.com/ssgreg/nlreturn/v2/pkg/nlreturn/nlreturn.go new file mode 100644 index 0000000..52318cc --- /dev/null +++ b/vendor/github.com/ssgreg/nlreturn/v2/pkg/nlreturn/nlreturn.go @@ -0,0 +1,86 @@ +package nlreturn + +import ( + "fmt" + "go/ast" + "go/token" + + "golang.org/x/tools/go/analysis" +) + +const ( + linterName = "nlreturn" + linterDoc = `Linter requires a new line before return and branch statements except when the return is alone inside a statement group (such as an if statement) to increase code clarity.` +) + +// NewAnalyzer returns a new nlreturn analyzer. +func NewAnalyzer() *analysis.Analyzer { + return &analysis.Analyzer{ + Name: linterName, + Doc: linterDoc, + Run: run, + } +} + +func run(pass *analysis.Pass) (interface{}, error) { + for _, f := range pass.Files { + ast.Inspect(f, func(node ast.Node) bool { + switch c := node.(type) { + case *ast.CaseClause: + inspectBlock(pass, c.Body) + case *ast.CommClause: + inspectBlock(pass, c.Body) + case *ast.BlockStmt: + inspectBlock(pass, c.List) + } + + return true + }) + } + + return nil, nil +} + +func inspectBlock(pass *analysis.Pass, block []ast.Stmt) { + for i, stmt := range block { + switch stmt.(type) { + case *ast.BranchStmt, *ast.ReturnStmt: + if i == 0 { + return + } + + if line(pass, stmt.Pos())-line(pass, block[i-1].End()) <= 1 { + pass.Report(analysis.Diagnostic{ + Pos: stmt.Pos(), + Message: fmt.Sprintf("%s with no blank line before", name(stmt)), + SuggestedFixes: []analysis.SuggestedFix{ + { + TextEdits: []analysis.TextEdit{ + { + Pos: stmt.Pos(), + NewText: []byte("\n"), + End: stmt.Pos(), + }, + }, + }, + }, + }) + } + } + } +} + +func name(stmt ast.Stmt) string { + switch c := stmt.(type) { + case *ast.BranchStmt: + return c.Tok.String() + case *ast.ReturnStmt: + return "return" + default: + return "unknown" + } +} + +func line(pass *analysis.Pass, pos token.Pos) int { + return pass.Fset.Position(pos).Line +} diff --git a/vendor/github.com/stretchr/testify/assert/assertion_compare.go b/vendor/github.com/stretchr/testify/assert/assertion_compare.go index dc20039..41649d2 100644 --- a/vendor/github.com/stretchr/testify/assert/assertion_compare.go +++ b/vendor/github.com/stretchr/testify/assert/assertion_compare.go @@ -13,12 +13,42 @@ const ( compareGreater ) +var ( + intType = reflect.TypeOf(int(1)) + int8Type = reflect.TypeOf(int8(1)) + int16Type = reflect.TypeOf(int16(1)) + int32Type = reflect.TypeOf(int32(1)) + int64Type = reflect.TypeOf(int64(1)) + + uintType = reflect.TypeOf(uint(1)) + uint8Type = reflect.TypeOf(uint8(1)) + uint16Type = reflect.TypeOf(uint16(1)) + uint32Type = reflect.TypeOf(uint32(1)) + uint64Type = reflect.TypeOf(uint64(1)) + + float32Type = reflect.TypeOf(float32(1)) + float64Type = reflect.TypeOf(float64(1)) + + stringType = reflect.TypeOf("") +) + func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { + obj1Value := reflect.ValueOf(obj1) + obj2Value := reflect.ValueOf(obj2) + + // throughout this switch we try and avoid calling .Convert() if possible, + // as this has a pretty big performance impact switch kind { case reflect.Int: { - intobj1 := obj1.(int) - intobj2 := obj2.(int) + intobj1, ok := obj1.(int) + if !ok { + intobj1 = obj1Value.Convert(intType).Interface().(int) + } + intobj2, ok := obj2.(int) + if !ok { + intobj2 = obj2Value.Convert(intType).Interface().(int) + } if intobj1 > intobj2 { return compareGreater, true } @@ -31,8 +61,14 @@ func compare(obj1, obj2 
interface{}, kind reflect.Kind) (CompareType, bool) { } case reflect.Int8: { - int8obj1 := obj1.(int8) - int8obj2 := obj2.(int8) + int8obj1, ok := obj1.(int8) + if !ok { + int8obj1 = obj1Value.Convert(int8Type).Interface().(int8) + } + int8obj2, ok := obj2.(int8) + if !ok { + int8obj2 = obj2Value.Convert(int8Type).Interface().(int8) + } if int8obj1 > int8obj2 { return compareGreater, true } @@ -45,8 +81,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { } case reflect.Int16: { - int16obj1 := obj1.(int16) - int16obj2 := obj2.(int16) + int16obj1, ok := obj1.(int16) + if !ok { + int16obj1 = obj1Value.Convert(int16Type).Interface().(int16) + } + int16obj2, ok := obj2.(int16) + if !ok { + int16obj2 = obj2Value.Convert(int16Type).Interface().(int16) + } if int16obj1 > int16obj2 { return compareGreater, true } @@ -59,8 +101,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { } case reflect.Int32: { - int32obj1 := obj1.(int32) - int32obj2 := obj2.(int32) + int32obj1, ok := obj1.(int32) + if !ok { + int32obj1 = obj1Value.Convert(int32Type).Interface().(int32) + } + int32obj2, ok := obj2.(int32) + if !ok { + int32obj2 = obj2Value.Convert(int32Type).Interface().(int32) + } if int32obj1 > int32obj2 { return compareGreater, true } @@ -73,8 +121,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { } case reflect.Int64: { - int64obj1 := obj1.(int64) - int64obj2 := obj2.(int64) + int64obj1, ok := obj1.(int64) + if !ok { + int64obj1 = obj1Value.Convert(int64Type).Interface().(int64) + } + int64obj2, ok := obj2.(int64) + if !ok { + int64obj2 = obj2Value.Convert(int64Type).Interface().(int64) + } if int64obj1 > int64obj2 { return compareGreater, true } @@ -87,8 +141,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { } case reflect.Uint: { - uintobj1 := obj1.(uint) - uintobj2 := obj2.(uint) + uintobj1, ok := obj1.(uint) + if !ok { + uintobj1 = obj1Value.Convert(uintType).Interface().(uint) + } + uintobj2, ok := obj2.(uint) + if !ok { + uintobj2 = obj2Value.Convert(uintType).Interface().(uint) + } if uintobj1 > uintobj2 { return compareGreater, true } @@ -101,8 +161,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { } case reflect.Uint8: { - uint8obj1 := obj1.(uint8) - uint8obj2 := obj2.(uint8) + uint8obj1, ok := obj1.(uint8) + if !ok { + uint8obj1 = obj1Value.Convert(uint8Type).Interface().(uint8) + } + uint8obj2, ok := obj2.(uint8) + if !ok { + uint8obj2 = obj2Value.Convert(uint8Type).Interface().(uint8) + } if uint8obj1 > uint8obj2 { return compareGreater, true } @@ -115,8 +181,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { } case reflect.Uint16: { - uint16obj1 := obj1.(uint16) - uint16obj2 := obj2.(uint16) + uint16obj1, ok := obj1.(uint16) + if !ok { + uint16obj1 = obj1Value.Convert(uint16Type).Interface().(uint16) + } + uint16obj2, ok := obj2.(uint16) + if !ok { + uint16obj2 = obj2Value.Convert(uint16Type).Interface().(uint16) + } if uint16obj1 > uint16obj2 { return compareGreater, true } @@ -129,8 +201,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { } case reflect.Uint32: { - uint32obj1 := obj1.(uint32) - uint32obj2 := obj2.(uint32) + uint32obj1, ok := obj1.(uint32) + if !ok { + uint32obj1 = obj1Value.Convert(uint32Type).Interface().(uint32) + } + uint32obj2, ok := obj2.(uint32) + if !ok { + uint32obj2 = obj2Value.Convert(uint32Type).Interface().(uint32) + } if uint32obj1 > 
uint32obj2 { return compareGreater, true } @@ -143,8 +221,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { } case reflect.Uint64: { - uint64obj1 := obj1.(uint64) - uint64obj2 := obj2.(uint64) + uint64obj1, ok := obj1.(uint64) + if !ok { + uint64obj1 = obj1Value.Convert(uint64Type).Interface().(uint64) + } + uint64obj2, ok := obj2.(uint64) + if !ok { + uint64obj2 = obj2Value.Convert(uint64Type).Interface().(uint64) + } if uint64obj1 > uint64obj2 { return compareGreater, true } @@ -157,8 +241,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { } case reflect.Float32: { - float32obj1 := obj1.(float32) - float32obj2 := obj2.(float32) + float32obj1, ok := obj1.(float32) + if !ok { + float32obj1 = obj1Value.Convert(float32Type).Interface().(float32) + } + float32obj2, ok := obj2.(float32) + if !ok { + float32obj2 = obj2Value.Convert(float32Type).Interface().(float32) + } if float32obj1 > float32obj2 { return compareGreater, true } @@ -171,8 +261,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { } case reflect.Float64: { - float64obj1 := obj1.(float64) - float64obj2 := obj2.(float64) + float64obj1, ok := obj1.(float64) + if !ok { + float64obj1 = obj1Value.Convert(float64Type).Interface().(float64) + } + float64obj2, ok := obj2.(float64) + if !ok { + float64obj2 = obj2Value.Convert(float64Type).Interface().(float64) + } if float64obj1 > float64obj2 { return compareGreater, true } @@ -185,8 +281,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { } case reflect.String: { - stringobj1 := obj1.(string) - stringobj2 := obj2.(string) + stringobj1, ok := obj1.(string) + if !ok { + stringobj1 = obj1Value.Convert(stringType).Interface().(string) + } + stringobj2, ok := obj2.(string) + if !ok { + stringobj2 = obj2Value.Convert(stringType).Interface().(string) + } if stringobj1 > stringobj2 { return compareGreater, true } @@ -240,6 +342,24 @@ func LessOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...inter return compareTwoValues(t, e1, e2, []CompareType{compareLess, compareEqual}, "\"%v\" is not less than or equal to \"%v\"", msgAndArgs) } +// Positive asserts that the specified element is positive +// +// assert.Positive(t, 1) +// assert.Positive(t, 1.23) +func Positive(t TestingT, e interface{}, msgAndArgs ...interface{}) bool { + zero := reflect.Zero(reflect.TypeOf(e)) + return compareTwoValues(t, e, zero.Interface(), []CompareType{compareGreater}, "\"%v\" is not positive", msgAndArgs) +} + +// Negative asserts that the specified element is negative +// +// assert.Negative(t, -1) +// assert.Negative(t, -1.23) +func Negative(t TestingT, e interface{}, msgAndArgs ...interface{}) bool { + zero := reflect.Zero(reflect.TypeOf(e)) + return compareTwoValues(t, e, zero.Interface(), []CompareType{compareLess}, "\"%v\" is not negative", msgAndArgs) +} + func compareTwoValues(t TestingT, e1 interface{}, e2 interface{}, allowedComparesResults []CompareType, failMessage string, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() diff --git a/vendor/github.com/stretchr/testify/assert/assertion_format.go b/vendor/github.com/stretchr/testify/assert/assertion_format.go index 49370eb..4dfd122 100644 --- a/vendor/github.com/stretchr/testify/assert/assertion_format.go +++ b/vendor/github.com/stretchr/testify/assert/assertion_format.go @@ -114,6 +114,24 @@ func Errorf(t TestingT, err error, msg string, args ...interface{}) bool { return Error(t, err, 
append([]interface{}{msg}, args...)...) } +// ErrorAsf asserts that at least one of the errors in err's chain matches target, and if so, sets target to that error value. +// This is a wrapper for errors.As. +func ErrorAsf(t TestingT, err error, target interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return ErrorAs(t, err, target, append([]interface{}{msg}, args...)...) +} + +// ErrorIsf asserts that at least one of the errors in err's chain matches target. +// This is a wrapper for errors.Is. +func ErrorIsf(t TestingT, err error, target error, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return ErrorIs(t, err, target, append([]interface{}{msg}, args...)...) +} + // Eventuallyf asserts that given condition will be met in waitFor time, // periodically checking target function each tick. // @@ -321,6 +339,54 @@ func InEpsilonSlicef(t TestingT, expected interface{}, actual interface{}, epsil return InEpsilonSlice(t, expected, actual, epsilon, append([]interface{}{msg}, args...)...) } +// IsDecreasingf asserts that the collection is decreasing +// +// assert.IsDecreasingf(t, []int{2, 1, 0}, "error message %s", "formatted") +// assert.IsDecreasingf(t, []float{2, 1}, "error message %s", "formatted") +// assert.IsDecreasingf(t, []string{"b", "a"}, "error message %s", "formatted") +func IsDecreasingf(t TestingT, object interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return IsDecreasing(t, object, append([]interface{}{msg}, args...)...) +} + +// IsIncreasingf asserts that the collection is increasing +// +// assert.IsIncreasingf(t, []int{1, 2, 3}, "error message %s", "formatted") +// assert.IsIncreasingf(t, []float{1, 2}, "error message %s", "formatted") +// assert.IsIncreasingf(t, []string{"a", "b"}, "error message %s", "formatted") +func IsIncreasingf(t TestingT, object interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return IsIncreasing(t, object, append([]interface{}{msg}, args...)...) +} + +// IsNonDecreasingf asserts that the collection is not decreasing +// +// assert.IsNonDecreasingf(t, []int{1, 1, 2}, "error message %s", "formatted") +// assert.IsNonDecreasingf(t, []float{1, 2}, "error message %s", "formatted") +// assert.IsNonDecreasingf(t, []string{"a", "b"}, "error message %s", "formatted") +func IsNonDecreasingf(t TestingT, object interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return IsNonDecreasing(t, object, append([]interface{}{msg}, args...)...) +} + +// IsNonIncreasingf asserts that the collection is not increasing +// +// assert.IsNonIncreasingf(t, []int{2, 1, 1}, "error message %s", "formatted") +// assert.IsNonIncreasingf(t, []float{2, 1}, "error message %s", "formatted") +// assert.IsNonIncreasingf(t, []string{"b", "a"}, "error message %s", "formatted") +func IsNonIncreasingf(t TestingT, object interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return IsNonIncreasing(t, object, append([]interface{}{msg}, args...)...) +} + // IsTypef asserts that the specified objects are of the same type. func IsTypef(t TestingT, expectedType interface{}, object interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { @@ -375,6 +441,17 @@ func LessOrEqualf(t TestingT, e1 interface{}, e2 interface{}, msg string, args . 
return LessOrEqual(t, e1, e2, append([]interface{}{msg}, args...)...) } +// Negativef asserts that the specified element is negative +// +// assert.Negativef(t, -1, "error message %s", "formatted") +// assert.Negativef(t, -1.23, "error message %s", "formatted") +func Negativef(t TestingT, e interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return Negative(t, e, append([]interface{}{msg}, args...)...) +} + // Neverf asserts that the given condition doesn't satisfy in waitFor time, // periodically checking the target function each tick. // @@ -476,6 +553,15 @@ func NotEqualValuesf(t TestingT, expected interface{}, actual interface{}, msg s return NotEqualValues(t, expected, actual, append([]interface{}{msg}, args...)...) } +// NotErrorIsf asserts that at none of the errors in err's chain matches target. +// This is a wrapper for errors.Is. +func NotErrorIsf(t TestingT, err error, target error, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return NotErrorIs(t, err, target, append([]interface{}{msg}, args...)...) +} + // NotNilf asserts that the specified object is not nil. // // assert.NotNilf(t, err, "error message %s", "formatted") @@ -572,6 +658,17 @@ func PanicsWithValuef(t TestingT, expected interface{}, f PanicTestFunc, msg str return PanicsWithValue(t, expected, f, append([]interface{}{msg}, args...)...) } +// Positivef asserts that the specified element is positive +// +// assert.Positivef(t, 1, "error message %s", "formatted") +// assert.Positivef(t, 1.23, "error message %s", "formatted") +func Positivef(t TestingT, e interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return Positive(t, e, append([]interface{}{msg}, args...)...) +} + // Regexpf asserts that a specified regexp matches a string. // // assert.Regexpf(t, regexp.MustCompile("start"), "it's starting", "error message %s", "formatted") diff --git a/vendor/github.com/stretchr/testify/assert/assertion_forward.go b/vendor/github.com/stretchr/testify/assert/assertion_forward.go index 9db8894..25337a6 100644 --- a/vendor/github.com/stretchr/testify/assert/assertion_forward.go +++ b/vendor/github.com/stretchr/testify/assert/assertion_forward.go @@ -204,6 +204,42 @@ func (a *Assertions) Error(err error, msgAndArgs ...interface{}) bool { return Error(a.t, err, msgAndArgs...) } +// ErrorAs asserts that at least one of the errors in err's chain matches target, and if so, sets target to that error value. +// This is a wrapper for errors.As. +func (a *Assertions) ErrorAs(err error, target interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return ErrorAs(a.t, err, target, msgAndArgs...) +} + +// ErrorAsf asserts that at least one of the errors in err's chain matches target, and if so, sets target to that error value. +// This is a wrapper for errors.As. +func (a *Assertions) ErrorAsf(err error, target interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return ErrorAsf(a.t, err, target, msg, args...) +} + +// ErrorIs asserts that at least one of the errors in err's chain matches target. +// This is a wrapper for errors.Is. +func (a *Assertions) ErrorIs(err error, target error, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return ErrorIs(a.t, err, target, msgAndArgs...) 
+} + +// ErrorIsf asserts that at least one of the errors in err's chain matches target. +// This is a wrapper for errors.Is. +func (a *Assertions) ErrorIsf(err error, target error, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return ErrorIsf(a.t, err, target, msg, args...) +} + // Errorf asserts that a function returned an error (i.e. not `nil`). // // actualObj, err := SomeFunction() @@ -631,6 +667,102 @@ func (a *Assertions) InEpsilonf(expected interface{}, actual interface{}, epsilo return InEpsilonf(a.t, expected, actual, epsilon, msg, args...) } +// IsDecreasing asserts that the collection is decreasing +// +// a.IsDecreasing([]int{2, 1, 0}) +// a.IsDecreasing([]float{2, 1}) +// a.IsDecreasing([]string{"b", "a"}) +func (a *Assertions) IsDecreasing(object interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return IsDecreasing(a.t, object, msgAndArgs...) +} + +// IsDecreasingf asserts that the collection is decreasing +// +// a.IsDecreasingf([]int{2, 1, 0}, "error message %s", "formatted") +// a.IsDecreasingf([]float{2, 1}, "error message %s", "formatted") +// a.IsDecreasingf([]string{"b", "a"}, "error message %s", "formatted") +func (a *Assertions) IsDecreasingf(object interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return IsDecreasingf(a.t, object, msg, args...) +} + +// IsIncreasing asserts that the collection is increasing +// +// a.IsIncreasing([]int{1, 2, 3}) +// a.IsIncreasing([]float{1, 2}) +// a.IsIncreasing([]string{"a", "b"}) +func (a *Assertions) IsIncreasing(object interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return IsIncreasing(a.t, object, msgAndArgs...) +} + +// IsIncreasingf asserts that the collection is increasing +// +// a.IsIncreasingf([]int{1, 2, 3}, "error message %s", "formatted") +// a.IsIncreasingf([]float{1, 2}, "error message %s", "formatted") +// a.IsIncreasingf([]string{"a", "b"}, "error message %s", "formatted") +func (a *Assertions) IsIncreasingf(object interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return IsIncreasingf(a.t, object, msg, args...) +} + +// IsNonDecreasing asserts that the collection is not decreasing +// +// a.IsNonDecreasing([]int{1, 1, 2}) +// a.IsNonDecreasing([]float{1, 2}) +// a.IsNonDecreasing([]string{"a", "b"}) +func (a *Assertions) IsNonDecreasing(object interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return IsNonDecreasing(a.t, object, msgAndArgs...) +} + +// IsNonDecreasingf asserts that the collection is not decreasing +// +// a.IsNonDecreasingf([]int{1, 1, 2}, "error message %s", "formatted") +// a.IsNonDecreasingf([]float{1, 2}, "error message %s", "formatted") +// a.IsNonDecreasingf([]string{"a", "b"}, "error message %s", "formatted") +func (a *Assertions) IsNonDecreasingf(object interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return IsNonDecreasingf(a.t, object, msg, args...) 
+} + +// IsNonIncreasing asserts that the collection is not increasing +// +// a.IsNonIncreasing([]int{2, 1, 1}) +// a.IsNonIncreasing([]float{2, 1}) +// a.IsNonIncreasing([]string{"b", "a"}) +func (a *Assertions) IsNonIncreasing(object interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return IsNonIncreasing(a.t, object, msgAndArgs...) +} + +// IsNonIncreasingf asserts that the collection is not increasing +// +// a.IsNonIncreasingf([]int{2, 1, 1}, "error message %s", "formatted") +// a.IsNonIncreasingf([]float{2, 1}, "error message %s", "formatted") +// a.IsNonIncreasingf([]string{"b", "a"}, "error message %s", "formatted") +func (a *Assertions) IsNonIncreasingf(object interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return IsNonIncreasingf(a.t, object, msg, args...) +} + // IsType asserts that the specified objects are of the same type. func (a *Assertions) IsType(expectedType interface{}, object interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { @@ -739,6 +871,28 @@ func (a *Assertions) Lessf(e1 interface{}, e2 interface{}, msg string, args ...i return Lessf(a.t, e1, e2, msg, args...) } +// Negative asserts that the specified element is negative +// +// a.Negative(-1) +// a.Negative(-1.23) +func (a *Assertions) Negative(e interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Negative(a.t, e, msgAndArgs...) +} + +// Negativef asserts that the specified element is negative +// +// a.Negativef(-1, "error message %s", "formatted") +// a.Negativef(-1.23, "error message %s", "formatted") +func (a *Assertions) Negativef(e interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Negativef(a.t, e, msg, args...) +} + // Never asserts that the given condition doesn't satisfy in waitFor time, // periodically checking the target function each tick. // @@ -941,6 +1095,24 @@ func (a *Assertions) NotEqualf(expected interface{}, actual interface{}, msg str return NotEqualf(a.t, expected, actual, msg, args...) } +// NotErrorIs asserts that at none of the errors in err's chain matches target. +// This is a wrapper for errors.Is. +func (a *Assertions) NotErrorIs(err error, target error, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return NotErrorIs(a.t, err, target, msgAndArgs...) +} + +// NotErrorIsf asserts that at none of the errors in err's chain matches target. +// This is a wrapper for errors.Is. +func (a *Assertions) NotErrorIsf(err error, target error, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return NotErrorIsf(a.t, err, target, msg, args...) +} + // NotNil asserts that the specified object is not nil. // // a.NotNil(err) @@ -1133,6 +1305,28 @@ func (a *Assertions) Panicsf(f PanicTestFunc, msg string, args ...interface{}) b return Panicsf(a.t, f, msg, args...) } +// Positive asserts that the specified element is positive +// +// a.Positive(1) +// a.Positive(1.23) +func (a *Assertions) Positive(e interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Positive(a.t, e, msgAndArgs...) 
+} + +// Positivef asserts that the specified element is positive +// +// a.Positivef(1, "error message %s", "formatted") +// a.Positivef(1.23, "error message %s", "formatted") +func (a *Assertions) Positivef(e interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Positivef(a.t, e, msg, args...) +} + // Regexp asserts that a specified regexp matches a string. // // a.Regexp(regexp.MustCompile("start"), "it's starting") diff --git a/vendor/github.com/stretchr/testify/assert/assertion_order.go b/vendor/github.com/stretchr/testify/assert/assertion_order.go new file mode 100644 index 0000000..1c3b471 --- /dev/null +++ b/vendor/github.com/stretchr/testify/assert/assertion_order.go @@ -0,0 +1,81 @@ +package assert + +import ( + "fmt" + "reflect" +) + +// isOrdered checks that collection contains orderable elements. +func isOrdered(t TestingT, object interface{}, allowedComparesResults []CompareType, failMessage string, msgAndArgs ...interface{}) bool { + objKind := reflect.TypeOf(object).Kind() + if objKind != reflect.Slice && objKind != reflect.Array { + return false + } + + objValue := reflect.ValueOf(object) + objLen := objValue.Len() + + if objLen <= 1 { + return true + } + + value := objValue.Index(0) + valueInterface := value.Interface() + firstValueKind := value.Kind() + + for i := 1; i < objLen; i++ { + prevValue := value + prevValueInterface := valueInterface + + value = objValue.Index(i) + valueInterface = value.Interface() + + compareResult, isComparable := compare(prevValueInterface, valueInterface, firstValueKind) + + if !isComparable { + return Fail(t, fmt.Sprintf("Can not compare type \"%s\" and \"%s\"", reflect.TypeOf(value), reflect.TypeOf(prevValue)), msgAndArgs...) + } + + if !containsValue(allowedComparesResults, compareResult) { + return Fail(t, fmt.Sprintf(failMessage, prevValue, value), msgAndArgs...) 
+ } + } + + return true +} + +// IsIncreasing asserts that the collection is increasing +// +// assert.IsIncreasing(t, []int{1, 2, 3}) +// assert.IsIncreasing(t, []float{1, 2}) +// assert.IsIncreasing(t, []string{"a", "b"}) +func IsIncreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { + return isOrdered(t, object, []CompareType{compareLess}, "\"%v\" is not less than \"%v\"", msgAndArgs) +} + +// IsNonIncreasing asserts that the collection is not increasing +// +// assert.IsNonIncreasing(t, []int{2, 1, 1}) +// assert.IsNonIncreasing(t, []float{2, 1}) +// assert.IsNonIncreasing(t, []string{"b", "a"}) +func IsNonIncreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { + return isOrdered(t, object, []CompareType{compareEqual, compareGreater}, "\"%v\" is not greater than or equal to \"%v\"", msgAndArgs) +} + +// IsDecreasing asserts that the collection is decreasing +// +// assert.IsDecreasing(t, []int{2, 1, 0}) +// assert.IsDecreasing(t, []float{2, 1}) +// assert.IsDecreasing(t, []string{"b", "a"}) +func IsDecreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { + return isOrdered(t, object, []CompareType{compareGreater}, "\"%v\" is not greater than \"%v\"", msgAndArgs) +} + +// IsNonDecreasing asserts that the collection is not decreasing +// +// assert.IsNonDecreasing(t, []int{1, 1, 2}) +// assert.IsNonDecreasing(t, []float{1, 2}) +// assert.IsNonDecreasing(t, []string{"a", "b"}) +func IsNonDecreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { + return isOrdered(t, object, []CompareType{compareLess, compareEqual}, "\"%v\" is not less than or equal to \"%v\"", msgAndArgs) +} diff --git a/vendor/github.com/stretchr/testify/assert/assertions.go b/vendor/github.com/stretchr/testify/assert/assertions.go index 914a10d..bcac440 100644 --- a/vendor/github.com/stretchr/testify/assert/assertions.go +++ b/vendor/github.com/stretchr/testify/assert/assertions.go @@ -172,8 +172,8 @@ func isTest(name, prefix string) bool { if len(name) == len(prefix) { // "Test" is ok return true } - rune, _ := utf8.DecodeRuneInString(name[len(prefix):]) - return !unicode.IsLower(rune) + r, _ := utf8.DecodeRuneInString(name[len(prefix):]) + return !unicode.IsLower(r) } func messageFromMsgAndArgs(msgAndArgs ...interface{}) string { @@ -1622,6 +1622,7 @@ var spewConfig = spew.ConfigState{ DisableCapacities: true, SortKeys: true, DisableMethods: true, + MaxDepth: 10, } type tHelper interface { @@ -1693,3 +1694,81 @@ func Never(t TestingT, condition func() bool, waitFor time.Duration, tick time.D } } } + +// ErrorIs asserts that at least one of the errors in err's chain matches target. +// This is a wrapper for errors.Is. +func ErrorIs(t TestingT, err, target error, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if errors.Is(err, target) { + return true + } + + var expectedText string + if target != nil { + expectedText = target.Error() + } + + chain := buildErrorChainString(err) + + return Fail(t, fmt.Sprintf("Target error should be in err chain:\n"+ + "expected: %q\n"+ + "in chain: %s", expectedText, chain, + ), msgAndArgs...) +} + +// NotErrorIs asserts that at none of the errors in err's chain matches target. +// This is a wrapper for errors.Is. 
+func NotErrorIs(t TestingT, err, target error, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if !errors.Is(err, target) { + return true + } + + var expectedText string + if target != nil { + expectedText = target.Error() + } + + chain := buildErrorChainString(err) + + return Fail(t, fmt.Sprintf("Target error should not be in err chain:\n"+ + "found: %q\n"+ + "in chain: %s", expectedText, chain, + ), msgAndArgs...) +} + +// ErrorAs asserts that at least one of the errors in err's chain matches target, and if so, sets target to that error value. +// This is a wrapper for errors.As. +func ErrorAs(t TestingT, err error, target interface{}, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if errors.As(err, target) { + return true + } + + chain := buildErrorChainString(err) + + return Fail(t, fmt.Sprintf("Should be in error chain:\n"+ + "expected: %q\n"+ + "in chain: %s", target, chain, + ), msgAndArgs...) +} + +func buildErrorChainString(err error) string { + if err == nil { + return "" + } + + e := errors.Unwrap(err) + chain := fmt.Sprintf("%q", err.Error()) + for e != nil { + chain += fmt.Sprintf("\n\t%q", e.Error()) + e = errors.Unwrap(e) + } + return chain +} diff --git a/vendor/github.com/stretchr/testify/mock/mock.go b/vendor/github.com/stretchr/testify/mock/mock.go index c6df448..e2e6a2d 100644 --- a/vendor/github.com/stretchr/testify/mock/mock.go +++ b/vendor/github.com/stretchr/testify/mock/mock.go @@ -297,25 +297,52 @@ func (m *Mock) findExpectedCall(method string, arguments ...interface{}) (int, * return -1, expectedCall } +type matchCandidate struct { + call *Call + mismatch string + diffCount int +} + +func (c matchCandidate) isBetterMatchThan(other matchCandidate) bool { + if c.call == nil { + return false + } + if other.call == nil { + return true + } + + if c.diffCount > other.diffCount { + return false + } + if c.diffCount < other.diffCount { + return true + } + + if c.call.Repeatability > 0 && other.call.Repeatability <= 0 { + return true + } + return false +} + func (m *Mock) findClosestCall(method string, arguments ...interface{}) (*Call, string) { - var diffCount int - var closestCall *Call - var err string + var bestMatch matchCandidate for _, call := range m.expectedCalls() { if call.Method == method { errInfo, tempDiffCount := call.Arguments.Diff(arguments) - if tempDiffCount < diffCount || diffCount == 0 { - diffCount = tempDiffCount - closestCall = call - err = errInfo + tempCandidate := matchCandidate{ + call: call, + mismatch: errInfo, + diffCount: tempDiffCount, + } + if tempCandidate.isBetterMatchThan(bestMatch) { + bestMatch = tempCandidate } - } } - return closestCall, err + return bestMatch.call, bestMatch.mismatch } func callString(method string, arguments Arguments, includeArgumentValues bool) string { diff --git a/vendor/github.com/tetafro/godot/.gitignore b/vendor/github.com/tetafro/godot/.gitignore index 2b87e39..db77fd1 100644 --- a/vendor/github.com/tetafro/godot/.gitignore +++ b/vendor/github.com/tetafro/godot/.gitignore @@ -1,2 +1,4 @@ /dist/ +/vendor/ /godot +/profile.out diff --git a/vendor/github.com/tetafro/godot/.golangci.yml b/vendor/github.com/tetafro/godot/.golangci.yml new file mode 100644 index 0000000..2b799b2 --- /dev/null +++ b/vendor/github.com/tetafro/godot/.golangci.yml @@ -0,0 +1,67 @@ +run: + concurrency: 2 + deadline: 5m + +skip-dirs: + - path: ./testdata/ + +linters: + disable-all: true + enable: + - deadcode + - errcheck + - gosimple + - govet + - 
ineffassign + - staticcheck + - structcheck + - typecheck + - unused + - varcheck + - bodyclose + - depguard + - dogsled + - dupl + - funlen + - gochecknoinits + - goconst + - gocritic + - gocyclo + - godot + - gofmt + - gofumpt + - goimports + - golint + - gomnd + - gomodguard + - goprintffuncname + - gosec + - lll + - maligned + - misspell + - nakedret + - nestif + - prealloc + - rowserrcheck + - scopelint + - stylecheck + - unconvert + - unparam + - whitespace + +linters-settings: + godot: + check-all: true + +issues: + exclude-use-default: false + exclude-rules: + - path: _test\.go + linters: + - dupl + - errcheck + - funlen + - gosec + - path: cmd/godot/main\.go + linters: + - gomnd diff --git a/vendor/github.com/tetafro/godot/.goreleaser.yml b/vendor/github.com/tetafro/godot/.goreleaser.yml index 87a05a2..c0fc2b6 100644 --- a/vendor/github.com/tetafro/godot/.goreleaser.yml +++ b/vendor/github.com/tetafro/godot/.goreleaser.yml @@ -8,5 +8,4 @@ changelog: sort: asc filters: exclude: - - '^docs:' - - '^test:' + - '^Merge pull request' diff --git a/vendor/github.com/tetafro/godot/Makefile b/vendor/github.com/tetafro/godot/Makefile index fee7693..f167051 100644 --- a/vendor/github.com/tetafro/godot/Makefile +++ b/vendor/github.com/tetafro/godot/Makefile @@ -1,7 +1,21 @@ +.PHONY: dep +dep: + go mod tidy && go mod verify + .PHONY: test test: go test ./... +.PHONY: cover +cover: + go test -coverprofile cover.out ./... + go tool cover -html=cover.out + rm -f cover.out + +.PHONY: lint +lint: + golangci-lint run + .PHONY: build build: go build -o godot ./cmd/godot diff --git a/vendor/github.com/tetafro/godot/README.md b/vendor/github.com/tetafro/godot/README.md index e2d2f6b..7e60913 100644 --- a/vendor/github.com/tetafro/godot/README.md +++ b/vendor/github.com/tetafro/godot/README.md @@ -13,24 +13,35 @@ end of the last sentence if needed. > Comments should begin with the name of the thing being described > and end in a period -## Install and run +## Install *NOTE: Godot is available as a part of [GolangCI Lint](https://github.com/golangci/golangci-lint) (disabled by default).* Build from source + ```sh go get -u github.com/tetafro/godot/cmd/godot ``` or download binary from [releases page](https://github.com/tetafro/godot/releases). -Run +## Run + ```sh godot ./myproject ``` -## Examples +Autofix flags are also available + +```sh +godot -f ./myproject # fix issues and print the result +godot -w ./myproject # fix issues and replace the original file +``` + +See all flags with `godot -h`. + +## Example Code @@ -46,9 +57,5 @@ func Sum(a, b int) int { Output ```sh -Top level comment should end in a period: math/math.go:3:1 +Comment should end in a period: math/math.go:3:1 ``` - -See more examples in test files: -- [for default mode](testdata/example_default.go) -- [for using --all flag](testdata/example_checkall.go) diff --git a/vendor/github.com/tetafro/godot/checks.go b/vendor/github.com/tetafro/godot/checks.go new file mode 100644 index 0000000..0e66add --- /dev/null +++ b/vendor/github.com/tetafro/godot/checks.go @@ -0,0 +1,260 @@ +package godot + +import ( + "go/token" + "regexp" + "strings" + "unicode" +) + +// Error messages. +const ( + noPeriodMessage = "Comment should end in a period" + noCapitalMessage = "Sentence should start with a capital letter" +) + +var ( + // List of valid sentence ending. + // A sentence can be inside parenthesis, and therefore ends with parenthesis. 
+ lastChars = []string{".", "?", "!", ".)", "?)", "!)", specialReplacer} + + // Special tags in comments like "// nolint:", or "// +k8s:". + tags = regexp.MustCompile(`^\+?[a-z0-9]+:`) + + // Special hashtags in comments like "// #nosec". + hashtags = regexp.MustCompile(`^#[a-z]+($|\s)`) + + // URL at the end of the line. + endURL = regexp.MustCompile(`[a-z]+://[^\s]+$`) +) + +// checkComments checks every comment accordings to the rules from +// `settings` argument. +func checkComments(comments []comment, settings Settings) []Issue { + var issues []Issue // nolint: prealloc + for _, c := range comments { + if settings.Period { + if iss := checkCommentForPeriod(c); iss != nil { + issues = append(issues, *iss) + } + } + if settings.Capital { + if iss := checkCommentForCapital(c); len(iss) > 0 { + issues = append(issues, iss...) + } + } + } + return issues +} + +// checkCommentForPeriod checks that the last sentense of the comment ends +// in a period. +func checkCommentForPeriod(c comment) *Issue { + pos, ok := checkPeriod(c.text) + if ok { + return nil + } + + // Shift position by the length of comment's special symbols: /* or // + isBlock := strings.HasPrefix(c.lines[0], "/*") + if (isBlock && pos.line == 1) || !isBlock { + pos.column += 2 + } + + iss := Issue{ + Pos: token.Position{ + Filename: c.start.Filename, + Offset: c.start.Offset, + Line: pos.line + c.start.Line - 1, + Column: pos.column + c.start.Column - 1, + }, + Message: noPeriodMessage, + } + + // Make a replacement. Use `pos.line` to get an original line from + // attached lines. Use `iss.Pos.Column` because it's a position in + // the original line. + original := []rune(c.lines[pos.line-1]) + iss.Replacement = string(original[:iss.Pos.Column-1]) + "." + + string(original[iss.Pos.Column-1:]) + + // Save replacement to raw lines to be able to combine it with + // further replacements + c.lines[pos.line-1] = iss.Replacement + + return &iss +} + +// checkCommentForCapital checks that the each sentense of the comment starts with +// a capital letter. +// nolint: unparam +func checkCommentForCapital(c comment) []Issue { + pp := checkCapital(c.text, c.decl) + if len(pp) == 0 { + return nil + } + + issues := make([]Issue, len(pp)) + for i, pos := range pp { + // Shift position by the length of comment's special symbols: /* or // + isBlock := strings.HasPrefix(c.lines[0], "/*") + if (isBlock && pos.line == 1) || !isBlock { + pos.column += 2 + } + + iss := Issue{ + Pos: token.Position{ + Filename: c.start.Filename, + Offset: c.start.Offset, + Line: pos.line + c.start.Line - 1, + Column: pos.column + c.start.Column - 1, + }, + Message: noCapitalMessage, + } + + // Make a replacement. Use `pos.line` to get an original line from + // attached lines. Use `iss.Pos.Column` because it's a position in + // the original line. + rep := []rune(c.lines[pos.line-1]) + rep[iss.Pos.Column-1] = unicode.ToTitle(rep[iss.Pos.Column-1]) + iss.Replacement = string(rep) + + // Save replacement to raw lines to be able to combine it with + // further replacements + c.lines[pos.line-1] = iss.Replacement + + issues[i] = iss + } + + return issues +} + +// checkPeriod checks that the last sentense of the text ends in a period. +// NOTE: Returned position is a position inside given text, not in the +// original file. 
+func checkPeriod(comment string) (pos position, ok bool) { + // Check last non-empty line + var found bool + var line string + lines := strings.Split(comment, "\n") + for i := len(lines) - 1; i >= 0; i-- { + line = strings.TrimRightFunc(lines[i], unicode.IsSpace) + if line == "" { + continue + } + found = true + pos.line = i + 1 + break + } + // All lines are empty + if !found { + return position{}, true + } + // Correct line + if hasSuffix(line, lastChars) { + return position{}, true + } + + pos.column = len([]rune(line)) + 1 + return pos, false +} + +// checkCapital checks that the each sentense of the text starts with +// a capital letter. +// NOTE: First letter is not checked in declaration comments, because they +// can describe unexported functions, which start from small letter. +func checkCapital(comment string, skipFirst bool) (pp []position) { + // List of states during the scan: `empty` - nothing special, + // `endChar` - found one of sentence ending chars (.!?), + // `endOfSentence` - found `endChar`, and then space or newline. + const empty, endChar, endOfSentence = 1, 2, 3 + + pos := position{line: 1} + state := endOfSentence + if skipFirst { + state = empty + } + for _, r := range comment { + s := string(r) + + pos.column++ + if s == "\n" { + pos.line++ + pos.column = 0 + if state == endChar { + state = endOfSentence + } + continue + } + if s == "." || s == "!" || s == "?" { + state = endChar + continue + } + if s == ")" && state == endChar { + continue + } + if s == " " { + if state == endChar { + state = endOfSentence + } + continue + } + if state == endOfSentence && unicode.IsLower(r) { + pp = append(pp, position{line: pos.line, column: pos.column}) + } + state = empty + } + return pp +} + +// isSpecialBlock checks that given block of comment lines is special and +// shouldn't be checked as a regular sentence. +func isSpecialBlock(comment string) bool { + // Skip cgo code blocks + // TODO: Find a better way to detect cgo code + if strings.HasPrefix(comment, "/*") && (strings.Contains(comment, "#include") || + strings.Contains(comment, "#define")) { + return true + } + return false +} + +// isSpecialBlock checks that given comment line is special and +// shouldn't be checked as a regular sentence. 
+func isSpecialLine(comment string) bool { + // Skip cgo export tags: https://golang.org/cmd/cgo/#hdr-C_references_to_Go + if strings.HasPrefix(comment, "//export ") { + return true + } + + comment = strings.TrimPrefix(comment, "//") + comment = strings.TrimPrefix(comment, "/*") + + // Don't check comments starting with space indentation - they may + // contain code examples, which shouldn't end with period + if strings.HasPrefix(comment, " ") || + strings.HasPrefix(comment, " \t") || + strings.HasPrefix(comment, "\t") { + return true + } + + // Skip tags and URLs + comment = strings.TrimSpace(comment) + if tags.MatchString(comment) || + hashtags.MatchString(comment) || + endURL.MatchString(comment) || + strings.HasPrefix(comment, "+build") { + return true + } + + return false +} + +func hasSuffix(s string, suffixes []string) bool { + for _, suffix := range suffixes { + if strings.HasSuffix(s, suffix) { + return true + } + } + return false +} diff --git a/vendor/github.com/tetafro/godot/getters.go b/vendor/github.com/tetafro/godot/getters.go new file mode 100644 index 0000000..02b8ace --- /dev/null +++ b/vendor/github.com/tetafro/godot/getters.go @@ -0,0 +1,260 @@ +package godot + +import ( + "errors" + "fmt" + "go/ast" + "go/token" + "io/ioutil" + "regexp" + "strings" +) + +var errEmptyInput = errors.New("empty input") + +// specialReplacer is a replacer for some types of special lines in comments, +// which shouldn't be checked. For example, if comment ends with a block of +// code it should not necessarily have a period at the end. +const specialReplacer = "" + +type parsedFile struct { + fset *token.FileSet + file *ast.File + lines []string +} + +func newParsedFile(file *ast.File, fset *token.FileSet) (*parsedFile, error) { + if file == nil || fset == nil || len(file.Comments) == 0 { + return nil, errEmptyInput + } + + pf := parsedFile{ + fset: fset, + file: file, + } + + var err error + + // Read original file. This is necessary for making a replacements for + // inline comments. I couldn't find a better way to get original line + // with code and comment without reading the file. Function `Format` + // from "go/format" won't help here if the original file is not gofmt-ed. + pf.lines, err = readFile(file, fset) + if err != nil { + return nil, fmt.Errorf("read file: %v", err) + } + + // Check consistency to avoid checking slice indexes in each function + lastComment := pf.file.Comments[len(pf.file.Comments)-1] + if p := pf.fset.Position(lastComment.End()); len(pf.lines) < p.Line { + return nil, fmt.Errorf("inconsistence between file and AST: %s", p.Filename) + } + + return &pf, nil +} + +// getComments extracts comments from a file. +func (pf *parsedFile) getComments(scope Scope, exclude []*regexp.Regexp) []comment { + var comments []comment + decl := pf.getDeclarationComments(exclude) + switch scope { + case AllScope: + // All comments + comments = pf.getAllComments(exclude) + case TopLevelScope: + // All top level comments and comments from the inside + // of top level blocks + comments = append( + pf.getBlockComments(exclude), + pf.getTopLevelComments(exclude)..., + ) + default: + // Top level declaration comments and comments from the inside + // of top level blocks + comments = append(pf.getBlockComments(exclude), decl...) + } + + // Set `decl` flag + setDecl(comments, decl) + + return comments +} + +// getBlockComments gets comments from the inside of top level blocks: +// var (...), const (...). 
+func (pf *parsedFile) getBlockComments(exclude []*regexp.Regexp) []comment { + var comments []comment + for _, decl := range pf.file.Decls { + d, ok := decl.(*ast.GenDecl) + if !ok { + continue + } + // No parenthesis == no block + if d.Lparen == 0 { + continue + } + for _, c := range pf.file.Comments { + if c == nil || len(c.List) == 0 { + continue + } + // Skip comments outside this block + if d.Lparen > c.Pos() || c.Pos() > d.Rparen { + continue + } + // Skip comments that are not top-level for this block + // (the block itself is top level, so comments inside this block + // would be on column 2) + // nolint: gomnd + if pf.fset.Position(c.Pos()).Column != 2 { + continue + } + firstLine := pf.fset.Position(c.Pos()).Line + lastLine := pf.fset.Position(c.End()).Line + comments = append(comments, comment{ + lines: pf.lines[firstLine-1 : lastLine], + text: getText(c, exclude), + start: pf.fset.Position(c.List[0].Slash), + }) + } + } + return comments +} + +// getTopLevelComments gets all top level comments. +func (pf *parsedFile) getTopLevelComments(exclude []*regexp.Regexp) []comment { + var comments []comment // nolint: prealloc + for _, c := range pf.file.Comments { + if c == nil || len(c.List) == 0 { + continue + } + if pf.fset.Position(c.Pos()).Column != 1 { + continue + } + firstLine := pf.fset.Position(c.Pos()).Line + lastLine := pf.fset.Position(c.End()).Line + comments = append(comments, comment{ + lines: pf.lines[firstLine-1 : lastLine], + text: getText(c, exclude), + start: pf.fset.Position(c.List[0].Slash), + }) + } + return comments +} + +// getDeclarationComments gets top level declaration comments. +func (pf *parsedFile) getDeclarationComments(exclude []*regexp.Regexp) []comment { + var comments []comment // nolint: prealloc + for _, decl := range pf.file.Decls { + var cg *ast.CommentGroup + switch d := decl.(type) { + case *ast.GenDecl: + cg = d.Doc + case *ast.FuncDecl: + cg = d.Doc + } + + if cg == nil || len(cg.List) == 0 { + continue + } + + firstLine := pf.fset.Position(cg.Pos()).Line + lastLine := pf.fset.Position(cg.End()).Line + comments = append(comments, comment{ + lines: pf.lines[firstLine-1 : lastLine], + text: getText(cg, exclude), + start: pf.fset.Position(cg.List[0].Slash), + }) + } + return comments +} + +// getAllComments gets every single comment from the file. +func (pf *parsedFile) getAllComments(exclude []*regexp.Regexp) []comment { + var comments []comment //nolint: prealloc + for _, c := range pf.file.Comments { + if c == nil || len(c.List) == 0 { + continue + } + firstLine := pf.fset.Position(c.Pos()).Line + lastLine := pf.fset.Position(c.End()).Line + comments = append(comments, comment{ + lines: pf.lines[firstLine-1 : lastLine], + start: pf.fset.Position(c.List[0].Slash), + text: getText(c, exclude), + }) + } + return comments +} + +// getText extracts text from comment. If comment is a special block +// (e.g., CGO code), a block of empty lines is returned. If comment contains +// special lines (e.g., tags or indented code examples), they are replaced +// with `specialReplacer` to skip checks for it. +// The result can be multiline. 
+func getText(comment *ast.CommentGroup, exclude []*regexp.Regexp) (s string) { + if len(comment.List) == 1 && + strings.HasPrefix(comment.List[0].Text, "/*") && + isSpecialBlock(comment.List[0].Text) { + return "" + } + + for _, c := range comment.List { + text := c.Text + isBlock := false + if strings.HasPrefix(c.Text, "/*") { + isBlock = true + text = strings.TrimPrefix(text, "/*") + text = strings.TrimSuffix(text, "*/") + } + for _, line := range strings.Split(text, "\n") { + if isSpecialLine(line) { + s += specialReplacer + "\n" + continue + } + if !isBlock { + line = strings.TrimPrefix(line, "//") + } + if matchAny(line, exclude) { + s += specialReplacer + "\n" + continue + } + s += line + "\n" + } + } + if len(s) == 0 { + return "" + } + return s[:len(s)-1] // trim last "\n" +} + +// readFile reads file and returns it's lines as strings. +func readFile(file *ast.File, fset *token.FileSet) ([]string, error) { + fname := fset.File(file.Package) + f, err := ioutil.ReadFile(fname.Name()) + if err != nil { + return nil, err + } + return strings.Split(string(f), "\n"), nil +} + +// setDecl sets `decl` flag to comments which are declaration comments. +func setDecl(comments, decl []comment) { + for _, d := range decl { + for i, c := range comments { + if d.start == c.start { + comments[i].decl = true + break + } + } + } +} + +// matchAny checks if string matches any of given regexps. +func matchAny(s string, rr []*regexp.Regexp) bool { + for _, re := range rr { + if re.MatchString(s) { + return true + } + } + return false +} diff --git a/vendor/github.com/tetafro/godot/go.mod b/vendor/github.com/tetafro/godot/go.mod index d86531c..b67b947 100644 --- a/vendor/github.com/tetafro/godot/go.mod +++ b/vendor/github.com/tetafro/godot/go.mod @@ -1,3 +1,3 @@ module github.com/tetafro/godot -go 1.14 +go 1.15 diff --git a/vendor/github.com/tetafro/godot/godot.go b/vendor/github.com/tetafro/godot/godot.go index 5eec835..526a5f7 100644 --- a/vendor/github.com/tetafro/godot/godot.go +++ b/vendor/github.com/tetafro/godot/godot.go @@ -1,151 +1,135 @@ -// Package godot checks if all top-level comments contain a period at the -// end of the last sentence if needed. +// Package godot checks if comments contain a period at the end of the last +// sentence if needed. package godot import ( + "fmt" "go/ast" "go/token" + "io/ioutil" + "os" "regexp" + "sort" "strings" ) -const noPeriodMessage = "Top level comment should end in a period" +// NOTE: Line and column indexes are 1-based. -// Message contains a message of linting error. -type Message struct { - Pos token.Position - Message string -} +// NOTE: Errors `invalid line number inside comment...` should never happen. +// Their goal is to prevent panic, if there's a bug with array indexes. -// Settings contains linter settings. -type Settings struct { - // Check all top-level comments, not only declarations - CheckAll bool +// Issue contains a description of linting error and a recommended replacement. +type Issue struct { + Pos token.Position + Message string + Replacement string } -var ( - // List of valid last characters. - lastChars = []string{".", "?", "!"} - - // Special tags in comments like "nolint" or "build". - tags = regexp.MustCompile("^[a-z]+:") - - // Special hashtags in comments like "#nosec". - hashtags = regexp.MustCompile("^#[a-z]+ ") +// position is a position inside a comment (might be multiline comment). +type position struct { + line int + column int +} - // URL at the end of the line. 
- endURL = regexp.MustCompile(`[a-z]+://[^\s]+$`) -) +// comment is an internal representation of AST comment entity with additional +// data attached. The latter is used for creating a full replacement for +// the line with issues. +type comment struct { + lines []string // unmodified lines from file + text string // concatenated `lines` with special parts excluded + start token.Position // position of the first symbol in comment + decl bool // whether comment is a special one (should not be checked) +} // Run runs this linter on the provided code. -func Run(file *ast.File, fset *token.FileSet, settings Settings) []Message { - msgs := []Message{} - - // Check all top-level comments - if settings.CheckAll { - for _, group := range file.Comments { - if ok, msg := check(fset, group); !ok { - msgs = append(msgs, msg) - } - } - return msgs +func Run(file *ast.File, fset *token.FileSet, settings Settings) ([]Issue, error) { + pf, err := newParsedFile(file, fset) + if err == errEmptyInput { + return nil, nil } - - // Check only declaration comments - for _, decl := range file.Decls { - switch d := decl.(type) { - case *ast.GenDecl: - if ok, msg := check(fset, d.Doc); !ok { - msgs = append(msgs, msg) - } - case *ast.FuncDecl: - if ok, msg := check(fset, d.Doc); !ok { - msgs = append(msgs, msg) - } - } + if err != nil { + return nil, fmt.Errorf("parse input file: %v", err) } - return msgs -} -func check(fset *token.FileSet, group *ast.CommentGroup) (ok bool, msg Message) { - if group == nil || len(group.List) == 0 { - return true, Message{} + exclude := make([]*regexp.Regexp, len(settings.Exclude)) + for i := 0; i < len(settings.Exclude); i++ { + exclude[i], err = regexp.Compile(settings.Exclude[i]) + if err != nil { + return nil, fmt.Errorf("invalid regexp: %v", err) + } } - // Check only top-level comments - if fset.Position(group.Pos()).Column > 1 { - return true, Message{} - } + comments := pf.getComments(settings.Scope, exclude) + issues := checkComments(comments, settings) + sortIssues(issues) - // Get last element from comment group - it can be either - // last (or single) line for "//"-comment, or multiline string - // for "/*"-comment - last := group.List[len(group.List)-1] + return issues, nil +} - line, ok := checkComment(last.Text) - if ok { - return true, Message{} +// Fix fixes all issues and returns new version of file content. +func Fix(path string, file *ast.File, fset *token.FileSet, settings Settings) ([]byte, error) { + // Read file + content, err := ioutil.ReadFile(path) // nolint: gosec + if err != nil { + return nil, fmt.Errorf("read file: %v", err) } - pos := fset.Position(last.Slash) - pos.Line += line - return false, Message{ - Pos: pos, - Message: noPeriodMessage, + if len(content) == 0 { + return nil, nil } -} -func checkComment(comment string) (line int, ok bool) { - // Check last line of "//"-comment - if strings.HasPrefix(comment, "//") { - comment = strings.TrimPrefix(comment, "//") - return 0, checkLastChar(comment) + issues, err := Run(file, fset, settings) + if err != nil { + return nil, fmt.Errorf("run linter: %v", err) } - // Skip cgo code blocks - // TODO: Find a better way to detect cgo code. 
- if strings.Contains(comment, "#include") || strings.Contains(comment, "#define") { - return 0, true + // slice -> map + m := map[int]Issue{} + for _, iss := range issues { + m[iss.Pos.Line] = iss } - // Check last non-empty line in multiline "/*"-comment block - lines := strings.Split(comment, "\n") - var i int - for i = len(lines) - 1; i >= 0; i-- { - if s := strings.TrimSpace(lines[i]); s == "*/" || s == "" { - continue + // Replace lines from issues + fixed := make([]byte, 0, len(content)) + for i, line := range strings.Split(string(content), "\n") { + newline := line + if iss, ok := m[i+1]; ok { + newline = iss.Replacement } - break + fixed = append(fixed, []byte(newline+"\n")...) } - comment = strings.TrimPrefix(lines[i], "/*") - comment = strings.TrimSuffix(comment, "*/") - return i, checkLastChar(comment) + fixed = fixed[:len(fixed)-1] // trim last "\n" + + return fixed, nil } -func checkLastChar(s string) bool { - // Don't check comments starting with space indentation - they may - // contain code examples, which shouldn't end with period - if strings.HasPrefix(s, " ") || strings.HasPrefix(s, " \t") || strings.HasPrefix(s, "\t") { - return true - } - // Skip cgo export tags: https://golang.org/cmd/cgo/#hdr-C_references_to_Go - if strings.HasPrefix(s, "export") { - return true +// Replace rewrites original file with it's fixed version. +func Replace(path string, file *ast.File, fset *token.FileSet, settings Settings) error { + info, err := os.Stat(path) + if err != nil { + return fmt.Errorf("check file: %v", err) } - s = strings.TrimSpace(s) - if tags.MatchString(s) || - hashtags.MatchString(s) || - endURL.MatchString(s) || - strings.HasPrefix(s, "+build") { - return true + mode := info.Mode() + + fixed, err := Fix(path, file, fset, settings) + if err != nil { + return fmt.Errorf("fix issues: %v", err) } - // Don't check empty lines - if s == "" { - return true + + if err := ioutil.WriteFile(path, fixed, mode); err != nil { + return fmt.Errorf("write file: %v", err) } - for _, ch := range lastChars { - if string(s[len(s)-1]) == ch { - return true + return nil +} + +// sortIssues sorts by filename, line and column. +func sortIssues(iss []Issue) { + sort.Slice(iss, func(i, j int) bool { + if iss[i].Pos.Filename != iss[j].Pos.Filename { + return iss[i].Pos.Filename < iss[j].Pos.Filename } - } - return false + if iss[i].Pos.Line != iss[j].Pos.Line { + return iss[i].Pos.Line < iss[j].Pos.Line + } + return iss[i].Pos.Column < iss[j].Pos.Column + }) } diff --git a/vendor/github.com/tetafro/godot/settings.go b/vendor/github.com/tetafro/godot/settings.go new file mode 100644 index 0000000..b71bf5d --- /dev/null +++ b/vendor/github.com/tetafro/godot/settings.go @@ -0,0 +1,29 @@ +package godot + +// Settings contains linter settings. +type Settings struct { + // Which comments to check (top level declarations, top level, all). + Scope Scope + + // Regexp for excluding particular comment lines from check. + Exclude []string + + // Check periods at the end of sentences. + Period bool + + // Check that first letter of each sentence is capital. + Capital bool +} + +// Scope sets which comments should be checked. +type Scope string + +// List of available check scopes. +const ( + // DeclScope is for top level declaration comments. + DeclScope Scope = "declarations" + // TopLevelScope is for all top level comments. + TopLevelScope Scope = "toplevel" + // AllScope is for all comments. 
+ AllScope Scope = "all" +) diff --git a/vendor/github.com/tomarrell/wrapcheck/LICENSE b/vendor/github.com/tomarrell/wrapcheck/LICENSE new file mode 100644 index 0000000..b5d9d30 --- /dev/null +++ b/vendor/github.com/tomarrell/wrapcheck/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Tom Arrell + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/tomarrell/wrapcheck/wrapcheck/wrapcheck.go b/vendor/github.com/tomarrell/wrapcheck/wrapcheck/wrapcheck.go new file mode 100644 index 0000000..e249a7e --- /dev/null +++ b/vendor/github.com/tomarrell/wrapcheck/wrapcheck/wrapcheck.go @@ -0,0 +1,226 @@ +package wrapcheck + +import ( + "go/ast" + "go/types" + + "golang.org/x/tools/go/analysis" +) + +var ignoredIDs = []string{ + "func fmt.Errorf(format string, a ...interface{}) error", + "func errors.New(text string) error", + "func errors.Unwrap(err error) error", +} + +var Analyzer = &analysis.Analyzer{ + Name: "wrapcheck", + Doc: "Checks that errors returned from external packages are wrapped", + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + for _, file := range pass.Files { + ast.Inspect(file, func(n ast.Node) bool { + if _, ok := n.(*ast.AssignStmt); ok { + return true + } + + ret, ok := n.(*ast.ReturnStmt) + if !ok { + return true + } + + if len(ret.Results) < 1 { + return true + } + + // Iterate over the values to be returned looking for errors + for _, expr := range ret.Results { + if !isError(pass.TypesInfo.TypeOf(expr)) { + continue + } + + ident, ok := expr.(*ast.Ident) + if !ok { + return true + } + + var ( + call *ast.CallExpr + ) + + // Attempt to find the most recent short assign + if shortAss := prevErrAssign(pass, file, ident); shortAss != nil { + call, ok = shortAss.Rhs[0].(*ast.CallExpr) + if !ok { + return true + } + } else if isUnresolved(file, ident) { + // TODO Check if the identifier is unresolved, and try to resolve it in + // another file. + return true + } else { + // Check for ValueSpec nodes in order to locate a possible var + // declaration. + if ident.Obj == nil { + return true + } + + vSpec, ok := ident.Obj.Decl.(*ast.ValueSpec) + if !ok { + // We couldn't find a short or var assign for this error return. + // This is an error. Where did this identifier come from? Possibly a + // function param. + // + // TODO decide how to handle this case, whether to follow function + // param back, or assert wrapping at call site. 
+ + return true + } + + if len(vSpec.Values) < 1 { + return true + } + + call, ok = vSpec.Values[0].(*ast.CallExpr) + if !ok { + return true + } + } + + // Make sure there is a call identified as producing the error being + // returned, otherwise just bail + if call == nil { + return true + } + + sel, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + return true + } + + // Check if the underlying type of the "x" in x.y.z is an interface, as + // errors returned from interface types should be wrapped. + if isInterface(pass, sel, ident) { + pass.Reportf(ident.NamePos, "error returned from interface method should be wrapped") + return true + } + + // Check whether the function being called comes from another package, + // as functions called across package boundaries which returns errors + // should be wrapped + if isFromOtherPkg(pass, sel, ident) { + pass.Reportf(ident.NamePos, "error returned from external package is unwrapped") + return true + } + } + + return true + }) + } + + return nil, nil +} + +// isInterface returns whether the function call is one defined on an interface. +func isInterface(pass *analysis.Pass, sel *ast.SelectorExpr, ident *ast.Ident) bool { + _, ok := pass.TypesInfo.TypeOf(sel.X).Underlying().(*types.Interface) + + return ok +} + +func isFromOtherPkg(pass *analysis.Pass, sel *ast.SelectorExpr, ident *ast.Ident) bool { + // The package of the function that we are calling which returns the error + fn := pass.TypesInfo.ObjectOf(sel.Sel) + + // If it's not a package name, then we should check the selector to make sure + // that it's an identifier from the same package + if pass.Pkg.Path() == fn.Pkg().Path() { + return false + } else if contains(ignoredIDs, fn.String()) { + return false + } + + return true +} + +// prevErrAssign traverses the AST of a file looking for the most recent +// assignment to an error declaration which is specified by the returnIdent +// identifier. +// +// This only returns short form assignments and reassignments, e.g. `:=` and +// `=`. This does not include `var` statements. This function will return nil if +// the only declaration is a `var` (aka ValueSpec) declaration. +func prevErrAssign(pass *analysis.Pass, file *ast.File, returnIdent *ast.Ident) *ast.AssignStmt { + // A slice containing all the assignments which contain an identifer + // referring to the source declaration of the error. This is to catch + // cases where err is defined once, and then reassigned multiple times + // within the same block. In these cases, we should check the method of + // the most recent call. + var assigns []*ast.AssignStmt + + // Find all assignments which have the same declaration + ast.Inspect(file, func(n ast.Node) bool { + if ass, ok := n.(*ast.AssignStmt); ok { + for _, expr := range ass.Lhs { + if !isError(pass.TypesInfo.TypeOf(expr)) { + continue + } + if assIdent, ok := expr.(*ast.Ident); ok { + if assIdent.Obj == nil || returnIdent.Obj == nil { + // If we can't find the Obj for one of the identifiers, just skip + // it. 
+ return true + } else if assIdent.Obj.Decl == returnIdent.Obj.Decl { + assigns = append(assigns, ass) + } + } + } + } + + return true + }) + + // Iterate through the assignments, comparing the token positions to + // find the assignment that directly precedes the return position + var mostRecentAssign *ast.AssignStmt + + for _, ass := range assigns { + if ass.Pos() > returnIdent.Pos() { + break + } + mostRecentAssign = ass + } + + return mostRecentAssign +} + +func contains(slice []string, el string) bool { + for _, s := range slice { + if s == el { + return true + } + } + + return false +} + +// isError returns whether or not the provided type interface is an error +func isError(typ types.Type) bool { + if typ == nil { + return false + } + + return typ.String() == "error" +} + +func isUnresolved(file *ast.File, ident *ast.Ident) bool { + for _, unresolvedIdent := range file.Unresolved { + if unresolvedIdent.Pos() == ident.Pos() { + return true + } + } + + return false +} diff --git a/vendor/github.com/tommy-muehle/go-mnd/.goreleaser.yml b/vendor/github.com/tommy-muehle/go-mnd/.goreleaser.yml deleted file mode 100644 index 0986ff2..0000000 --- a/vendor/github.com/tommy-muehle/go-mnd/.goreleaser.yml +++ /dev/null @@ -1,30 +0,0 @@ -builds: - - - main: ./cmd/mnd/main.go - binary: mnd - goos: - - windows - - darwin - - linux - goarch: - - amd64 - ldflags: -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.buildTime={{.Date}}`. - -archives: - - - format: tar.gz - format_overrides: - - goos: windows - format: zip - -brews: - - - name: mnd - github: - owner: tommy-muehle - name: homebrew-tap - folder: Formula - homepage: https://github.com/tommy-muehle/go-mnd - description: Magic number detector for Go - test: | - system "#{bin}/mnd --version" diff --git a/vendor/github.com/tommy-muehle/go-mnd/.travis.yml b/vendor/github.com/tommy-muehle/go-mnd/.travis.yml deleted file mode 100644 index fd76a2f..0000000 --- a/vendor/github.com/tommy-muehle/go-mnd/.travis.yml +++ /dev/null @@ -1,12 +0,0 @@ -language: go - -go: - - 1.13.x - - 1.12.x - - tip - -script: - - go test -v ./... - -notifications: - email: false diff --git a/vendor/github.com/tommy-muehle/go-mnd/README.md b/vendor/github.com/tommy-muehle/go-mnd/README.md deleted file mode 100644 index a85ed78..0000000 --- a/vendor/github.com/tommy-muehle/go-mnd/README.md +++ /dev/null @@ -1,111 +0,0 @@ -# go-mnd - Magic number detector for Golang - -A vet analyzer to detect magic numbers. - -> **What is a magic number?** -> A magic number is a numeric literal that is not defined as a constant, but which may change, and therefore can be hard to update. It's considered a bad programming practice to use numbers directly in any source code without an explanation. It makes programs harder to read, understand, and maintain. - -## Project status - -[![Build Status](https://travis-ci.org/tommy-muehle/go-mnd.svg?branch=master)](https://travis-ci.org/tommy-muehle/go-mnd) -[![Go Report Card](https://goreportcard.com/badge/github.com/tommy-muehle/go-mnd)](https://goreportcard.com/report/github.com/tommy-muehle/go-mnd) - -## Install - -This analyzer requires Golang in version >= 1.12 because it's depends on the **go/analysis** API. - -``` -go get github.com/tommy-muehle/go-mnd/cmd/mnd -``` - -To install with [Homebrew](https://brew.sh/), run: - -``` -brew tap tommy-muehle/tap && brew install tommy-muehle/tap/mnd -``` - -On Windows download the [latest release](https://github.com/tommy-muehle/go-mnd/releases). 
- -## Usage - -[![asciicast](https://asciinema.org/a/231021.svg)](https://asciinema.org/a/231021) - -``` -go vet -vettool $(which mnd) ./... -``` - -or directly - -``` -mnd ./... -``` - -The ```-checks``` option let's you define a comma separated list of checks. - -The ```-ignored-numbers``` option let's you define a comma separated list of numbers to ignore. - -The ```-excludes``` option let's you define a comma separated list of regexp patterns to exclude. - -## Checks - -By default this detector analyses arguments, assigns, cases, conditions, operations and return statements. - -* argument - -``` -t := http.StatusText(200) -``` - -* assign - -``` -c := &http.Client{ - Timeout: 5 * time.Second, -} -``` - -* case - -``` -switch x { - case 3: -} -``` - -* condition - -``` -if x > 7 { -} -``` - -* operation - -``` -var x, y int -y = 10 * x -``` - -* return - -``` -return 3 -``` - -## Excludes - -By default the numbers 0 and 1 as well as test files are excluded! - -### Further known excludes - -The function "Date" in the "Time" package. - -``` -t := time.Date(2017, time.September, 26, 12, 13, 14, 0, time.UTC) -``` - -Additional custom excludes can be defined via option flag. - -## License - -The MIT License (MIT). Please see [LICENSE](LICENSE) for more information. diff --git a/vendor/github.com/tommy-muehle/go-mnd/go.mod b/vendor/github.com/tommy-muehle/go-mnd/go.mod deleted file mode 100644 index 5119a7c..0000000 --- a/vendor/github.com/tommy-muehle/go-mnd/go.mod +++ /dev/null @@ -1,11 +0,0 @@ -module github.com/tommy-muehle/go-mnd - -go 1.12 - -require ( - github.com/davecgh/go-spew v1.1.1 // indirect - github.com/stretchr/objx v0.1.1 // indirect - github.com/stretchr/testify v1.3.0 - golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4 // indirect - golang.org/x/tools v0.0.0-20190221204921-83362c3779f5 -) diff --git a/vendor/github.com/tommy-muehle/go-mnd/go.sum b/vendor/github.com/tommy-muehle/go-mnd/go.sum deleted file mode 100644 index bdc021a..0000000 --- a/vendor/github.com/tommy-muehle/go-mnd/go.sum +++ /dev/null @@ -1,18 +0,0 @@ -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= -github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/tools v0.0.0-20190221204921-83362c3779f5 
h1:ev5exjGDsOo0NPTB0qdCcE53BfWl1IICJlhgXgfT9fM= -golang.org/x/tools v0.0.0-20190221204921-83362c3779f5/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= -google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= diff --git a/vendor/github.com/tommy-muehle/go-mnd/.editorconfig b/vendor/github.com/tommy-muehle/go-mnd/v2/.editorconfig similarity index 55% rename from vendor/github.com/tommy-muehle/go-mnd/.editorconfig rename to vendor/github.com/tommy-muehle/go-mnd/v2/.editorconfig index 316b8ca..fe2c20f 100644 --- a/vendor/github.com/tommy-muehle/go-mnd/.editorconfig +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/.editorconfig @@ -9,4 +9,13 @@ insert_final_newline = true trim_trailing_whitespace = true [*.md] -trim_trailing_whitespace = false \ No newline at end of file +trim_trailing_whitespace = false + +[*.json] +indent_size = 2 + +[*.{yaml,yml}] +indent_size = 2 + +[Makefile] +indent_style = tab diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/.gitattributes b/vendor/github.com/tommy-muehle/go-mnd/v2/.gitattributes new file mode 100644 index 0000000..0053581 --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/.gitattributes @@ -0,0 +1,9 @@ +/.gitattributes export-ignore +/.gitignore export-ignore +/.editorconfig export-ignore +/.goreleaser.yml export-ignore +/.github/ export-ignore +/examples/ export-ignore +/testdata/ export-ignore +/tools/ export-ignore +/Makefile export-ignore diff --git a/vendor/github.com/tommy-muehle/go-mnd/.gitignore b/vendor/github.com/tommy-muehle/go-mnd/v2/.gitignore similarity index 50% rename from vendor/github.com/tommy-muehle/go-mnd/.gitignore rename to vendor/github.com/tommy-muehle/go-mnd/v2/.gitignore index edd9d60..abc11b3 100644 --- a/vendor/github.com/tommy-muehle/go-mnd/.gitignore +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/.gitignore @@ -1,2 +1,3 @@ build/ dist/ +coverage.txt diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/.goreleaser.yml b/vendor/github.com/tommy-muehle/go-mnd/v2/.goreleaser.yml new file mode 100644 index 0000000..e0a87b8 --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/.goreleaser.yml @@ -0,0 +1,27 @@ +builds: + - main: ./cmd/mnd/main.go + binary: mnd + goos: + - windows + - darwin + - linux + goarch: + - amd64 + ldflags: -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.buildTime={{.Date}}`. + +archives: + - format: tar.gz + format_overrides: + - goos: windows + format: zip + +brews: + - name: mnd + github: + owner: tommy-muehle + name: homebrew-tap + folder: Formula + homepage: https://github.com/tommy-muehle/go-mnd + description: Magic number detector for Go + test: | + system "#{bin}/mnd --version" diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/Dockerfile b/vendor/github.com/tommy-muehle/go-mnd/v2/Dockerfile new file mode 100644 index 0000000..bb8e2b7 --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/Dockerfile @@ -0,0 +1,17 @@ +ARG GO_VERSION=1.15 + +FROM golang:${GO_VERSION}-alpine AS builder +RUN apk add --update --no-cache make git curl gcc libc-dev +RUN mkdir -p /build +WORKDIR /build +COPY . 
/build/ +RUN go mod download +RUN go build -o go-mnd cmd/mnd/main.go + +FROM golang:${GO_VERSION}-alpine +RUN apk add --update --no-cache bash git gcc libc-dev +COPY --from=builder /build/go-mnd /bin/go-mnd +COPY entrypoint.sh /bin/entrypoint.sh +VOLUME /app +WORKDIR /app +ENTRYPOINT ["/bin/entrypoint.sh"] diff --git a/vendor/github.com/tommy-muehle/go-mnd/LICENSE b/vendor/github.com/tommy-muehle/go-mnd/v2/LICENSE similarity index 100% rename from vendor/github.com/tommy-muehle/go-mnd/LICENSE rename to vendor/github.com/tommy-muehle/go-mnd/v2/LICENSE diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/Makefile b/vendor/github.com/tommy-muehle/go-mnd/v2/Makefile new file mode 100644 index 0000000..b8a3231 --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/Makefile @@ -0,0 +1,32 @@ +GIT_TAG?= $(shell git describe --abbrev=0) + +GO_VERSION = 1.15 +BUILDFLAGS := '-w -s' + +IMAGE_REPO = "tommymuehle" +BIN = "go-mnd" + +clean: + rm -rf build dist coverage.txt + +test: + go test -race ./... + +test-coverage: + go test -race -coverprofile=coverage.txt -covermode=atomic -coverpkg=./checks,./config + +build: + go build -o build/$(BIN) cmd/mnd/main.go + +image: + @echo "Building the Docker image..." + docker build --rm -t $(IMAGE_REPO)/$(BIN):$(GIT_TAG) --build-arg GO_VERSION=$(GO_VERSION) . + docker tag $(IMAGE_REPO)/$(BIN):$(GIT_TAG) $(IMAGE_REPO)/$(BIN):$(GIT_TAG) + docker tag $(IMAGE_REPO)/$(BIN):$(GIT_TAG) $(IMAGE_REPO)/$(BIN):latest + +image-push: image + @echo "Pushing the Docker image..." + docker push $(IMAGE_REPO)/$(BIN):$(GIT_TAG) + docker push $(IMAGE_REPO)/$(BIN):latest + +.PHONY: clean test test-coverage build image image-push diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/README.md b/vendor/github.com/tommy-muehle/go-mnd/v2/README.md new file mode 100644 index 0000000..fe679e1 --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/README.md @@ -0,0 +1,230 @@ +# go-mnd - Magic number detector for Golang + + + +A vet analyzer to detect magic numbers. + +> **What is a magic number?** +> A magic number is a numeric literal that is not defined as a constant, but which may change, and therefore can be hard to update. It's considered a bad programming practice to use numbers directly in any source code without an explanation. It makes programs harder to read, understand, and maintain. + +## Project status + +![CI](https://github.com/tommy-muehle/go-mnd/workflows/CI/badge.svg) +[![Go Report Card](https://goreportcard.com/badge/github.com/tommy-muehle/go-mnd)](https://goreportcard.com/report/github.com/tommy-muehle/go-mnd) +[![codecov](https://codecov.io/gh/tommy-muehle/go-mnd/branch/master/graph/badge.svg)](https://codecov.io/gh/tommy-muehle/go-mnd) + +## Install + +### Local + +This analyzer requires Golang in version >= 1.12 because it's depends on the **go/analysis** API. + +``` +go get -u github.com/tommy-muehle/go-mnd/cmd/mnd +``` + +### Github action + +You can run go-mnd as a GitHub action as follows: + +``` +name: Example workflow +on: + push: + branches: + - master + pull_request: + branches: + - master +jobs: + tests: + runs-on: ubuntu-latest + env: + GO111MODULE: on + steps: + - name: Checkout Source + uses: actions/checkout@v2 + - name: Run go-mnd + uses: tommy-muehle/go-mnd@master + with: + args: ./... 
+``` + +### GitLab CI + +You can run go-mnd inside a GitLab CI pipeline as follows: + +``` +stages: + - lint + +go:lint:mnd: + stage: lint + needs: [] + image: golang:latest + before_script: + - go get -u github.com/tommy-muehle/go-mnd/cmd/mnd + - go mod tidy + - go mod vendor + script: + - go vet -vettool $(which mnd) ./... +``` + +### Homebrew + +To install with [Homebrew](https://brew.sh/), run: + +``` +brew tap tommy-muehle/tap && brew install tommy-muehle/tap/mnd +``` + +### Docker + +To get the latest available Docker image: + +``` +docker pull tommymuehle/go-mnd +``` + +### Windows + +On Windows download the [latest release](https://github.com/tommy-muehle/go-mnd/releases). + +## Usage + +[![asciicast](https://asciinema.org/a/231021.svg)](https://asciinema.org/a/231021) + +``` +go vet -vettool $(which mnd) ./... +``` + +or directly + +``` +mnd ./... +``` + +or via Docker + +``` +docker run --rm -v "$PWD":/app -w /app tommymuehle/go-mnd:latest ./... +``` + +## Options + +The ```-checks``` option let's you define a comma separated list of checks. + +The ```-ignored-numbers``` option let's you define a comma separated list of numbers to ignore. +For example: `-ignored-numbers=1000,10_000,3.14159264` + +The ```-ignored-functions``` option let's you define a comma separated list of function name regexp patterns to exclude. +For example: `-ignored-functions=math.*,http.StatusText` + +The ```-ignored-files``` option let's you define a comma separated list of filename regexp patterns to exclude. +For example: `-ignored-files=magic_.*.go,.*_numbers.go` + +## Checks + +By default this detector analyses arguments, assigns, cases, conditions, operations and return statements. + +* argument + +``` +t := http.StatusText(200) +``` + +* assign + +``` +c := &http.Client{ + Timeout: 5 * time.Second, +} +``` + +* case + +``` +switch x { + case 3: +} +``` + +* condition + +``` +if x > 7 { +} +``` + +* operation + +``` +var x, y int +y = 10 * x +``` + +* return + +``` +return 3 +``` + +## Excludes + +By default the numbers 0 and 1 as well as test files are excluded! + +### Further known excludes + +The function "Date" in the "Time" package. + +``` +t := time.Date(2017, time.September, 26, 12, 13, 14, 0, time.UTC) +``` + +Additional custom excludes can be defined via option flag. + +## Development + +### Build + +You can build the binary with: + +``` +make +``` + +### Tests + +You can run all unit tests using: + +``` +make test +``` + +And with coverage report: + +``` +make test-coverage +``` + +### Docker image + +You can also build locally the docker image by using the command: + +``` +make image +``` + +## Stickers + +

+ [Images: "Stickers image", "Sticker image"]

+ +Just drop me a message via Twitter DM or email if you want some go-mnd stickers +for you or your Gopher usergroup. + +## License + +The MIT License (MIT). Please see [LICENSE](LICENSE) for more information. diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/action.yml b/vendor/github.com/tommy-muehle/go-mnd/v2/action.yml new file mode 100644 index 0000000..3a1f8eb --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/action.yml @@ -0,0 +1,19 @@ +name: 'go-mnd' +description: 'Runs the Golang magic number detector' +author: '@tommy-muehle' + +inputs: + args: + description: 'Arguments for go-mnd' + required: true + default: '-h' + +runs: + using: 'docker' + image: 'Dockerfile' + args: + - ${{ inputs.args }} + +branding: + icon: 'check-circle' + color: 'blue' diff --git a/vendor/github.com/tommy-muehle/go-mnd/analyzer.go b/vendor/github.com/tommy-muehle/go-mnd/v2/analyzer.go similarity index 70% rename from vendor/github.com/tommy-muehle/go-mnd/analyzer.go rename to vendor/github.com/tommy-muehle/go-mnd/v2/analyzer.go index 9930170..9153e0b 100644 --- a/vendor/github.com/tommy-muehle/go-mnd/analyzer.go +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/analyzer.go @@ -3,9 +3,10 @@ package magic_numbers import ( "flag" "go/ast" + "strings" - "github.com/tommy-muehle/go-mnd/checks" - "github.com/tommy-muehle/go-mnd/config" + "github.com/tommy-muehle/go-mnd/v2/checks" + "github.com/tommy-muehle/go-mnd/v2/config" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" @@ -30,8 +31,10 @@ type Checker interface { func options() flag.FlagSet { options := flag.NewFlagSet("", flag.ExitOnError) - options.String("excludes", "", "comma separated list of patterns to exclude from analysis") - options.String("ignored-numbers", "", "comma separated list of numbers excluded from analysis") + options.String("excludes", "", "deprecated: use ignored-files instead") + options.String("ignored-files", "", "comma separated list of file patterns to exclude from analysis") + options.String("ignored-functions", "", "comma separated list of function patterns to exclude from analysis") + options.String("ignored-numbers", "", "comma separated list of numbers to exclude from analysis") options.String( "checks", checks.ArgumentCheck+","+ @@ -47,9 +50,24 @@ func options() flag.FlagSet { } func run(pass *analysis.Pass) (interface{}, error) { + var ignoredFiles string + + ignoredFiles = strings.Join( + []string{ + pass.Analyzer.Flags.Lookup("excludes").Value.String(), // is deprecated + pass.Analyzer.Flags.Lookup("ignored-files").Value.String(), + }, + ",", + ) + + if ignoredFiles == "," { + ignoredFiles = "" + } + conf := config.WithOptions( config.WithCustomChecks(pass.Analyzer.Flags.Lookup("checks").Value.String()), - config.WithExcludes(pass.Analyzer.Flags.Lookup("excludes").Value.String()), + config.WithIgnoredFiles(ignoredFiles), + config.WithIgnoredFunctions(pass.Analyzer.Flags.Lookup("ignored-functions").Value.String()), config.WithIgnoredNumbers(pass.Analyzer.Flags.Lookup("ignored-numbers").Value.String()), ) @@ -57,18 +75,23 @@ func run(pass *analysis.Pass) (interface{}, error) { if conf.IsCheckEnabled(checks.ArgumentCheck) { checker = append(checker, checks.NewArgumentAnalyzer(pass, conf)) } + if conf.IsCheckEnabled(checks.CaseCheck) { checker = append(checker, checks.NewCaseAnalyzer(pass, conf)) } + if conf.IsCheckEnabled(checks.ConditionCheck) { checker = append(checker, checks.NewConditionAnalyzer(pass, conf)) } + if conf.IsCheckEnabled(checks.OperationCheck) { checker = 
append(checker, checks.NewOperationAnalyzer(pass, conf)) } + if conf.IsCheckEnabled(checks.ReturnCheck) { checker = append(checker, checks.NewReturnAnalyzer(pass, conf)) } + if conf.IsCheckEnabled(checks.AssignCheck) { checker = append(checker, checks.NewAssignAnalyzer(pass, conf)) } @@ -76,8 +99,9 @@ func run(pass *analysis.Pass) (interface{}, error) { i := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) for _, c := range checker { + c := c i.Preorder(c.NodeFilter(), func(node ast.Node) { - for _, exclude := range conf.Excludes { + for _, exclude := range conf.IgnoredFiles { if exclude.MatchString(pass.Fset.Position(node.Pos()).Filename) { return } diff --git a/vendor/github.com/tommy-muehle/go-mnd/checks/argument.go b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/argument.go similarity index 64% rename from vendor/github.com/tommy-muehle/go-mnd/checks/argument.go rename to vendor/github.com/tommy-muehle/go-mnd/v2/checks/argument.go index 34cc1d0..f0c5d71 100644 --- a/vendor/github.com/tommy-muehle/go-mnd/checks/argument.go +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/argument.go @@ -3,19 +3,19 @@ package checks import ( "go/ast" "go/token" + "strconv" + "sync" "golang.org/x/tools/go/analysis" - config "github.com/tommy-muehle/go-mnd/config" + "github.com/tommy-muehle/go-mnd/v2/config" ) const ArgumentCheck = "argument" -// Known excludes for the argument check. -var argumentExcludes = map[string]string{ - // package: function - "time": "Date", // https://golang.org/pkg/time/#Date -} +// constantDefinitions is used to save lines (by number) which contain a constant definition. +var constantDefinitions = map[string]bool{} +var mu sync.RWMutex type ArgumentAnalyzer struct { config *config.Config @@ -31,21 +31,47 @@ func NewArgumentAnalyzer(pass *analysis.Pass, config *config.Config) *ArgumentAn func (a *ArgumentAnalyzer) NodeFilter() []ast.Node { return []ast.Node{ + (*ast.GenDecl)(nil), (*ast.CallExpr)(nil), } } func (a *ArgumentAnalyzer) Check(n ast.Node) { - expr, ok := n.(*ast.CallExpr) - if !ok { - return + switch expr := n.(type) { + case *ast.CallExpr: + a.checkCallExpr(expr) + case *ast.GenDecl: + if expr.Tok == token.CONST { + pos := a.pass.Fset.Position(expr.TokPos) + + mu.Lock() + constantDefinitions[pos.Filename+":"+strconv.Itoa(pos.Line)] = true + mu.Unlock() + } } +} + +func (a *ArgumentAnalyzer) checkCallExpr(expr *ast.CallExpr) { + pos := a.pass.Fset.Position(expr.Pos()) - // Don't check if package and function combination is excluded - if s, ok := expr.Fun.(*ast.SelectorExpr); ok && a.isExcluded(s) { + mu.RLock() + ok := constantDefinitions[pos.Filename+":"+strconv.Itoa(pos.Line)] + mu.RUnlock() + + if ok { return } + switch f := expr.Fun.(type) { + case *ast.SelectorExpr: + switch prefix := f.X.(type) { + case *ast.Ident: + if a.config.IsIgnoredFunction(prefix.Name + "." 
+ f.Sel.Name) { + return + } + } + } + for i, arg := range expr.Args { switch x := arg.(type) { case *ast.BasicLit: @@ -71,21 +97,6 @@ func (a *ArgumentAnalyzer) Check(n ast.Node) { } } -func (a *ArgumentAnalyzer) isExcluded(expr *ast.SelectorExpr) bool { - var p string - - switch x := expr.X.(type) { - case *ast.Ident: - p = x.Name - } - - if v, ok := argumentExcludes[p]; ok && v == expr.Sel.Name { - return true - } - - return false -} - func (a *ArgumentAnalyzer) checkBinaryExpr(expr *ast.BinaryExpr) { switch x := expr.X.(type) { case *ast.BasicLit: diff --git a/vendor/github.com/tommy-muehle/go-mnd/checks/assign.go b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/assign.go similarity index 62% rename from vendor/github.com/tommy-muehle/go-mnd/checks/assign.go rename to vendor/github.com/tommy-muehle/go-mnd/v2/checks/assign.go index 8699ce1..f930d08 100644 --- a/vendor/github.com/tommy-muehle/go-mnd/checks/assign.go +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/assign.go @@ -6,7 +6,7 @@ import ( "golang.org/x/tools/go/analysis" - config "github.com/tommy-muehle/go-mnd/config" + config "github.com/tommy-muehle/go-mnd/v2/config" ) const AssignCheck = "assign" @@ -26,22 +26,42 @@ func NewAssignAnalyzer(pass *analysis.Pass, config *config.Config) *AssignAnalyz func (a *AssignAnalyzer) NodeFilter() []ast.Node { return []ast.Node{ (*ast.KeyValueExpr)(nil), + (*ast.AssignStmt)(nil), } } func (a *AssignAnalyzer) Check(n ast.Node) { - expr, ok := n.(*ast.KeyValueExpr) - if !ok { - return + switch expr := n.(type) { + case *ast.KeyValueExpr: + switch x := expr.Value.(type) { + case *ast.BasicLit: + if a.isMagicNumber(x) { + a.pass.Reportf(x.Pos(), reportMsg, x.Value, AssignCheck) + } + case *ast.BinaryExpr: + a.checkBinaryExpr(x) + } + case *ast.AssignStmt: + for _, e := range expr.Rhs { + switch y := e.(type) { + case *ast.UnaryExpr: + a.checkUnaryExpr(y) + case *ast.BinaryExpr: + switch x := y.Y.(type) { + case *ast.UnaryExpr: + a.checkUnaryExpr(x) + } + } + } } +} - switch x := expr.Value.(type) { +func (a *AssignAnalyzer) checkUnaryExpr(expr *ast.UnaryExpr) { + switch x := expr.X.(type) { case *ast.BasicLit: if a.isMagicNumber(x) { a.pass.Reportf(x.Pos(), reportMsg, x.Value, AssignCheck) } - case *ast.BinaryExpr: - a.checkBinaryExpr(x) } } diff --git a/vendor/github.com/tommy-muehle/go-mnd/checks/case.go b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/case.go similarity index 96% rename from vendor/github.com/tommy-muehle/go-mnd/checks/case.go rename to vendor/github.com/tommy-muehle/go-mnd/v2/checks/case.go index d7993ed..228cab4 100644 --- a/vendor/github.com/tommy-muehle/go-mnd/checks/case.go +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/case.go @@ -6,7 +6,7 @@ import ( "golang.org/x/tools/go/analysis" - config "github.com/tommy-muehle/go-mnd/config" + config "github.com/tommy-muehle/go-mnd/v2/config" ) const CaseCheck = "case" diff --git a/vendor/github.com/tommy-muehle/go-mnd/checks/checks.go b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/checks.go similarity index 100% rename from vendor/github.com/tommy-muehle/go-mnd/checks/checks.go rename to vendor/github.com/tommy-muehle/go-mnd/v2/checks/checks.go diff --git a/vendor/github.com/tommy-muehle/go-mnd/checks/condition.go b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/condition.go similarity index 95% rename from vendor/github.com/tommy-muehle/go-mnd/checks/condition.go rename to vendor/github.com/tommy-muehle/go-mnd/v2/checks/condition.go index b61bc0d..20f892e 100644 --- 
a/vendor/github.com/tommy-muehle/go-mnd/checks/condition.go +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/condition.go @@ -6,7 +6,7 @@ import ( "golang.org/x/tools/go/analysis" - config "github.com/tommy-muehle/go-mnd/config" + config "github.com/tommy-muehle/go-mnd/v2/config" ) const ConditionCheck = "condition" diff --git a/vendor/github.com/tommy-muehle/go-mnd/checks/operation.go b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/operation.go similarity index 71% rename from vendor/github.com/tommy-muehle/go-mnd/checks/operation.go rename to vendor/github.com/tommy-muehle/go-mnd/v2/checks/operation.go index f1f8cf4..ddf3a03 100644 --- a/vendor/github.com/tommy-muehle/go-mnd/checks/operation.go +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/operation.go @@ -6,7 +6,7 @@ import ( "golang.org/x/tools/go/analysis" - config "github.com/tommy-muehle/go-mnd/config" + config "github.com/tommy-muehle/go-mnd/v2/config" ) const OperationCheck = "operation" @@ -26,28 +26,32 @@ func NewOperationAnalyzer(pass *analysis.Pass, config *config.Config) *Operation func (a *OperationAnalyzer) NodeFilter() []ast.Node { return []ast.Node{ (*ast.AssignStmt)(nil), + (*ast.ParenExpr)(nil), } } func (a *OperationAnalyzer) Check(n ast.Node) { - stmt, ok := n.(*ast.AssignStmt) - if !ok { - return - } - - for _, expr := range stmt.Rhs { - switch x := expr.(type) { + switch expr := n.(type) { + case *ast.ParenExpr: + switch x := expr.X.(type) { case *ast.BinaryExpr: - switch xExpr := x.X.(type) { - case *ast.BinaryExpr: - a.checkBinaryExpr(xExpr) - } - switch yExpr := x.Y.(type) { + a.checkBinaryExpr(x) + } + case *ast.AssignStmt: + for _, y := range expr.Rhs { + switch x := y.(type) { case *ast.BinaryExpr: - a.checkBinaryExpr(yExpr) - } + switch xExpr := x.X.(type) { + case *ast.BinaryExpr: + a.checkBinaryExpr(xExpr) + } + switch yExpr := x.Y.(type) { + case *ast.BinaryExpr: + a.checkBinaryExpr(yExpr) + } - a.checkBinaryExpr(x) + a.checkBinaryExpr(x) + } } } } diff --git a/vendor/github.com/tommy-muehle/go-mnd/checks/return.go b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/return.go similarity index 66% rename from vendor/github.com/tommy-muehle/go-mnd/checks/return.go rename to vendor/github.com/tommy-muehle/go-mnd/v2/checks/return.go index be7f546..bc53940 100644 --- a/vendor/github.com/tommy-muehle/go-mnd/checks/return.go +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/return.go @@ -6,7 +6,7 @@ import ( "golang.org/x/tools/go/analysis" - config "github.com/tommy-muehle/go-mnd/config" + config "github.com/tommy-muehle/go-mnd/v2/config" ) const ReturnCheck = "return" @@ -41,6 +41,24 @@ func (a *ReturnAnalyzer) Check(n ast.Node) { if a.isMagicNumber(x) { a.pass.Reportf(x.Pos(), reportMsg, x.Value, ReturnCheck) } + case *ast.BinaryExpr: + a.checkBinaryExpr(x) + } + } +} + +func (a *ReturnAnalyzer) checkBinaryExpr(expr *ast.BinaryExpr) { + switch x := expr.X.(type) { + case *ast.BasicLit: + if a.isMagicNumber(x) { + a.pass.Reportf(x.Pos(), reportMsg, x.Value, ReturnCheck) + } + } + + switch y := expr.Y.(type) { + case *ast.BasicLit: + if a.isMagicNumber(y) { + a.pass.Reportf(y.Pos(), reportMsg, y.Value, ReturnCheck) } } } diff --git a/vendor/github.com/tommy-muehle/go-mnd/config/config.go b/vendor/github.com/tommy-muehle/go-mnd/v2/config/config.go similarity index 50% rename from vendor/github.com/tommy-muehle/go-mnd/config/config.go rename to vendor/github.com/tommy-muehle/go-mnd/v2/config/config.go index 35c82ea..a4681e3 100644 --- a/vendor/github.com/tommy-muehle/go-mnd/config/config.go +++ 
b/vendor/github.com/tommy-muehle/go-mnd/v2/config/config.go @@ -6,9 +6,10 @@ import ( ) type Config struct { - Checks map[string]bool - IgnoredNumbers map[string]struct{} - Excludes []*regexp.Regexp + Checks map[string]bool + IgnoredNumbers map[string]struct{} + IgnoredFunctions []*regexp.Regexp + IgnoredFiles []*regexp.Regexp } type Option func(config *Config) @@ -17,31 +18,50 @@ func DefaultConfig() *Config { return &Config{ Checks: map[string]bool{}, IgnoredNumbers: map[string]struct{}{ - "0": {}, - "1": {}, + "0": {}, + "0.0": {}, + "1": {}, + "1.0": {}, }, - Excludes: []*regexp.Regexp{ + IgnoredFiles: []*regexp.Regexp{ regexp.MustCompile(`_test.go`), }, + IgnoredFunctions: []*regexp.Regexp{ + regexp.MustCompile(`time.Date`), + }, } } func WithOptions(options ...Option) *Config { c := DefaultConfig() + for _, option := range options { option(c) } + return c } -func WithExcludes(excludes string) Option { +func WithIgnoredFunctions(excludes string) Option { + return func(config *Config) { + if excludes == "" { + return + } + + for _, exclude := range strings.Split(excludes, ",") { + config.IgnoredFunctions = append(config.IgnoredFunctions, regexp.MustCompile(exclude)) + } + } +} + +func WithIgnoredFiles(excludes string) Option { return func(config *Config) { if excludes == "" { return } for _, exclude := range strings.Split(excludes, ",") { - config.Excludes = append(config.Excludes, regexp.MustCompile(exclude)) + config.IgnoredFiles = append(config.IgnoredFiles, regexp.MustCompile(exclude)) } } } @@ -53,7 +73,7 @@ func WithIgnoredNumbers(numbers string) Option { } for _, number := range strings.Split(numbers, ",") { - config.IgnoredNumbers[number] = struct{}{} + config.IgnoredNumbers[config.removeDigitSeparator(number)] = struct{}{} } } } @@ -79,6 +99,20 @@ func (c *Config) IsCheckEnabled(name string) bool { } func (c *Config) IsIgnoredNumber(number string) bool { - _, ok := c.IgnoredNumbers[number] + _, ok := c.IgnoredNumbers[c.removeDigitSeparator(number)] return ok } + +func (c *Config) IsIgnoredFunction(f string) bool { + for _, pattern := range c.IgnoredFunctions { + if pattern.MatchString(f) { + return true + } + } + + return false +} + +func (c *Config) removeDigitSeparator(number string) string { + return strings.Replace(number, "_", "", -1) +} diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/entrypoint.sh b/vendor/github.com/tommy-muehle/go-mnd/v2/entrypoint.sh new file mode 100644 index 0000000..cabc2f6 --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/entrypoint.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash + +# Expand the arguments into an array of strings. This is required because the GitHub action +# provides all arguments concatenated as a single string. 
+ARGS=("$@") + +/bin/go-mnd "${ARGS[*]}" diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/go.mod b/vendor/github.com/tommy-muehle/go-mnd/v2/go.mod new file mode 100644 index 0000000..8e7c18e --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/go.mod @@ -0,0 +1,9 @@ +module github.com/tommy-muehle/go-mnd/v2 + +go 1.12 + +require ( + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/stretchr/testify v1.3.0 + golang.org/x/tools v0.0.0-20200329025819-fd4102a86c65 +) diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/go.sum b/vendor/github.com/tommy-muehle/go-mnd/v2/go.sum new file mode 100644 index 0000000..991a437 --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/go.sum @@ -0,0 +1,28 @@ +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/mod v0.2.0 h1:KU7oHjnv3XNWfa5COkzUifxZmxp1TyI7ImMXqFxLwvQ= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200329025819-fd4102a86c65 h1:1KSbntBked74wYsKq0jzXYy7ZwcjAUtrl7EmPE97Iiw= +golang.org/x/tools v0.0.0-20200329025819-fd4102a86c65/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git 
a/vendor/github.com/ultraware/funlen/README.md b/vendor/github.com/ultraware/funlen/README.md index ca7e9a0..aaf3485 100644 --- a/vendor/github.com/ultraware/funlen/README.md +++ b/vendor/github.com/ultraware/funlen/README.md @@ -2,68 +2,8 @@ Funlen is a linter that checks for long functions. It can checks both on the number of lines and the number of statements. -The default limits are 50 lines and 35 statements. You can configure these with the `-l` and `-s` flags. - -Example code: - -```go -package main - -import "fmt" - -func fiveStatements() { - fmt.Println(1) - fmt.Println(2) - fmt.Println(3) - fmt.Println(4) - fmt.Println(5) -} - -func sevenLines() { - fmt.Println(1) - - fmt.Println(2) - - fmt.Println(3) - - fmt.Println(4) -} -``` - -Reults in: - -``` -$ funlen -l=6 -s=4 . -main.go:5:6:Function 'fiveStatements' has too many statements (5 > 4) -main.go:13:6:Function 'sevenLines' is too long (7 > 6) -``` +The default limits are 60 lines and 40 statements. You can configure these. ## Installation guide -```bash -go get git.ultraware.nl/NiseVoid/funlen -``` - -### Gometalinter - -You can add funlen to gometalinter and enable it. - -`.gometalinter.json`: - -```json -{ - "Linters": { - "funlen": "funlen -l=50 -s=35:PATH:LINE:COL:MESSAGE" - }, - - "Enable": [ - "funlen" - ] -} -``` - -commandline: - -```bash -gometalinter --linter "funlen:funlen -l=50 -s=35:PATH:LINE:COL:MESSAGE" --enable "funlen" -``` +Funlen is included in [golangci-lint](https://github.com/golangci/golangci-lint/). Install it and enable funlen. diff --git a/vendor/github.com/ultraware/funlen/main.go b/vendor/github.com/ultraware/funlen/main.go index 19e48e2..2ba3530 100644 --- a/vendor/github.com/ultraware/funlen/main.go +++ b/vendor/github.com/ultraware/funlen/main.go @@ -7,8 +7,10 @@ import ( "reflect" ) -const defaultLineLimit = 60 -const defaultStmtLimit = 40 +const ( + defaultLineLimit = 60 + defaultStmtLimit = 40 +) // Run runs this linter on the provided code func Run(file *ast.File, fset *token.FileSet, lineLimit, stmtLimit int) []Message { @@ -26,13 +28,17 @@ func Run(file *ast.File, fset *token.FileSet, lineLimit, stmtLimit int) []Messag continue } - if stmts := parseStmts(decl.Body.List); stmts > stmtLimit { - msgs = append(msgs, makeStmtMessage(fset, decl.Name, stmts, stmtLimit)) - continue + if stmtLimit > 0 { + if stmts := parseStmts(decl.Body.List); stmts > stmtLimit { + msgs = append(msgs, makeStmtMessage(fset, decl.Name, stmts, stmtLimit)) + continue + } } - if lines := getLines(fset, decl); lines > lineLimit { - msgs = append(msgs, makeLineMessage(fset, decl.Name, lines, lineLimit)) + if lineLimit > 0 { + if lines := getLines(fset, decl); lines > lineLimit { + msgs = append(msgs, makeLineMessage(fset, decl.Name, lines, lineLimit)) + } } } diff --git a/vendor/golang.org/x/mod/modfile/read.go b/vendor/golang.org/x/mod/modfile/read.go index c1f2008..2a961ca 100644 --- a/vendor/golang.org/x/mod/modfile/read.go +++ b/vendor/golang.org/x/mod/modfile/read.go @@ -477,9 +477,17 @@ func (in *input) startToken() { // endToken marks the end of an input token. // It records the actual token string in tok.text. +// A single trailing newline (LF or CRLF) will be removed from comment tokens. 
func (in *input) endToken(kind tokenKind) { in.token.kind = kind text := string(in.tokenStart[:len(in.tokenStart)-len(in.remaining)]) + if kind.isComment() { + if strings.HasSuffix(text, "\r\n") { + text = text[:len(text)-2] + } else { + text = strings.TrimSuffix(text, "\n") + } + } in.token.text = text in.token.endPos = in.pos } diff --git a/vendor/golang.org/x/mod/modfile/rule.go b/vendor/golang.org/x/mod/modfile/rule.go index 91ca682..83398dd 100644 --- a/vendor/golang.org/x/mod/modfile/rule.go +++ b/vendor/golang.org/x/mod/modfile/rule.go @@ -30,6 +30,7 @@ import ( "golang.org/x/mod/internal/lazyregexp" "golang.org/x/mod/module" + "golang.org/x/mod/semver" ) // A File is the parsed, interpreted form of a go.mod file. @@ -39,6 +40,7 @@ type File struct { Require []*Require Exclude []*Exclude Replace []*Replace + Retract []*Retract Syntax *FileSyntax } @@ -75,6 +77,21 @@ type Replace struct { Syntax *Line } +// A Retract is a single retract statement. +type Retract struct { + VersionInterval + Rationale string + Syntax *Line +} + +// A VersionInterval represents a range of versions with upper and lower bounds. +// Intervals are closed: both bounds are included. When Low is equal to High, +// the interval may refer to a single version ('v1.2.3') or an interval +// ('[v1.2.3, v1.2.3]'); both have the same representation. +type VersionInterval struct { + Low, High string +} + func (f *File) AddModuleStmt(path string) error { if f.Syntax == nil { f.Syntax = new(FileSyntax) @@ -138,7 +155,7 @@ func parseToFile(file string, data []byte, fix VersionFixer, strict bool) (*File for _, x := range fs.Stmt { switch x := x.(type) { case *Line: - f.add(&errs, x, x.Token[0], x.Token[1:], fix, strict) + f.add(&errs, nil, x, x.Token[0], x.Token[1:], fix, strict) case *LineBlock: if len(x.Token) > 1 { @@ -161,9 +178,9 @@ func parseToFile(file string, data []byte, fix VersionFixer, strict bool) (*File }) } continue - case "module", "require", "exclude", "replace": + case "module", "require", "exclude", "replace", "retract": for _, l := range x.Line { - f.add(&errs, l, x.Token[0], l.Token, fix, strict) + f.add(&errs, x, l, x.Token[0], l.Token, fix, strict) } } } @@ -177,7 +194,7 @@ func parseToFile(file string, data []byte, fix VersionFixer, strict bool) (*File var GoVersionRE = lazyregexp.New(`^([1-9][0-9]*)\.(0|[1-9][0-9]*)$`) -func (f *File) add(errs *ErrorList, line *Line, verb string, args []string, fix VersionFixer, strict bool) { +func (f *File) add(errs *ErrorList, block *LineBlock, line *Line, verb string, args []string, fix VersionFixer, strict bool) { // If strict is false, this module is a dependency. // We ignore all unknown directives as well as main-module-only // directives like replace and exclude. It will work better for @@ -186,7 +203,7 @@ func (f *File) add(errs *ErrorList, line *Line, verb string, args []string, fix // and simply ignore those statements. 
if !strict { switch verb { - case "module", "require", "go": + case "go", "module", "retract", "require": // want these even for dependency go.mods default: return @@ -232,6 +249,7 @@ func (f *File) add(errs *ErrorList, line *Line, verb string, args []string, fix f.Go = &Go{Syntax: line} f.Go.Version = args[0] + case "module": if f.Module != nil { errorf("repeated module statement") @@ -248,6 +266,7 @@ func (f *File) add(errs *ErrorList, line *Line, verb string, args []string, fix return } f.Module.Mod = module.Version{Path: s} + case "require", "exclude": if len(args) != 2 { errorf("usage: %s module/path v1.2.3", verb) @@ -284,6 +303,7 @@ func (f *File) add(errs *ErrorList, line *Line, verb string, args []string, fix Syntax: line, }) } + case "replace": arrow := 2 if len(args) >= 2 && args[1] == "=>" { @@ -347,6 +367,33 @@ func (f *File) add(errs *ErrorList, line *Line, verb string, args []string, fix New: module.Version{Path: ns, Version: nv}, Syntax: line, }) + + case "retract": + rationale := parseRetractRationale(block, line) + vi, err := parseVersionInterval(verb, &args, fix) + if err != nil { + if strict { + wrapError(err) + return + } else { + // Only report errors parsing intervals in the main module. We may + // support additional syntax in the future, such as open and half-open + // intervals. Those can't be supported now, because they break the + // go.mod parser, even in lax mode. + return + } + } + if len(args) > 0 && strict { + // In the future, there may be additional information after the version. + errorf("unexpected token after version: %q", args[0]) + return + } + retract := &Retract{ + VersionInterval: vi, + Rationale: rationale, + Syntax: line, + } + f.Retract = append(f.Retract, retract) } } @@ -444,6 +491,53 @@ func AutoQuote(s string) string { return s } +func parseVersionInterval(verb string, args *[]string, fix VersionFixer) (VersionInterval, error) { + toks := *args + if len(toks) == 0 || toks[0] == "(" { + return VersionInterval{}, fmt.Errorf("expected '[' or version") + } + if toks[0] != "[" { + v, err := parseVersion(verb, "", &toks[0], fix) + if err != nil { + return VersionInterval{}, err + } + *args = toks[1:] + return VersionInterval{Low: v, High: v}, nil + } + toks = toks[1:] + + if len(toks) == 0 { + return VersionInterval{}, fmt.Errorf("expected version after '['") + } + low, err := parseVersion(verb, "", &toks[0], fix) + if err != nil { + return VersionInterval{}, err + } + toks = toks[1:] + + if len(toks) == 0 || toks[0] != "," { + return VersionInterval{}, fmt.Errorf("expected ',' after version") + } + toks = toks[1:] + + if len(toks) == 0 { + return VersionInterval{}, fmt.Errorf("expected version after ','") + } + high, err := parseVersion(verb, "", &toks[0], fix) + if err != nil { + return VersionInterval{}, err + } + toks = toks[1:] + + if len(toks) == 0 || toks[0] != "]" { + return VersionInterval{}, fmt.Errorf("expected ']' after version") + } + toks = toks[1:] + + *args = toks + return VersionInterval{Low: low, High: high}, nil +} + func parseString(s *string) (string, error) { t := *s if strings.HasPrefix(t, `"`) { @@ -461,6 +555,27 @@ func parseString(s *string) (string, error) { return t, nil } +// parseRetractRationale extracts the rationale for a retract directive from the +// surrounding comments. If the line does not have comments and is part of a +// block that does have comments, the block's comments are used. 
+func parseRetractRationale(block *LineBlock, line *Line) string { + comments := line.Comment() + if block != nil && len(comments.Before) == 0 && len(comments.Suffix) == 0 { + comments = block.Comment() + } + groups := [][]Comment{comments.Before, comments.Suffix} + var lines []string + for _, g := range groups { + for _, c := range g { + if !strings.HasPrefix(c.Token, "//") { + continue // blank line + } + lines = append(lines, strings.TrimSpace(strings.TrimPrefix(c.Token, "//"))) + } + } + return strings.Join(lines, "\n") +} + type ErrorList []Error func (e ErrorList) Error() string { @@ -494,6 +609,8 @@ func (e *Error) Error() string { var directive string if e.ModPath != "" { directive = fmt.Sprintf("%s %s: ", e.Verb, e.ModPath) + } else if e.Verb != "" { + directive = fmt.Sprintf("%s: ", e.Verb) } return pos + directive + e.Err.Error() @@ -585,6 +702,15 @@ func (f *File) Cleanup() { } f.Replace = f.Replace[:w] + w = 0 + for _, r := range f.Retract { + if r.Low != "" || r.High != "" { + f.Retract[w] = r + w++ + } + } + f.Retract = f.Retract[:w] + f.Syntax.Cleanup() } @@ -778,6 +904,34 @@ func (f *File) DropReplace(oldPath, oldVers string) error { return nil } +func (f *File) AddRetract(vi VersionInterval, rationale string) error { + r := &Retract{ + VersionInterval: vi, + } + if vi.Low == vi.High { + r.Syntax = f.Syntax.addLine(nil, "retract", AutoQuote(vi.Low)) + } else { + r.Syntax = f.Syntax.addLine(nil, "retract", "[", AutoQuote(vi.Low), ",", AutoQuote(vi.High), "]") + } + if rationale != "" { + for _, line := range strings.Split(rationale, "\n") { + com := Comment{Token: "// " + line} + r.Syntax.Comment().Before = append(r.Syntax.Comment().Before, com) + } + } + return nil +} + +func (f *File) DropRetract(vi VersionInterval) error { + for _, r := range f.Retract { + if r.VersionInterval == vi { + f.Syntax.removeLine(r.Syntax) + *r = Retract{} + } + } + return nil +} + func (f *File) SortBlocks() { f.removeDups() // otherwise sorting is unsafe @@ -786,28 +940,38 @@ func (f *File) SortBlocks() { if !ok { continue } - sort.Slice(block.Line, func(i, j int) bool { - li := block.Line[i] - lj := block.Line[j] - for k := 0; k < len(li.Token) && k < len(lj.Token); k++ { - if li.Token[k] != lj.Token[k] { - return li.Token[k] < lj.Token[k] - } - } - return len(li.Token) < len(lj.Token) + less := lineLess + if block.Token[0] == "retract" { + less = lineRetractLess + } + sort.SliceStable(block.Line, func(i, j int) bool { + return less(block.Line[i], block.Line[j]) }) } } +// removeDups removes duplicate exclude and replace directives. +// +// Earlier exclude directives take priority. +// +// Later replace directives take priority. +// +// require directives are not de-duplicated. That's left up to higher-level +// logic (MVS). +// +// retract directives are not de-duplicated since comments are +// meaningful, and versions may be retracted multiple times. func (f *File) removeDups() { - have := make(map[module.Version]bool) kill := make(map[*Line]bool) + + // Remove duplicate excludes. + haveExclude := make(map[module.Version]bool) for _, x := range f.Exclude { - if have[x.Mod] { + if haveExclude[x.Mod] { kill[x.Syntax] = true continue } - have[x.Mod] = true + haveExclude[x.Mod] = true } var excl []*Exclude for _, x := range f.Exclude { @@ -817,15 +981,16 @@ func (f *File) removeDups() { } f.Exclude = excl - have = make(map[module.Version]bool) + // Remove duplicate replacements. // Later replacements take priority over earlier ones. 
+ haveReplace := make(map[module.Version]bool) for i := len(f.Replace) - 1; i >= 0; i-- { x := f.Replace[i] - if have[x.Old] { + if haveReplace[x.Old] { kill[x.Syntax] = true continue } - have[x.Old] = true + haveReplace[x.Old] = true } var repl []*Replace for _, x := range f.Replace { @@ -835,6 +1000,9 @@ func (f *File) removeDups() { } f.Replace = repl + // Duplicate require and retract directives are not removed. + + // Drop killed statements from the syntax tree. var stmts []Expr for _, stmt := range f.Syntax.Stmt { switch stmt := stmt.(type) { @@ -858,3 +1026,38 @@ func (f *File) removeDups() { } f.Syntax.Stmt = stmts } + +// lineLess returns whether li should be sorted before lj. It sorts +// lexicographically without assigning any special meaning to tokens. +func lineLess(li, lj *Line) bool { + for k := 0; k < len(li.Token) && k < len(lj.Token); k++ { + if li.Token[k] != lj.Token[k] { + return li.Token[k] < lj.Token[k] + } + } + return len(li.Token) < len(lj.Token) +} + +// lineRetractLess returns whether li should be sorted before lj for lines in +// a "retract" block. It treats each line as a version interval. Single versions +// are compared as if they were intervals with the same low and high version. +// Intervals are sorted in descending order, first by low version, then by +// high version, using semver.Compare. +func lineRetractLess(li, lj *Line) bool { + interval := func(l *Line) VersionInterval { + if len(l.Token) == 1 { + return VersionInterval{Low: l.Token[0], High: l.Token[0]} + } else if len(l.Token) == 5 && l.Token[0] == "[" && l.Token[2] == "," && l.Token[4] == "]" { + return VersionInterval{Low: l.Token[1], High: l.Token[3]} + } else { + // Line in unknown format. Treat as an invalid version. + return VersionInterval{} + } + } + vii := interval(li) + vij := interval(lj) + if cmp := semver.Compare(vii.Low, vij.Low); cmp != 0 { + return cmp > 0 + } + return semver.Compare(vii.High, vij.High) > 0 +} diff --git a/vendor/golang.org/x/mod/module/module.go b/vendor/golang.org/x/mod/module/module.go index 6cd3728..c1c5263 100644 --- a/vendor/golang.org/x/mod/module/module.go +++ b/vendor/golang.org/x/mod/module/module.go @@ -97,6 +97,7 @@ package module import ( "fmt" + "path" "sort" "strings" "unicode" @@ -224,13 +225,13 @@ func firstPathOK(r rune) bool { } // pathOK reports whether r can appear in an import path element. -// Paths can be ASCII letters, ASCII digits, and limited ASCII punctuation: + - . _ and ~. +// Paths can be ASCII letters, ASCII digits, and limited ASCII punctuation: - . _ and ~. // This matches what "go get" has historically recognized in import paths. // TODO(rsc): We would like to allow Unicode letters, but that requires additional // care in the safe encoding (see "escaped paths" above). func pathOK(r rune) bool { if r < utf8.RuneSelf { - return r == '+' || r == '-' || r == '.' || r == '_' || r == '~' || + return r == '-' || r == '.' || r == '_' || r == '~' || '0' <= r && r <= '9' || 'A' <= r && r <= 'Z' || 'a' <= r && r <= 'z' @@ -313,11 +314,13 @@ func CheckPath(path string) error { // separated by slashes (U+002F). (It must not begin with nor end in a slash.) // // A valid path element is a non-empty string made up of -// ASCII letters, ASCII digits, and limited ASCII punctuation: + - . _ and ~. +// ASCII letters, ASCII digits, and limited ASCII punctuation: - . _ and ~. // It must not begin or end with a dot (U+002E), nor contain two dots in a row. 
// // The element prefix up to the first dot must not be a reserved file name -// on Windows, regardless of case (CON, com1, NuL, and so on). +// on Windows, regardless of case (CON, com1, NuL, and so on). The element +// must not have a suffix of a tilde followed by one or more ASCII digits +// (to exclude paths elements that look like Windows short-names). // // CheckImportPath may be less restrictive in the future, but see the // top-level package documentation for additional information about @@ -402,6 +405,29 @@ func checkElem(elem string, fileName bool) error { return fmt.Errorf("%q disallowed as path element component on Windows", short) } } + + if fileName { + // don't check for Windows short-names in file names. They're + // only an issue for import paths. + return nil + } + + // Reject path components that look like Windows short-names. + // Those usually end in a tilde followed by one or more ASCII digits. + if tilde := strings.LastIndexByte(short, '~'); tilde >= 0 && tilde < len(short)-1 { + suffix := short[tilde+1:] + suffixIsDigits := true + for _, r := range suffix { + if r < '0' || r > '9' { + suffixIsDigits = false + break + } + } + if suffixIsDigits { + return fmt.Errorf("trailing tilde and digits in path element") + } + } + return nil } @@ -716,3 +742,49 @@ func unescapeString(escaped string) (string, bool) { } return string(buf), true } + +// MatchPrefixPatterns reports whether any path prefix of target matches one of +// the glob patterns (as defined by path.Match) in the comma-separated globs +// list. This implements the algorithm used when matching a module path to the +// GOPRIVATE environment variable, as described by 'go help module-private'. +// +// It ignores any empty or malformed patterns in the list. +func MatchPrefixPatterns(globs, target string) bool { + for globs != "" { + // Extract next non-empty glob in comma-separated list. + var glob string + if i := strings.Index(globs, ","); i >= 0 { + glob, globs = globs[:i], globs[i+1:] + } else { + glob, globs = globs, "" + } + if glob == "" { + continue + } + + // A glob with N+1 path elements (N slashes) needs to be matched + // against the first N+1 path elements of target, + // which end just before the N+1'th slash. + n := strings.Count(glob, "/") + prefix := target + // Walk target, counting slashes, truncating at the N+1'th slash. + for i := 0; i < len(target); i++ { + if target[i] == '/' { + if n == 0 { + prefix = target[:i] + break + } + n-- + } + } + if n > 0 { + // Not enough prefix elements. + continue + } + matched, _ := path.Match(glob, prefix) + if matched { + return true + } + } + return false +} diff --git a/vendor/golang.org/x/mod/semver/semver.go b/vendor/golang.org/x/mod/semver/semver.go index 2988e3c..4338f35 100644 --- a/vendor/golang.org/x/mod/semver/semver.go +++ b/vendor/golang.org/x/mod/semver/semver.go @@ -138,6 +138,9 @@ func Compare(v, w string) int { // Max canonicalizes its arguments and then returns the version string // that compares greater. +// +// Deprecated: use Compare instead. In most cases, returning a canonicalized +// version is not expected or desired. 
func Max(v, w string) string { v = Canonical(v) w = Canonical(w) diff --git a/vendor/golang.org/x/net/html/const.go b/vendor/golang.org/x/net/html/const.go index 73804d3..ff7acf2 100644 --- a/vendor/golang.org/x/net/html/const.go +++ b/vendor/golang.org/x/net/html/const.go @@ -52,7 +52,7 @@ var isSpecialElementMap = map[string]bool{ "iframe": true, "img": true, "input": true, - "keygen": true, + "keygen": true, // "keygen" has been removed from the spec, but are kept here for backwards compatibility. "li": true, "link": true, "listing": true, diff --git a/vendor/golang.org/x/net/html/foreign.go b/vendor/golang.org/x/net/html/foreign.go index 74774c4..9da9e9d 100644 --- a/vendor/golang.org/x/net/html/foreign.go +++ b/vendor/golang.org/x/net/html/foreign.go @@ -161,65 +161,62 @@ var mathMLAttributeAdjustments = map[string]string{ } var svgAttributeAdjustments = map[string]string{ - "attributename": "attributeName", - "attributetype": "attributeType", - "basefrequency": "baseFrequency", - "baseprofile": "baseProfile", - "calcmode": "calcMode", - "clippathunits": "clipPathUnits", - "contentscripttype": "contentScriptType", - "contentstyletype": "contentStyleType", - "diffuseconstant": "diffuseConstant", - "edgemode": "edgeMode", - "externalresourcesrequired": "externalResourcesRequired", - "filterunits": "filterUnits", - "glyphref": "glyphRef", - "gradienttransform": "gradientTransform", - "gradientunits": "gradientUnits", - "kernelmatrix": "kernelMatrix", - "kernelunitlength": "kernelUnitLength", - "keypoints": "keyPoints", - "keysplines": "keySplines", - "keytimes": "keyTimes", - "lengthadjust": "lengthAdjust", - "limitingconeangle": "limitingConeAngle", - "markerheight": "markerHeight", - "markerunits": "markerUnits", - "markerwidth": "markerWidth", - "maskcontentunits": "maskContentUnits", - "maskunits": "maskUnits", - "numoctaves": "numOctaves", - "pathlength": "pathLength", - "patterncontentunits": "patternContentUnits", - "patterntransform": "patternTransform", - "patternunits": "patternUnits", - "pointsatx": "pointsAtX", - "pointsaty": "pointsAtY", - "pointsatz": "pointsAtZ", - "preservealpha": "preserveAlpha", - "preserveaspectratio": "preserveAspectRatio", - "primitiveunits": "primitiveUnits", - "refx": "refX", - "refy": "refY", - "repeatcount": "repeatCount", - "repeatdur": "repeatDur", - "requiredextensions": "requiredExtensions", - "requiredfeatures": "requiredFeatures", - "specularconstant": "specularConstant", - "specularexponent": "specularExponent", - "spreadmethod": "spreadMethod", - "startoffset": "startOffset", - "stddeviation": "stdDeviation", - "stitchtiles": "stitchTiles", - "surfacescale": "surfaceScale", - "systemlanguage": "systemLanguage", - "tablevalues": "tableValues", - "targetx": "targetX", - "targety": "targetY", - "textlength": "textLength", - "viewbox": "viewBox", - "viewtarget": "viewTarget", - "xchannelselector": "xChannelSelector", - "ychannelselector": "yChannelSelector", - "zoomandpan": "zoomAndPan", + "attributename": "attributeName", + "attributetype": "attributeType", + "basefrequency": "baseFrequency", + "baseprofile": "baseProfile", + "calcmode": "calcMode", + "clippathunits": "clipPathUnits", + "diffuseconstant": "diffuseConstant", + "edgemode": "edgeMode", + "filterunits": "filterUnits", + "glyphref": "glyphRef", + "gradienttransform": "gradientTransform", + "gradientunits": "gradientUnits", + "kernelmatrix": "kernelMatrix", + "kernelunitlength": "kernelUnitLength", + "keypoints": "keyPoints", + "keysplines": "keySplines", + "keytimes": "keyTimes", 
+ "lengthadjust": "lengthAdjust", + "limitingconeangle": "limitingConeAngle", + "markerheight": "markerHeight", + "markerunits": "markerUnits", + "markerwidth": "markerWidth", + "maskcontentunits": "maskContentUnits", + "maskunits": "maskUnits", + "numoctaves": "numOctaves", + "pathlength": "pathLength", + "patterncontentunits": "patternContentUnits", + "patterntransform": "patternTransform", + "patternunits": "patternUnits", + "pointsatx": "pointsAtX", + "pointsaty": "pointsAtY", + "pointsatz": "pointsAtZ", + "preservealpha": "preserveAlpha", + "preserveaspectratio": "preserveAspectRatio", + "primitiveunits": "primitiveUnits", + "refx": "refX", + "refy": "refY", + "repeatcount": "repeatCount", + "repeatdur": "repeatDur", + "requiredextensions": "requiredExtensions", + "requiredfeatures": "requiredFeatures", + "specularconstant": "specularConstant", + "specularexponent": "specularExponent", + "spreadmethod": "spreadMethod", + "startoffset": "startOffset", + "stddeviation": "stdDeviation", + "stitchtiles": "stitchTiles", + "surfacescale": "surfaceScale", + "systemlanguage": "systemLanguage", + "tablevalues": "tableValues", + "targetx": "targetX", + "targety": "targetY", + "textlength": "textLength", + "viewbox": "viewBox", + "viewtarget": "viewTarget", + "xchannelselector": "xChannelSelector", + "ychannelselector": "yChannelSelector", + "zoomandpan": "zoomAndPan", } diff --git a/vendor/golang.org/x/net/html/parse.go b/vendor/golang.org/x/net/html/parse.go index 2cd12fc..f91466f 100644 --- a/vendor/golang.org/x/net/html/parse.go +++ b/vendor/golang.org/x/net/html/parse.go @@ -728,7 +728,13 @@ func inHeadNoscriptIM(p *parser) bool { return inBodyIM(p) case a.Basefont, a.Bgsound, a.Link, a.Meta, a.Noframes, a.Style: return inHeadIM(p) - case a.Head, a.Noscript: + case a.Head: + // Ignore the token. + return true + case a.Noscript: + // Don't let the tokenizer go into raw text mode even when a