add more issue / pr creation params #331

Merged
6543 merged 18 commits from noerw/tea:issue-create-opts into master 2021-03-08 11:48:04 +00:00
433 changed files with 18268 additions and 5681 deletions
Showing only changes of commit ecb0945af8


@ -64,7 +64,7 @@ func runReleaseDelete(cmd *cli.Context) error {
}
if ctx.Bool("delete-tag") {
_, err = client.DeleteReleaseTag(ctx.Owner, ctx.Repo, tag)
_, err = client.DeleteTag(ctx.Owner, ctx.Repo, tag)
return err
}

go.mod

@ -4,36 +4,33 @@ go 1.13
require (
code.gitea.io/gitea-vet v0.2.1
code.gitea.io/sdk/gitea v0.13.1-0.20201217101417-97e61e5a8a5f
code.gitea.io/sdk/gitea v0.13.1-0.20210304201955-ff82113459b5
gitea.com/noerw/unidiff-comments v0.0.0-20201219085024-64aec5658f2b
github.com/AlecAivazis/survey/v2 v2.2.7
github.com/Microsoft/go-winio v0.4.15 // indirect
github.com/adrg/xdg v0.2.3
github.com/alecthomas/chroma v0.8.1 // indirect
github.com/araddon/dateparse v0.0.0-20201001162425-8aadafed4dc4
github.com/AlecAivazis/survey/v2 v2.2.8
github.com/Microsoft/go-winio v0.4.16 // indirect
github.com/adrg/xdg v0.3.1
github.com/araddon/dateparse v0.0.0-20210207001429-0eec95c9db7e
github.com/charmbracelet/glamour v0.2.0
github.com/cpuguy83/go-md2man/v2 v2.0.0 // indirect
github.com/dlclark/regexp2 v1.4.0 // indirect
github.com/go-git/go-git/v5 v5.2.0
github.com/imdario/mergo v0.3.11 // indirect
github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/mattn/go-colorable v0.1.8 // indirect
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
github.com/microcosm-cc/bluemonday v1.0.4 // indirect
github.com/muesli/reflow v0.2.0 // indirect
github.com/muesli/termenv v0.7.4
github.com/olekukonko/tablewriter v0.0.4
github.com/olekukonko/tablewriter v0.0.5
github.com/rivo/uniseg v0.2.0 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966
github.com/stretchr/testify v1.6.1
github.com/stretchr/testify v1.7.0
github.com/urfave/cli/v2 v2.3.0
github.com/xanzy/ssh-agent v0.3.0 // indirect
golang.org/x/crypto v0.0.0-20201217014255-9d1352758620
golang.org/x/net v0.0.0-20201031054903-ff519b6c9102 // indirect
golang.org/x/sys v0.0.0-20201221093633-bc327ba9c2f0 // indirect
golang.org/x/term v0.0.0-20201210144234-2321bbc49cbf // indirect
golang.org/x/text v0.3.4 // indirect
golang.org/x/tools v0.0.0-20201105220310-78b158585360 // indirect
gopkg.in/yaml.v2 v2.3.0
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 // indirect
golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110 // indirect
golang.org/x/sys v0.0.0-20210305034016-7844c3c200c3 // indirect
golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d // indirect
golang.org/x/text v0.3.5 // indirect
golang.org/x/tools v0.1.0 // indirect
gopkg.in/yaml.v2 v2.4.0
)

go.sum

@ -1,28 +1,25 @@
code.gitea.io/gitea-vet v0.2.1 h1:b30by7+3SkmiftK0RjuXqFvZg2q4p68uoPGuxhzBN0s=
code.gitea.io/gitea-vet v0.2.1/go.mod h1:zcNbT/aJEmivCAhfmkHOlT645KNOf9W2KnkLgFjGGfE=
code.gitea.io/sdk/gitea v0.13.1-0.20201217101417-97e61e5a8a5f h1:v+cKQhO5BFcUVZN73CaKPgM3yudiT8U1DYetMS6l1tE=
code.gitea.io/sdk/gitea v0.13.1-0.20201217101417-97e61e5a8a5f/go.mod h1:89WiyOX1KEcvjP66sRHdu0RafojGo60bT9UqW17VbWs=
code.gitea.io/sdk/gitea v0.13.1-0.20210304201955-ff82113459b5 h1:va0KddYHN8bH6MCUaWf5e4p+il55blUw5J0ha5vTMaQ=
code.gitea.io/sdk/gitea v0.13.1-0.20210304201955-ff82113459b5/go.mod h1:89WiyOX1KEcvjP66sRHdu0RafojGo60bT9UqW17VbWs=
gitea.com/noerw/unidiff-comments v0.0.0-20201219085024-64aec5658f2b h1:CLYsMGcGLohESQDMth+RgJ4cB3CCHToxnj0zBbvB3sE=
gitea.com/noerw/unidiff-comments v0.0.0-20201219085024-64aec5658f2b/go.mod h1:Fc8iyPm4NINRWujeIk2bTfcbGc4ZYY29/oMAAGcr4qI=
github.com/AlecAivazis/survey/v2 v2.2.7 h1:5NbxkF4RSKmpywYdcRgUmos1o+roJY8duCLZXbVjoig=
github.com/AlecAivazis/survey/v2 v2.2.7/go.mod h1:9DYvHgXtiXm6nCn+jXnOXLKbH+Yo9u8fAS/SduGdoPk=
github.com/AlecAivazis/survey/v2 v2.2.8 h1:TgxCwybKdBckmC+/P9/5h49rw/nAHe/itZL0dgHs+Q0=
github.com/AlecAivazis/survey/v2 v2.2.8/go.mod h1:9DYvHgXtiXm6nCn+jXnOXLKbH+Yo9u8fAS/SduGdoPk=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/Microsoft/go-winio v0.4.14 h1:+hMXMk01us9KgxGb7ftKQt2Xpf5hH/yky+TDA+qxleU=
github.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA=
github.com/Microsoft/go-winio v0.4.15 h1:qkLXKzb1QoVatRyd/YlXZ/Kg0m5K3SPuoD82jjSOaBc=
github.com/Microsoft/go-winio v0.4.15/go.mod h1:tTuCMEN+UleMWgg9dVx4Hu52b1bJo+59jBh3ajtinzw=
github.com/Microsoft/go-winio v0.4.16 h1:FtSW/jqD+l4ba5iPBj9CODVtgfYAD8w2wS923g/cFDk=
github.com/Microsoft/go-winio v0.4.16/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugXOPRXwdLnMv0=
github.com/Netflix/go-expect v0.0.0-20180615182759-c93bf25de8e8 h1:xzYJEypr/85nBpB11F9br+3HUrpgb+fcm5iADzXXYEw=
github.com/Netflix/go-expect v0.0.0-20180615182759-c93bf25de8e8/go.mod h1:oX5x61PbNXchhh0oikYAH+4Pcfw5LKv21+Jnpr6r6Pc=
github.com/adrg/xdg v0.2.3 h1:GxXngdYxNDkoUvZXjNJGwqZxWXi43MKbOOlA/00qZi4=
github.com/adrg/xdg v0.2.3/go.mod h1:7I2hH/IT30IsupOpKZ5ue7/qNi3CoKzD6tL3HwpaRMQ=
github.com/adrg/xdg v0.3.1 h1:uIyL9BYfXaFgDyVRKE8wjtm6ETQULweQqTofphEFJYY=
github.com/adrg/xdg v0.3.1/go.mod h1:7I2hH/IT30IsupOpKZ5ue7/qNi3CoKzD6tL3HwpaRMQ=
github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7 h1:uSoVVbwJiQipAclBbw+8quDsfcvFjOpI5iCf4p/cqCs=
github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod h1:6zEj6s6u/ghQa61ZWa/C2Aw3RkjiTBOix7dkqa1VLIs=
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U=
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI=
github.com/alecthomas/chroma v0.7.3 h1:NfdAERMy+esYQs8OXk0I868/qDxxCEo7FMz1WIqMAeI=
github.com/alecthomas/chroma v0.7.3/go.mod h1:sko8vR34/90zvl5QdcUdvzL3J8NKjAUx9va9jPuFNoM=
github.com/alecthomas/chroma v0.8.1 h1:ym20sbvyC6RXz45u4qDglcgr8E313oPROshcuCHqiEE=
github.com/alecthomas/chroma v0.8.1/go.mod h1:sko8vR34/90zvl5QdcUdvzL3J8NKjAUx9va9jPuFNoM=
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 h1:JHZL0hZKJ1VENNfmXvHbgYlbUOvpzYzvy2aZU5gXVeo=
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0=
github.com/alecthomas/kong v0.2.4/go.mod h1:kQOmtJgV+Lb4aj+I2LEn40cbtawdWJ9Y8QLq+lElKxE=
@ -30,17 +27,12 @@ github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 h1:p9Sln00KOTlrYkx
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ=
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239 h1:kFOfPq6dUM1hTo4JG6LR5AXSUEsOjtdm0kw0FtQtMJA=
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
github.com/araddon/dateparse v0.0.0-20201001162425-8aadafed4dc4 h1:OkS1BqB3CzLtGRznRyvriSY8jeaVk2CrDn2ZiRQgMUI=
github.com/araddon/dateparse v0.0.0-20201001162425-8aadafed4dc4/go.mod h1:hMAUZFIkk4B1FouGxqlogyMyU6BwY/UiVmmbbzz9Up8=
github.com/araddon/dateparse v0.0.0-20210207001429-0eec95c9db7e h1:OjdSMCht0ZVX7IH0nTdf00xEustvbtUGRgMh3gbdmOg=
github.com/araddon/dateparse v0.0.0-20210207001429-0eec95c9db7e/go.mod h1:DCaWoUhZrYW9p1lxo/cm8EmUOOzAPSEZNGF2DK1dJgw=
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs=
github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
github.com/charmbracelet/glamour v0.2.0 h1:mTgaiNiumpqTZp3qVM6DH9UB0NlbY17wejoMf1kM8Pg=
github.com/charmbracelet/glamour v0.2.0/go.mod h1:UA27Kwj3QHialP74iU6C+Gpc8Y7IOAKupeKMLLBURWM=
github.com/chris-ramon/douceur v0.2.0 h1:IDMEdxlEUUBYBKE4z/mJnFyVXox+MjuEVDJNN27glkU=
github.com/chris-ramon/douceur v0.2.0/go.mod h1:wDW5xjJdeoMm1mRt4sD4c/LbF/mWdEpRXQKjTR8nIBE=
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d h1:U+s90UTSYgptZMwQh2aRr3LuazLJIa+Pg3Kc1ylSYVY=
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM=
github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
@ -52,8 +44,6 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk=
github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E=
github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/emirpasic/gods v1.12.0 h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg=
github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o=
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 h1:BHsljHzVlRcyQhjrss6TZTdY2VfCqZPbv5k3iBFa2ZQ=
@ -70,15 +60,11 @@ github.com/go-git/go-git/v5 v5.2.0 h1:YPBLG/3UK1we1ohRkncLjaXWLW+HKp5QNM/jTli2Jg
github.com/go-git/go-git/v5 v5.2.0/go.mod h1:kh02eMX+wdqqxgNMEyq8YgwlIOsDOa9homkUq1PoTMs=
github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/goterm v0.0.0-20190703233501-fc88cf888a3f h1:5CjVwnuUcp5adK4gmY6i72gpVFVnZDP2h5TmPScB6u4=
github.com/google/goterm v0.0.0-20190703233501-fc88cf888a3f/go.mod h1:nOFQdrUlIlx6M6ODdSpBj1NVA+VgLC6kmw60mkw34H4=
github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY=
github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c=
github.com/hashicorp/go-version v1.2.1 h1:zEfKbn2+PDgroKdiOzqiE8rsmLqU2uwi5PB5pBJ3TkI=
github.com/hashicorp/go-version v1.2.1/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
github.com/hinshun/vt10x v0.0.0-20180616224451-1954e6464174 h1:WlZsjVhE8Af9IcZDGgJGQpNflI3+MJSBhsgT5PCtzBQ=
github.com/hinshun/vt10x v0.0.0-20180616224451-1954e6464174/go.mod h1:DqJ97dSdRW1W22yXSB90986pcOyQ7r45iio1KN2ez1A=
github.com/imdario/mergo v0.3.9 h1:UauaLniWCFHWd+Jp9oCEkTBj8VO/9DKg3PV3VCNMDIg=
github.com/imdario/mergo v0.3.9/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
github.com/imdario/mergo v0.3.11 h1:3tnifQM4i+fbajXKBHXWEH+KvNHqojZ778UH75j3bGA=
github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
@ -87,63 +73,56 @@ github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs=
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd h1:Coekwdh0v2wtGp9Gmz1Ze3eVRAWJMLokvN3QjdzCHLY=
github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351 h1:DowS9hvgyYSX4TO5NpyC606/Z4SxnNYbT+WX27or6Ck=
github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/pty v1.1.4 h1:5Myjjh3JY/NaAi4IsUbHADytDyl1VE1Y9PXDlL+P/VQ=
github.com/kr/pty v1.1.4/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/lucasb-eyer/go-colorful v1.0.3 h1:QIbQXiugsb+q10B+MI+7DI1oQLdmnep86tWFlaaUAac=
github.com/lucasb-eyer/go-colorful v1.0.3/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
github.com/mattn/go-colorable v0.1.6 h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE=
github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-colorable v0.1.8 h1:c1ghPdyEDarC70ftn0y+A/Ee++9zz8ljHG1b13eJ0s8=
github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/mattn/go-runewidth v0.0.7 h1:Ei8KR0497xHyKJPAv59M1dkC+rOZCMBJ+t3fZ+twI54=
github.com/mattn/go-runewidth v0.0.7/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b h1:j7+1HpAFS1zy5+Q4qx1fWh90gTKwiN4QCGoY9TWyyO4=
github.com/mattn/go-runewidth v0.0.10 h1:CoZ3S2P7pvtP45xOtBw+/mDL2z0RKI576gSkzRRpdGg=
github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI=
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
github.com/microcosm-cc/bluemonday v1.0.2 h1:5lPfLTTAvAbtS0VqT+94yOtFnGfUWYyx0+iToC3Os3s=
github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/leAFZyRl6bYmGDlGc=
github.com/microcosm-cc/bluemonday v1.0.4 h1:p0L+CTpo/PLFdkoPcJemLXG+fpMD7pYOoDEq1axMbGg=
github.com/microcosm-cc/bluemonday v1.0.4/go.mod h1:8iwZnFn2CDDNZ0r6UXhF4xawGvzaqzCRa1n3/lO3W2w=
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/muesli/reflow v0.1.0 h1:oQdpLfO56lr5pgLvqD0TcjW85rDjSYSBVdiG1Ch1ddM=
github.com/muesli/reflow v0.1.0/go.mod h1:I9bWAt7QTg/que/qmUCJBGlj7wEq8OAFBjPNjc6xK4I=
github.com/muesli/reflow v0.2.0 h1:2o0UBJPHHH4fa2GCXU4Rg4DwOtWPMekCeyc5EWbAQp0=
github.com/muesli/reflow v0.2.0/go.mod h1:qT22vjVmM9MIUeLgsVYe/Ye7eZlbv9dZjL3dVhUqLX8=
github.com/muesli/termenv v0.6.0 h1:zxvzTBmo4ZcxhNGGWeMz+Tttm51eF5bmPjfy4MCRYlk=
github.com/muesli/termenv v0.6.0/go.mod h1:SohX91w6swWA4AYU+QmPx+aSgXhWO0juiyID9UZmbpA=
github.com/muesli/termenv v0.7.4 h1:/pBqvU5CpkY53tU0vVn+xgs2ZTX63aH5nY+SSps5Xa8=
github.com/muesli/termenv v0.7.4/go.mod h1:pZ7qY9l3F7e5xsAOS0zCew2tME+p7bWeBkotCEcIIcc=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/olekukonko/tablewriter v0.0.4 h1:vHD/YYe1Wolo78koG299f7V/VAS08c6IpCLn+Ejf/w8=
github.com/olekukonko/tablewriter v0.0.4/go.mod h1:zq6QwlOf5SlnkVbMSr5EoBv3636FWnp+qbPhuoO21uA=
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q=
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
@ -153,30 +132,25 @@ github.com/seletskiy/tplutil v0.0.0-20200921103632-f880f6245597/go.mod h1:F8CBHS
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 h1:JIAuq3EEf9cgbU6AtGPK4CTG3Zf6CKMNqf0MHTggAUA=
github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog=
github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1 h1:2vfRuCMp5sSVIDSqO8oNnWJq7mPa6KVP3iPIwFBuy8A=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/urfave/cli/v2 v2.3.0 h1:qph92Y649prgesehzOrQjdWyxFOp/QVM+6imKHad91M=
github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI=
github.com/xanzy/ssh-agent v0.2.1 h1:TCbipTQL2JiiCprBWx9frJ2eJlCYT00NmctrHxVAr70=
github.com/xanzy/ssh-agent v0.2.1/go.mod h1:mLlQY/MoOhWBj+gOGMQkOeiEvkx+8pJSI+0Bx9h2kr4=
github.com/xanzy/ssh-agent v0.3.0 h1:wUMzuKtKilRgBAD1sUb8gOwwRr2FGoBVumcjoOACClI=
github.com/xanzy/ssh-agent v0.3.0/go.mod h1:3s9xbODqPuuhK9JV1R321M/FlMZSBvE5aY6eAcqrDh0=
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.0 h1:WOOcyaJPlzb8fZ8TloxFe8QZkhOOJx87leDa9MIT9dc=
github.com/yuin/goldmark v1.2.0/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1 h1:ruQGxdhGHe7FWOJPT0mKs5+pD2Xs1Bm/kdGlHO04FmM=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
@ -184,12 +158,10 @@ golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnf
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190530122614-20be4c3c3ed5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073 h1:xMPOj6Pz6UipU1wXLkrtqpHbR0AVFnyPEQq/wRWz9lM=
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20201217014255-9d1352758620 h1:3wPMTskHO3+O6jqTEXyFcsnuxMQOqYSaHsDxcbUXpqA=
golang.org/x/crypto v0.0.0-20201217014255-9d1352758620/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
golang.org/x/mod v0.2.0 h1:KU7oHjnv3XNWfa5COkzUifxZmxp1TyI7ImMXqFxLwvQ=
golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83 h1:/ZScEX8SfEmUGRHs0gxpqteO5nfNW6axyZbBdw9A12g=
golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
@ -197,12 +169,10 @@ golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73r
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200301022130-244492dfa37a h1:GuSPYbZzB5/dcLNCwLQLsg3obCJtX9IJhpXkvY7kzk0=
golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20201021035429-f5854403a974 h1:IX6qOQeG5uLjB/hjjwjedwfjND0hgjPMMyO1RoIXQNI=
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20201031054903-ff519b6c9102 h1:42cLlJJdEh+ySyeUUbEQ5bsTiq8voBeTuweGVkY6Puw=
golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110 h1:qWPm9rbaAMKs8Bq/9LRpbMqxWRVUAQwMI9fVrssnTfw=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@ -217,35 +187,29 @@ golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527 h1:uYVVQ9WP/Ds2ROhcaGPeIdVq0RIXVLwsHlnvJ+cT1So=
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 h1:opSr2sbRXk5X5/givKrrKj9HXxFpW2sdCiP8MJSKLQY=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f h1:+Nyd8tzPX9R7BWHguqsrbFdRx3WQ/1ib8I44HXV5yTA=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201221093633-bc327ba9c2f0 h1:n+DPcgTwkgWzIFpLmoimYR2K2b0Ga5+Os4kayIN0vGo=
golang.org/x/sys v0.0.0-20201221093633-bc327ba9c2f0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221 h1:/ZHdbVpdR/jk3g30/d4yUL0JU9kksj8+F/bnQUVLGDM=
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210305034016-7844c3c200c3 h1:RdE7htvBru4I4VZQofQjCZk5W9+aLNlSF5n0zgVwm8s=
golang.org/x/sys v0.0.0-20210305034016-7844c3c200c3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/term v0.0.0-20201210144234-2321bbc49cbf h1:MZ2shdL+ZM/XzY3ZGOnh4Nlpnxz5GSOhOmtHo3iPU6M=
golang.org/x/term v0.0.0-20201210144234-2321bbc49cbf/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d h1:SZxvLBoTP5yHO3Frd4z4vrF+DBX9vMVanchswa69toE=
golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.4 h1:0YWbFKbhXG/wIiuHDSKpS0Iy7FSA+u45VtBMfQcFTTc=
golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.5 h1:i6eZZ+zk0SOf0xgBpEpPD18qWcJda6q1sxt3S0kzyUQ=
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20200325010219-a49f79bcc224 h1:azwY/v0y0K4mFHVsg5+UrTgchqALYWpqVo6vL5OmkmI=
golang.org/x/tools v0.0.0-20200325010219-a49f79bcc224/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
golang.org/x/tools v0.0.0-20201105220310-78b158585360 h1:/9CzsU8hOpnSUCtem1vfWNgsVeCTgkMdx+VE5YIYxnU=
golang.org/x/tools v0.0.0-20201105220310-78b158585360/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.1.0 h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY=
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@ -255,13 +219,11 @@ gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8X
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 h1:tQIYjPdBoyREyB9XMu+nnTclpTYkz2zFM+lzLJFO4gQ=
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=


@ -20,15 +20,19 @@ import (
"github.com/urfave/cli/v2"
)
var (
errNotAGiteaRepo = errors.New("No Gitea login found. You might want to specify --repo (and --login) to work outside of a repository")
)
// TeaContext contains all context derived during command initialization and wraps cli.Context
type TeaContext struct {
*cli.Context
Login *config.Login
RepoSlug string // <owner>/<repo>
Owner string // repo owner as derived from context
Repo string // repo name as derived from context or provided in flag
Output string // value of output flag
LocalRepo *git.TeaRepo // maybe, we have opened it already anyway
Login *config.Login // config data & client for selected login
RepoSlug string // <owner>/<repo>, optional
Owner string // repo owner as derived from context or provided in flag, optional
Repo string // repo name as derived from context or provided in flag, optional
Output string // value of output flag
LocalRepo *git.TeaRepo // is set if flags specified a local repo via --repo, or if $PWD is a git repo
}
// GetListOptions return ListOptions based on PaginationFlags
@ -84,8 +88,7 @@ func InitCommand(ctx *cli.Context) *TeaContext {
// check if repoFlag can be interpreted as path to local repo.
if len(repoFlag) != 0 {
repoFlagPathExists, err = utils.DirExists(repoFlag)
if err != nil {
if repoFlagPathExists, err = utils.DirExists(repoFlag); err != nil {
log.Fatal(err.Error())
}
if repoFlagPathExists {
@ -95,9 +98,12 @@ func InitCommand(ctx *cli.Context) *TeaContext {
if len(repoFlag) == 0 || repoFlagPathExists {
// try to read git repo & extract context, ignoring if PWD is not a repo
c.LocalRepo, c.Login, c.RepoSlug, err = contextFromLocalRepo(repoPath, remoteFlag)
if err != nil && err != gogit.ErrRepositoryNotExists {
log.Fatal(err.Error())
if c.LocalRepo, c.Login, c.RepoSlug, err = contextFromLocalRepo(repoPath, remoteFlag); err != nil {
if err == errNotAGiteaRepo || err == gogit.ErrRepositoryNotExists {
// we can deal with that, commands needing the optional values use ctx.Ensure()
} else {
log.Fatal(err.Error())
}
}
}
@ -187,5 +193,5 @@ func contextFromLocalRepo(repoPath, remoteValue string) (*git.TeaRepo, *config.L
}
}
return repo, nil, "", errors.New("No Gitea login found. You might want to specify --repo (and --login) to work outside of a repository")
return repo, nil, "", errNotAGiteaRepo
}


@ -223,5 +223,5 @@ func (r TeaRepo) TeaGetCurrentBranchName() (string, error) {
return "", fmt.Errorf("active ref is no branch")
}
return strings.TrimPrefix(localHead.Name().String(), "refs/heads/"), nil
return localHead.Name().Short(), nil
}
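For reference, go-git's ReferenceName.Short() does the same prefix stripping the old TrimPrefix call did, and also handles tag and remote refs. A minimal sketch, assuming a git repository in the current working directory:

package main

import (
	"fmt"
	"log"

	git "github.com/go-git/go-git/v5"
)

func main() {
	// open the repository in the working directory (path is a placeholder)
	repo, err := git.PlainOpen(".")
	if err != nil {
		log.Fatal(err)
	}
	head, err := repo.Head()
	if err != nil {
		log.Fatal(err)
	}
	// Name() yields e.g. "refs/heads/master"; Short() trims it to "master"
	fmt.Println(head.Name().Short())
}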


@ -13,8 +13,6 @@ import (
local_git "code.gitea.io/tea/modules/git"
"code.gitea.io/tea/modules/print"
"code.gitea.io/tea/modules/utils"
"github.com/go-git/go-git/v5"
)
// CreatePull creates a PR in the given repo and prints the result
@ -25,13 +23,6 @@ func CreatePull(login *config.Login, repoOwner, repoName, base, head string, opt
return fmt.Errorf("Could not open local repo: %s", err)
}
// push if possible
fmt.Println("git push")
err = localRepo.Push(&git.PushOptions{})
if err != nil && err != git.NoErrAlreadyUpToDate {
fmt.Printf("Error occurred during 'git push':\n%s\n", err.Error())
}
// default is default branch
if len(base) == 0 {
base, err = GetDefaultPRBase(login, repoOwner, repoName)
@ -95,32 +86,23 @@ func GetDefaultPRBase(login *config.Login, owner, repo string) (string, error) {
return meta.DefaultBranch, nil
}
// GetDefaultPRHead uses the currently checked out branch, checks if
// a remote currently holds the commit it points to, extracts the owner
// from its URL, and assembles the result to a valid head spec for gitea.
// GetDefaultPRHead uses the currently checked out branch, tries to find a remote
// that has a branch with the same name, and extracts the owner from its URL.
// If no remote matches, owner is empty, meaning same as head repo owner.
func GetDefaultPRHead(localRepo *local_git.TeaRepo) (owner, branch string, err error) {
headBranch, err := localRepo.Head()
if err != nil {
if branch, err = localRepo.TeaGetCurrentBranchName(); err != nil {
return
}
sha := headBranch.Hash().String()
remote, err := localRepo.TeaFindBranchRemote("", sha)
remote, err := localRepo.TeaFindBranchRemote(branch, "")
if err != nil {
err = fmt.Errorf("could not determine remote for current branch: %s", err)
return
}
if remote == nil {
// if no remote branch is found for the local hash, we abort:
// user has probably not configured a remote for the local branch,
// or local branch does not represent remote state.
err = fmt.Errorf("no matching remote found for this branch. try git push -u <remote> <branch>")
return
}
branch, err = localRepo.TeaGetCurrentBranchName()
if err != nil {
// if no remote branch is found for the local branch,
// we leave owner empty, meaning "use same repo as head" to gitea.
return
}


@ -63,21 +63,21 @@ func (c *Client) AdminCreateUser(opt CreateUserOption) (*User, *Response, error)
// EditUserOption edit user options
type EditUserOption struct {
SourceID int64 `json:"source_id"`
LoginName string `json:"login_name"`
FullName string `json:"full_name"`
Email string `json:"email"`
Password string `json:"password"`
MustChangePassword *bool `json:"must_change_password"`
Website string `json:"website"`
Location string `json:"location"`
Active *bool `json:"active"`
Admin *bool `json:"admin"`
AllowGitHook *bool `json:"allow_git_hook"`
AllowImportLocal *bool `json:"allow_import_local"`
MaxRepoCreation *int `json:"max_repo_creation"`
ProhibitLogin *bool `json:"prohibit_login"`
AllowCreateOrganization *bool `json:"allow_create_organization"`
SourceID int64 `json:"source_id"`
LoginName string `json:"login_name"`
Email *string `json:"email"`
FullName *string `json:"full_name"`
Password string `json:"password"`
MustChangePassword *bool `json:"must_change_password"`
Website *string `json:"website"`
Location *string `json:"location"`
Active *bool `json:"active"`
Admin *bool `json:"admin"`
AllowGitHook *bool `json:"allow_git_hook"`
AllowImportLocal *bool `json:"allow_import_local"`
MaxRepoCreation *int `json:"max_repo_creation"`
ProhibitLogin *bool `json:"prohibit_login"`
AllowCreateOrganization *bool `json:"allow_create_organization"`
}
// AdminEditUser modifies user information
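With Email, FullName, Website and Location now pointers, a field left nil is no longer sent as an empty string that would clear the stored value. A minimal sketch changing only the email address, assuming an admin token and instance URL (both placeholders):

package main

import (
	"log"

	"code.gitea.io/sdk/gitea"
)

func main() {
	client, err := gitea.NewClient("https://gitea.example.com", gitea.SetToken("ADMIN_TOKEN"))
	if err != nil {
		log.Fatal(err)
	}
	email := "new-address@example.com"
	// only Email is set; the other pointer fields stay nil and are left untouched
	_, err = client.AdminEditUser("some-user", gitea.EditUserOption{
		LoginName: "some-user",
		Email:     &email,
	})
	if err != nil {
		log.Fatal(err)
	}
}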


@ -26,7 +26,7 @@ func Version() string {
return "0.14.0"
}
// Client represents a Gitea API client.
// Client represents a thread-safe Gitea API client.
type Client struct {
url string
accessToken string
@ -37,6 +37,7 @@ type Client struct {
debug bool
client *http.Client
ctx context.Context
mutex sync.RWMutex
serverVersion *version.Version
getVersionOnce sync.Once
}
@ -47,6 +48,7 @@ type Response struct {
}
// NewClient initializes and returns a API client.
// Usage of all gitea.Client methods is concurrency-safe.
func NewClient(url string, options ...func(*Client)) (*Client, error) {
client := &Client{
url: strings.TrimSuffix(url, "/"),
@ -72,14 +74,23 @@ func NewClientWithHTTP(url string, httpClient *http.Client) *Client {
// SetHTTPClient is an option for NewClient to set custom http client
func SetHTTPClient(httpClient *http.Client) func(client *Client) {
return func(client *Client) {
client.client = httpClient
client.SetHTTPClient(httpClient)
}
}
// SetHTTPClient replaces default http.Client with user given one.
func (c *Client) SetHTTPClient(client *http.Client) {
c.mutex.Lock()
c.client = client
c.mutex.Unlock()
}
// SetToken is an option for NewClient to set token
func SetToken(token string) func(client *Client) {
return func(client *Client) {
client.mutex.Lock()
client.accessToken = token
client.mutex.Unlock()
}
}
@ -92,7 +103,9 @@ func SetBasicAuth(username, password string) func(client *Client) {
// SetBasicAuth sets username and password
func (c *Client) SetBasicAuth(username, password string) {
c.mutex.Lock()
c.username, c.password = username, password
c.mutex.Unlock()
}
// SetOTP is an option for NewClient to set OTP for 2FA
@ -104,7 +117,9 @@ func SetOTP(otp string) func(client *Client) {
// SetOTP sets OTP for 2FA
func (c *Client) SetOTP(otp string) {
c.mutex.Lock()
c.otp = otp
c.mutex.Unlock()
}
// SetContext is an option for NewClient to set context
@ -116,12 +131,9 @@ func SetContext(ctx context.Context) func(client *Client) {
// SetContext sets the context which is used for http requests
func (c *Client) SetContext(ctx context.Context) {
c.mutex.Lock()
c.ctx = ctx
}
// SetHTTPClient replaces default http.Client with user given one.
func (c *Client) SetHTTPClient(client *http.Client) {
c.client = client
c.mutex.Unlock()
}
// SetSudo is an option for NewClient to set sudo header
@ -133,43 +145,57 @@ func SetSudo(sudo string) func(client *Client) {
// SetSudo sets username to impersonate.
func (c *Client) SetSudo(sudo string) {
c.mutex.Lock()
c.sudo = sudo
c.mutex.Unlock()
}
// SetDebugMode is an option for NewClient to enable debug mode
func SetDebugMode() func(client *Client) {
return func(client *Client) {
client.mutex.Lock()
client.debug = true
client.mutex.Unlock()
}
}
func (c *Client) getWebResponse(method, path string, body io.Reader) ([]byte, *Response, error) {
if c.debug {
c.mutex.RLock()
debug := c.debug
if debug {
fmt.Printf("%s: %s\nBody: %v\n", method, c.url+path, body)
}
req, err := http.NewRequestWithContext(c.ctx, method, c.url+path, body)
client := c.client // client ref can change from this point on so save it
c.mutex.RUnlock()
if err != nil {
return nil, nil, err
}
resp, err := c.client.Do(req)
resp, err := client.Do(req)
if err != nil {
return nil, nil, err
}
defer resp.Body.Close()
data, err := ioutil.ReadAll(resp.Body)
if c.debug {
if debug {
fmt.Printf("Response: %v\n\n", resp)
}
return data, &Response{resp}, nil
}
func (c *Client) doRequest(method, path string, header http.Header, body io.Reader) (*Response, error) {
if c.debug {
c.mutex.RLock()
debug := c.debug
if debug {
fmt.Printf("%s: %s\nHeader: %v\nBody: %s\n", method, c.url+"/api/v1"+path, header, body)
}
req, err := http.NewRequestWithContext(c.ctx, method, c.url+"/api/v1"+path, body)
if err != nil {
c.mutex.RUnlock()
return nil, err
}
if len(c.accessToken) != 0 {
@ -184,20 +210,66 @@ func (c *Client) doRequest(method, path string, header http.Header, body io.Read
if len(c.sudo) != 0 {
req.Header.Set("Sudo", c.sudo)
}
client := c.client // client ref can change from this point on so save it
c.mutex.RUnlock()
for k, v := range header {
req.Header[k] = v
}
resp, err := c.client.Do(req)
resp, err := client.Do(req)
if err != nil {
return nil, err
}
if c.debug {
if debug {
fmt.Printf("Response: %v\n\n", resp)
}
return &Response{resp}, nil
}
// Converts a response for an HTTP status code indicating an error condition
// (non-2XX) to a well-known error value and response body. For non-problematic
// (2XX) status codes nil will be returned. Note that on a non-2XX response, the
// response body stream will have been read and, hence, is closed on return.
func statusCodeToErr(resp *Response) (body []byte, err error) {
// no error
if resp.StatusCode/100 == 2 {
return nil, nil
}
//
// error: body will be read for details
//
defer resp.Body.Close()
data, err := ioutil.ReadAll(resp.Body)
if err != nil {
return nil, fmt.Errorf("body read on HTTP error %d: %v", resp.StatusCode, err)
}
switch resp.StatusCode {
case 403:
return data, errors.New("403 Forbidden")
case 404:
return data, errors.New("404 Not Found")
case 409:
return data, errors.New("409 Conflict")
case 422:
return data, fmt.Errorf("422 Unprocessable Entity: %s", string(data))
}
path := resp.Request.URL.Path
method := resp.Request.Method
header := resp.Request.Header
errMap := make(map[string]interface{})
if err = json.Unmarshal(data, &errMap); err != nil {
// when the JSON can't be parsed, data was probably empty or a
// plain string, so we try to return a helpful error anyway
return data, fmt.Errorf("Unknown API Error: %d\nRequest: '%s' with '%s' method '%s' header and '%s' body", resp.StatusCode, path, method, header, string(data))
}
return data, errors.New(errMap["message"].(string))
}
func (c *Client) getResponse(method, path string, header http.Header, body io.Reader) ([]byte, *Response, error) {
resp, err := c.doRequest(method, path, header, body)
if err != nil {
@ -205,32 +277,18 @@ func (c *Client) getResponse(method, path string, header http.Header, body io.Re
}
defer resp.Body.Close()
data, err := ioutil.ReadAll(resp.Body)
// check for errors
data, err := statusCodeToErr(resp)
if err != nil {
return data, resp, err
}
// success (2XX), read body
data, err = ioutil.ReadAll(resp.Body)
if err != nil {
return nil, resp, err
}
switch resp.StatusCode {
case 403:
return data, resp, errors.New("403 Forbidden")
case 404:
return data, resp, errors.New("404 Not Found")
case 409:
return data, resp, errors.New("409 Conflict")
case 422:
return data, resp, fmt.Errorf("422 Unprocessable Entity: %s", string(data))
}
if resp.StatusCode/100 != 2 {
errMap := make(map[string]interface{})
if err = json.Unmarshal(data, &errMap); err != nil {
// when the JSON can't be parsed, data was probably empty or a plain string,
// so we try to return a helpful error anyway
return data, resp, fmt.Errorf("Unknown API Error: %d\nRequest: '%s' with '%s' method '%s' header and '%s' body", resp.StatusCode, path, method, header, string(data))
}
return data, resp, errors.New(errMap["message"].(string))
}
return data, resp, nil
}
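Since Client now guards its mutable fields with a sync.RWMutex, a single instance can be shared across goroutines, even while options such as the token or HTTP client are being swapped. A minimal sketch of concurrent use, assuming a reachable instance and a valid token (both placeholders):

package main

import (
	"fmt"
	"log"
	"sync"

	"code.gitea.io/sdk/gitea"
)

func main() {
	client, err := gitea.NewClient("https://gitea.example.com", gitea.SetToken("ACCESS_TOKEN"))
	if err != nil {
		log.Fatal(err)
	}

	var wg sync.WaitGroup
	for i := 0; i < 4; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			// concurrent calls are safe: doRequest snapshots client state under RLock
			user, _, err := client.GetMyUserInfo()
			if err != nil {
				log.Println(err)
				return
			}
			fmt.Println(user.UserName)
		}()
	}
	wg.Wait()
}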


@ -218,11 +218,12 @@ type EditIssueOption struct {
Ref *string `json:"ref"`
// deprecated
// TODO: rm on sdk 0.15.0
Assignee *string `json:"assignee"`
Assignees []string `json:"assignees"`
Milestone *int64 `json:"milestone"`
State *StateType `json:"state"`
Deadline *time.Time `json:"due_date"`
Assignee *string `json:"assignee"`
Assignees []string `json:"assignees"`
Milestone *int64 `json:"milestone"`
State *StateType `json:"state"`
Deadline *time.Time `json:"due_date"`
RemoveDeadline *bool `json:"unset_due_date"`
}
// Validate the EditIssueOption struct
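The new RemoveDeadline flag maps to unset_due_date, so a due date can be cleared without sending a zero time. A minimal sketch, assuming a reachable instance, a valid token, and issue #1 in owner/repo (all placeholders):

package main

import (
	"log"

	"code.gitea.io/sdk/gitea"
)

func main() {
	client, err := gitea.NewClient("https://gitea.example.com", gitea.SetToken("ACCESS_TOKEN"))
	if err != nil {
		log.Fatal(err)
	}
	remove := true
	// clears the due date of issue #1 instead of updating it
	_, _, err = client.EditIssue("owner", "repo", 1, gitea.EditIssueOption{
		RemoveDeadline: &remove,
	})
	if err != nil {
		log.Fatal(err)
	}
}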
@ -252,6 +253,8 @@ func (c *Client) EditIssue(owner, repo string, index int64, opt EditIssueOption)
func (c *Client) issueBackwardsCompatibility(issue *Issue) {
if c.checkServerVersionGreaterThanOrEqual(version1_12_0) != nil {
c.mutex.RLock()
issue.HTMLURL = fmt.Sprintf("%s/%s/issues/%d", c.url, issue.Repository.FullName, issue.Index)
c.mutex.RUnlock()
}
}


@ -11,8 +11,13 @@ import (
// StopWatch represents a running stopwatch of an issue / pr
type StopWatch struct {
Created time.Time `json:"created"`
IssueIndex int64 `json:"issue_index"`
Created time.Time `json:"created"`
Seconds int64 `json:"seconds"`
Duration string `json:"duration"`
IssueIndex int64 `json:"issue_index"`
IssueTitle string `json:"issue_title"`
RepoOwnerName string `json:"repo_owner_name"`
RepoName string `json:"repo_name"`
}
// GetMyStopwatches lists all stopwatches
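The extra fields make stopwatch listings self-describing, so no second lookup per issue is needed. A minimal sketch printing them, assuming GetMyStopwatches returns a slice of StopWatch alongside a response and error (the signature is not shown in this diff), with URL and token as placeholders:

package main

import (
	"fmt"
	"log"

	"code.gitea.io/sdk/gitea"
)

func main() {
	client, err := gitea.NewClient("https://gitea.example.com", gitea.SetToken("ACCESS_TOKEN"))
	if err != nil {
		log.Fatal(err)
	}
	// assumed signature: GetMyStopwatches() ([]*StopWatch, *Response, error)
	watches, _, err := client.GetMyStopwatches()
	if err != nil {
		log.Fatal(err)
	}
	for _, sw := range watches {
		fmt.Printf("%s/%s#%d %q running for %s\n",
			sw.RepoOwnerName, sw.RepoName, sw.IssueIndex, sw.IssueTitle, sw.Duration)
	}
}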


@ -33,15 +33,17 @@ const (
// PullReview represents a pull request review
type PullReview struct {
ID int64 `json:"id"`
Reviewer *User `json:"user"`
State ReviewStateType `json:"state"`
Body string `json:"body"`
CommitID string `json:"commit_id"`
ID int64 `json:"id"`
Reviewer *User `json:"user"`
ReviewerTeam *Team `json:"team"`
State ReviewStateType `json:"state"`
Body string `json:"body"`
CommitID string `json:"commit_id"`
// Stale indicates if the pull has changed since the review
Stale bool `json:"stale"`
// Official indicates if the review counts towards the required approval limit, if PR base is a protected branch
Official bool `json:"official"`
Dismissed bool `json:"dismissed"`
CodeCommentsCount int `json:"comments_count"`
Submitted time.Time `json:"submitted_at"`
@ -95,6 +97,17 @@ type SubmitPullReviewOptions struct {
Body string `json:"body"`
}
// DismissPullReviewOptions are options to dismiss a pull review
type DismissPullReviewOptions struct {
Message string `json:"message"`
}
// PullReviewRequestOptions are options to add or remove pull review requests
type PullReviewRequestOptions struct {
Reviewers []string `json:"reviewers"`
TeamReviewers []string `json:"team_reviewers"`
}
// ListPullReviewsOptions options for listing PullReviews
type ListPullReviewsOptions struct {
ListOptions
@ -219,3 +232,63 @@ func (c *Client) SubmitPullReview(owner, repo string, index, id int64, opt Submi
jsonHeader, bytes.NewReader(body), r)
return r, resp, err
}
// CreateReviewRequests creates review requests for a pull request
func (c *Client) CreateReviewRequests(owner, repo string, index int64, opt PullReviewRequestOptions) (*Response, error) {
if err := c.checkServerVersionGreaterThanOrEqual(version1_14_0); err != nil {
return nil, err
}
body, err := json.Marshal(&opt)
if err != nil {
return nil, err
}
_, resp, err := c.getResponse("POST",
fmt.Sprintf("/repos/%s/%s/pulls/%d/requested_reviewers", owner, repo, index),
jsonHeader, bytes.NewReader(body))
return resp, err
}
// DeleteReviewRequests deletes review requests for a pull request
func (c *Client) DeleteReviewRequests(owner, repo string, index int64, opt PullReviewRequestOptions) (*Response, error) {
if err := c.checkServerVersionGreaterThanOrEqual(version1_14_0); err != nil {
return nil, err
}
body, err := json.Marshal(&opt)
if err != nil {
return nil, err
}
_, resp, err := c.getResponse("DELETE",
fmt.Sprintf("/repos/%s/%s/pulls/%d/requested_reviewers", owner, repo, index),
jsonHeader, bytes.NewReader(body))
return resp, err
}
// DismissPullReview dismisses a review for a pull request
func (c *Client) DismissPullReview(owner, repo string, index, id int64, opt DismissPullReviewOptions) (*Response, error) {
if err := c.checkServerVersionGreaterThanOrEqual(version1_14_0); err != nil {
return nil, err
}
body, err := json.Marshal(&opt)
if err != nil {
return nil, err
}
_, resp, err := c.getResponse("POST",
fmt.Sprintf("/repos/%s/%s/pulls/%d/reviews/%d/dismissals", owner, repo, index, id),
jsonHeader, bytes.NewReader(body))
return resp, err
}
// UnDismissPullReview cancels the dismissal of a review for a pull request
func (c *Client) UnDismissPullReview(owner, repo string, index, id int64) (*Response, error) {
if err := c.checkServerVersionGreaterThanOrEqual(version1_14_0); err != nil {
return nil, err
}
_, resp, err := c.getResponse("POST",
fmt.Sprintf("/repos/%s/%s/pulls/%d/reviews/%d/undismissals", owner, repo, index, id),
jsonHeader, nil)
return resp, err
}
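Taken together, the new endpoints cover the request-and-dismiss half of the review workflow; both require a Gitea >= 1.14 server per the version check above. A minimal sketch requesting two reviewers on a PR and dismissing a stale review, assuming a reachable instance, a valid token, PR #5, and review ID 42 (all placeholders):

package main

import (
	"log"

	"code.gitea.io/sdk/gitea"
)

func main() {
	client, err := gitea.NewClient("https://gitea.example.com", gitea.SetToken("ACCESS_TOKEN"))
	if err != nil {
		log.Fatal(err)
	}

	// ask two users to review pull request #5
	_, err = client.CreateReviewRequests("owner", "repo", 5, gitea.PullReviewRequestOptions{
		Reviewers: []string{"alice", "bob"},
	})
	if err != nil {
		log.Fatal(err)
	}

	// dismiss review 42 with a short justification
	_, err = client.DismissPullReview("owner", "repo", 5, 42, gitea.DismissPullReviewOptions{
		Message: "superseded by a newer review",
	})
	if err != nil {
		log.Fatal(err)
	}
}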


@ -21,6 +21,7 @@ type Release struct {
Title string `json:"name"`
Note string `json:"body"`
URL string `json:"url"`
HTMLURL string `json:"html_url"`
TarURL string `json:"tarball_url"`
ZipURL string `json:"zipball_url"`
IsDraft bool `json:"draft"`
@ -132,8 +133,8 @@ func (c *Client) DeleteRelease(user, repo string, id int64) (*Response, error) {
return resp, err
}
// DeleteReleaseTag deletes a tag from a repository, if no release refers to it.
func (c *Client) DeleteReleaseTag(user, repo string, tag string) (*Response, error) {
// DeleteReleaseByTag deletes a release from a repository by its tag
func (c *Client) DeleteReleaseByTag(user, repo string, tag string) (*Response, error) {
if err := c.checkServerVersionGreaterThanOrEqual(version1_14_0); err != nil {
return nil, err
}
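The rename separates release deletion from tag deletion: DeleteReleaseByTag removes only the release, and the tag itself is removed with DeleteTag, which is what the updated runReleaseDelete caller at the top of this diff does. A minimal sketch, assuming a reachable instance, a valid token, and tag v1.2.1 in owner/repo (all placeholders):

package main

import (
	"log"

	"code.gitea.io/sdk/gitea"
)

func main() {
	client, err := gitea.NewClient("https://gitea.example.com", gitea.SetToken("ACCESS_TOKEN"))
	if err != nil {
		log.Fatal(err)
	}

	// drop the release attached to the tag (Gitea >= 1.14)
	if _, err := client.DeleteReleaseByTag("owner", "repo", "v1.2.1"); err != nil {
		log.Fatal(err)
	}
	// then drop the tag itself, mirroring `tea release delete --delete-tag`
	if _, err := client.DeleteTag("owner", "repo", "v1.2.1"); err != nil {
		log.Fatal(err)
	}
}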


@ -9,6 +9,7 @@ import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/url"
"strings"
"time"
@ -21,42 +22,77 @@ type Permission struct {
Pull bool `json:"pull"`
}
// InternalTracker represents settings for internal tracker
type InternalTracker struct {
// Enable time tracking (Built-in issue tracker)
EnableTimeTracker bool `json:"enable_time_tracker"`
// Let only contributors track time (Built-in issue tracker)
AllowOnlyContributorsToTrackTime bool `json:"allow_only_contributors_to_track_time"`
// Enable dependencies for issues and pull requests (Built-in issue tracker)
EnableIssueDependencies bool `json:"enable_issue_dependencies"`
}
// ExternalTracker represents settings for external tracker
type ExternalTracker struct {
// URL of external issue tracker.
ExternalTrackerURL string `json:"external_tracker_url"`
// External Issue Tracker URL Format. Use the placeholders {user}, {repo} and {index} for the username, repository name and issue index.
ExternalTrackerFormat string `json:"external_tracker_format"`
// External Issue Tracker Number Format, either `numeric` or `alphanumeric`
ExternalTrackerStyle string `json:"external_tracker_style"`
}
// ExternalWiki represents setting for external wiki
type ExternalWiki struct {
// URL of external wiki.
ExternalWikiURL string `json:"external_wiki_url"`
}
// Repository represents a repository
type Repository struct {
ID int64 `json:"id"`
Owner *User `json:"owner"`
Name string `json:"name"`
FullName string `json:"full_name"`
Description string `json:"description"`
Empty bool `json:"empty"`
Private bool `json:"private"`
Fork bool `json:"fork"`
Parent *Repository `json:"parent"`
Mirror bool `json:"mirror"`
Size int `json:"size"`
HTMLURL string `json:"html_url"`
SSHURL string `json:"ssh_url"`
CloneURL string `json:"clone_url"`
OriginalURL string `json:"original_url"`
Website string `json:"website"`
Stars int `json:"stars_count"`
Forks int `json:"forks_count"`
Watchers int `json:"watchers_count"`
OpenIssues int `json:"open_issues_count"`
DefaultBranch string `json:"default_branch"`
Archived bool `json:"archived"`
Created time.Time `json:"created_at"`
Updated time.Time `json:"updated_at"`
Permissions *Permission `json:"permissions,omitempty"`
HasIssues bool `json:"has_issues"`
HasWiki bool `json:"has_wiki"`
HasPullRequests bool `json:"has_pull_requests"`
IgnoreWhitespaceConflicts bool `json:"ignore_whitespace_conflicts"`
AllowMerge bool `json:"allow_merge_commits"`
AllowRebase bool `json:"allow_rebase"`
AllowRebaseMerge bool `json:"allow_rebase_explicit"`
AllowSquash bool `json:"allow_squash_merge"`
AvatarURL string `json:"avatar_url"`
ID int64 `json:"id"`
Owner *User `json:"owner"`
Name string `json:"name"`
FullName string `json:"full_name"`
Description string `json:"description"`
Empty bool `json:"empty"`
Private bool `json:"private"`
Fork bool `json:"fork"`
Template bool `json:"template"`
Parent *Repository `json:"parent"`
Mirror bool `json:"mirror"`
Size int `json:"size"`
HTMLURL string `json:"html_url"`
SSHURL string `json:"ssh_url"`
CloneURL string `json:"clone_url"`
OriginalURL string `json:"original_url"`
Website string `json:"website"`
Stars int `json:"stars_count"`
Forks int `json:"forks_count"`
Watchers int `json:"watchers_count"`
OpenIssues int `json:"open_issues_count"`
OpenPulls int `json:"open_pr_counter"`
Releases int `json:"release_counter"`
DefaultBranch string `json:"default_branch"`
Archived bool `json:"archived"`
Created time.Time `json:"created_at"`
Updated time.Time `json:"updated_at"`
Permissions *Permission `json:"permissions,omitempty"`
HasIssues bool `json:"has_issues"`
InternalTracker *InternalTracker `json:"internal_tracker,omitempty"`
ExternalTracker *ExternalTracker `json:"external_tracker,omitempty"`
HasWiki bool `json:"has_wiki"`
ExternalWiki *ExternalWiki `json:"external_wiki,omitempty"`
HasPullRequests bool `json:"has_pull_requests"`
HasProjects bool `json:"has_projects"`
IgnoreWhitespaceConflicts bool `json:"ignore_whitespace_conflicts"`
AllowMerge bool `json:"allow_merge_commits"`
AllowRebase bool `json:"allow_rebase"`
AllowRebaseMerge bool `json:"allow_rebase_explicit"`
AllowSquash bool `json:"allow_squash_merge"`
AvatarURL string `json:"avatar_url"`
Internal bool `json:"internal"`
MirrorInterval string `json:"mirror_interval"`
}
// RepoType represent repo type
@ -346,14 +382,24 @@ type EditRepoOption struct {
// Note: you will get a 422 error if the organization restricts changing repository visibility to organization
// owners and a non-owner tries to change the value of private.
Private *bool `json:"private,omitempty"`
// either `true` to make this repository a template or `false` to make it a normal repository
Template *bool `json:"template,omitempty"`
// either `true` to enable issues for this repository or `false` to disable them.
HasIssues *bool `json:"has_issues,omitempty"`
// set this structure to configure internal issue tracker (requires has_issues)
InternalTracker *InternalTracker `json:"internal_tracker,omitempty"`
// set this structure to use external issue tracker (requires has_issues)
ExternalTracker *ExternalTracker `json:"external_tracker,omitempty"`
// either `true` to enable the wiki for this repository or `false` to disable it.
HasWiki *bool `json:"has_wiki,omitempty"`
// set this structure to use external wiki instead of internal (requires has_wiki)
ExternalWiki *ExternalWiki `json:"external_wiki,omitempty"`
// sets the default branch for this repository.
DefaultBranch *string `json:"default_branch,omitempty"`
// either `true` to allow pull requests, or `false` to prevent pull request.
HasPullRequests *bool `json:"has_pull_requests,omitempty"`
// either `true` to enable project unit, or `false` to disable them.
HasProjects *bool `json:"has_projects,omitempty"`
// either `true` to ignore whitespace for conflicts, or `false` to not ignore whitespace. `has_pull_requests` must be `true`.
IgnoreWhitespaceConflicts *bool `json:"ignore_whitespace_conflicts,omitempty"`
// either `true` to allow merging pull requests with a merge commit, or `false` to prevent merging pull requests with merge commits. `has_pull_requests` must be `true`.
@ -366,6 +412,8 @@ type EditRepoOption struct {
AllowSquash *bool `json:"allow_squash_merge,omitempty"`
// set to `true` to archive this repository.
Archived *bool `json:"archived,omitempty"`
// set to a string like `8h30m0s` to set the mirror interval time
MirrorInterval *string `json:"mirror_interval,omitempty"`
}
// EditRepo edits the properties of a repository
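The added fields make settings such as template status, project boards and mirror sync intervals scriptable through EditRepo. A minimal sketch marking a repo as a template and disabling its project board, assuming a reachable instance and a valid token (placeholders), and assuming EditRepo returns the updated repository, a response and an error:

package main

import (
	"log"

	"code.gitea.io/sdk/gitea"
)

func main() {
	client, err := gitea.NewClient("https://gitea.example.com", gitea.SetToken("ACCESS_TOKEN"))
	if err != nil {
		log.Fatal(err)
	}
	template := true
	projects := false
	// only non-nil fields are sent, thanks to the omitempty tags above
	_, _, err = client.EditRepo("owner", "repo", gitea.EditRepoOption{
		Template:    &template,
		HasProjects: &projects,
	})
	if err != nil {
		log.Fatal(err)
	}
}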
@ -420,3 +468,20 @@ const (
func (c *Client) GetArchive(owner, repo, ref string, ext ArchiveType) ([]byte, *Response, error) {
return c.getResponse("GET", fmt.Sprintf("/repos/%s/%s/archive/%s%s", owner, repo, url.PathEscape(ref), ext), nil, nil)
}
// GetArchiveReader gets a `git archive` for a particular tree-ish git reference
// such as a branch name (`master`), a commit hash (`70b7c74b33`), or a tag
// (`v1.2.1`). The archive is returned as a byte stream in a ReadCloser. It is
// the responsibility of the client to close the reader.
func (c *Client) GetArchiveReader(owner, repo, ref string, ext ArchiveType) (io.ReadCloser, *Response, error) {
resp, err := c.doRequest("GET", fmt.Sprintf("/repos/%s/%s/archive/%s%s", owner, repo, url.PathEscape(ref), ext), nil, nil)
if err != nil {
return nil, resp, err
}
if _, err := statusCodeToErr(resp); err != nil {
return nil, resp, err
}
return resp.Body, resp, nil
}
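Unlike GetArchive, which buffers the whole archive in memory, the reader variant can stream straight to disk. A minimal sketch, assuming a reachable instance, a valid token, and the SDK's TarGZArchive constant (URL, token and repo are placeholders):

package main

import (
	"io"
	"log"
	"os"

	"code.gitea.io/sdk/gitea"
)

func main() {
	client, err := gitea.NewClient("https://gitea.example.com", gitea.SetToken("ACCESS_TOKEN"))
	if err != nil {
		log.Fatal(err)
	}
	// stream a tar.gz of the master branch; the caller must close the reader
	r, _, err := client.GetArchiveReader("owner", "repo", "master", gitea.TarGZArchive)
	if err != nil {
		log.Fatal(err)
	}
	defer r.Close()

	out, err := os.Create("repo-master.tar.gz")
	if err != nil {
		log.Fatal(err)
	}
	defer out.Close()

	if _, err := io.Copy(out, r); err != nil {
		log.Fatal(err)
	}
}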


@ -14,75 +14,78 @@ import (
// BranchProtection represents a branch protection for a repository
type BranchProtection struct {
BranchName string `json:"branch_name"`
EnablePush bool `json:"enable_push"`
EnablePushWhitelist bool `json:"enable_push_whitelist"`
PushWhitelistUsernames []string `json:"push_whitelist_usernames"`
PushWhitelistTeams []string `json:"push_whitelist_teams"`
PushWhitelistDeployKeys bool `json:"push_whitelist_deploy_keys"`
EnableMergeWhitelist bool `json:"enable_merge_whitelist"`
MergeWhitelistUsernames []string `json:"merge_whitelist_usernames"`
MergeWhitelistTeams []string `json:"merge_whitelist_teams"`
EnableStatusCheck bool `json:"enable_status_check"`
StatusCheckContexts []string `json:"status_check_contexts"`
RequiredApprovals int64 `json:"required_approvals"`
EnableApprovalsWhitelist bool `json:"enable_approvals_whitelist"`
ApprovalsWhitelistUsernames []string `json:"approvals_whitelist_username"`
ApprovalsWhitelistTeams []string `json:"approvals_whitelist_teams"`
BlockOnRejectedReviews bool `json:"block_on_rejected_reviews"`
BlockOnOutdatedBranch bool `json:"block_on_outdated_branch"`
DismissStaleApprovals bool `json:"dismiss_stale_approvals"`
RequireSignedCommits bool `json:"require_signed_commits"`
ProtectedFilePatterns string `json:"protected_file_patterns"`
Created time.Time `json:"created_at"`
Updated time.Time `json:"updated_at"`
BranchName string `json:"branch_name"`
EnablePush bool `json:"enable_push"`
EnablePushWhitelist bool `json:"enable_push_whitelist"`
PushWhitelistUsernames []string `json:"push_whitelist_usernames"`
PushWhitelistTeams []string `json:"push_whitelist_teams"`
PushWhitelistDeployKeys bool `json:"push_whitelist_deploy_keys"`
EnableMergeWhitelist bool `json:"enable_merge_whitelist"`
MergeWhitelistUsernames []string `json:"merge_whitelist_usernames"`
MergeWhitelistTeams []string `json:"merge_whitelist_teams"`
EnableStatusCheck bool `json:"enable_status_check"`
StatusCheckContexts []string `json:"status_check_contexts"`
RequiredApprovals int64 `json:"required_approvals"`
EnableApprovalsWhitelist bool `json:"enable_approvals_whitelist"`
ApprovalsWhitelistUsernames []string `json:"approvals_whitelist_username"`
ApprovalsWhitelistTeams []string `json:"approvals_whitelist_teams"`
BlockOnRejectedReviews bool `json:"block_on_rejected_reviews"`
BlockOnOfficialReviewRequests bool `json:"block_on_official_review_requests"`
BlockOnOutdatedBranch bool `json:"block_on_outdated_branch"`
DismissStaleApprovals bool `json:"dismiss_stale_approvals"`
RequireSignedCommits bool `json:"require_signed_commits"`
ProtectedFilePatterns string `json:"protected_file_patterns"`
Created time.Time `json:"created_at"`
Updated time.Time `json:"updated_at"`
}
// CreateBranchProtectionOption options for creating a branch protection
type CreateBranchProtectionOption struct {
BranchName string `json:"branch_name"`
EnablePush bool `json:"enable_push"`
EnablePushWhitelist bool `json:"enable_push_whitelist"`
PushWhitelistUsernames []string `json:"push_whitelist_usernames"`
PushWhitelistTeams []string `json:"push_whitelist_teams"`
PushWhitelistDeployKeys bool `json:"push_whitelist_deploy_keys"`
EnableMergeWhitelist bool `json:"enable_merge_whitelist"`
MergeWhitelistUsernames []string `json:"merge_whitelist_usernames"`
MergeWhitelistTeams []string `json:"merge_whitelist_teams"`
EnableStatusCheck bool `json:"enable_status_check"`
StatusCheckContexts []string `json:"status_check_contexts"`
RequiredApprovals int64 `json:"required_approvals"`
EnableApprovalsWhitelist bool `json:"enable_approvals_whitelist"`
ApprovalsWhitelistUsernames []string `json:"approvals_whitelist_username"`
ApprovalsWhitelistTeams []string `json:"approvals_whitelist_teams"`
BlockOnRejectedReviews bool `json:"block_on_rejected_reviews"`
BlockOnOutdatedBranch bool `json:"block_on_outdated_branch"`
DismissStaleApprovals bool `json:"dismiss_stale_approvals"`
RequireSignedCommits bool `json:"require_signed_commits"`
ProtectedFilePatterns string `json:"protected_file_patterns"`
BranchName string `json:"branch_name"`
EnablePush bool `json:"enable_push"`
EnablePushWhitelist bool `json:"enable_push_whitelist"`
PushWhitelistUsernames []string `json:"push_whitelist_usernames"`
PushWhitelistTeams []string `json:"push_whitelist_teams"`
PushWhitelistDeployKeys bool `json:"push_whitelist_deploy_keys"`
EnableMergeWhitelist bool `json:"enable_merge_whitelist"`
MergeWhitelistUsernames []string `json:"merge_whitelist_usernames"`
MergeWhitelistTeams []string `json:"merge_whitelist_teams"`
EnableStatusCheck bool `json:"enable_status_check"`
StatusCheckContexts []string `json:"status_check_contexts"`
RequiredApprovals int64 `json:"required_approvals"`
EnableApprovalsWhitelist bool `json:"enable_approvals_whitelist"`
ApprovalsWhitelistUsernames []string `json:"approvals_whitelist_username"`
ApprovalsWhitelistTeams []string `json:"approvals_whitelist_teams"`
BlockOnRejectedReviews bool `json:"block_on_rejected_reviews"`
BlockOnOfficialReviewRequests bool `json:"block_on_official_review_requests"`
BlockOnOutdatedBranch bool `json:"block_on_outdated_branch"`
DismissStaleApprovals bool `json:"dismiss_stale_approvals"`
RequireSignedCommits bool `json:"require_signed_commits"`
ProtectedFilePatterns string `json:"protected_file_patterns"`
}
// EditBranchProtectionOption options for editing a branch protection
type EditBranchProtectionOption struct {
EnablePush *bool `json:"enable_push"`
EnablePushWhitelist *bool `json:"enable_push_whitelist"`
PushWhitelistUsernames []string `json:"push_whitelist_usernames"`
PushWhitelistTeams []string `json:"push_whitelist_teams"`
PushWhitelistDeployKeys *bool `json:"push_whitelist_deploy_keys"`
EnableMergeWhitelist *bool `json:"enable_merge_whitelist"`
MergeWhitelistUsernames []string `json:"merge_whitelist_usernames"`
MergeWhitelistTeams []string `json:"merge_whitelist_teams"`
EnableStatusCheck *bool `json:"enable_status_check"`
StatusCheckContexts []string `json:"status_check_contexts"`
RequiredApprovals *int64 `json:"required_approvals"`
EnableApprovalsWhitelist *bool `json:"enable_approvals_whitelist"`
ApprovalsWhitelistUsernames []string `json:"approvals_whitelist_username"`
ApprovalsWhitelistTeams []string `json:"approvals_whitelist_teams"`
BlockOnRejectedReviews *bool `json:"block_on_rejected_reviews"`
BlockOnOutdatedBranch *bool `json:"block_on_outdated_branch"`
DismissStaleApprovals *bool `json:"dismiss_stale_approvals"`
RequireSignedCommits *bool `json:"require_signed_commits"`
ProtectedFilePatterns *string `json:"protected_file_patterns"`
EnablePush *bool `json:"enable_push"`
EnablePushWhitelist *bool `json:"enable_push_whitelist"`
PushWhitelistUsernames []string `json:"push_whitelist_usernames"`
PushWhitelistTeams []string `json:"push_whitelist_teams"`
PushWhitelistDeployKeys *bool `json:"push_whitelist_deploy_keys"`
EnableMergeWhitelist *bool `json:"enable_merge_whitelist"`
MergeWhitelistUsernames []string `json:"merge_whitelist_usernames"`
MergeWhitelistTeams []string `json:"merge_whitelist_teams"`
EnableStatusCheck *bool `json:"enable_status_check"`
StatusCheckContexts []string `json:"status_check_contexts"`
RequiredApprovals *int64 `json:"required_approvals"`
EnableApprovalsWhitelist *bool `json:"enable_approvals_whitelist"`
ApprovalsWhitelistUsernames []string `json:"approvals_whitelist_username"`
ApprovalsWhitelistTeams []string `json:"approvals_whitelist_teams"`
BlockOnRejectedReviews *bool `json:"block_on_rejected_reviews"`
BlockOnOfficialReviewRequests *bool `json:"block_on_official_review_requests"`
BlockOnOutdatedBranch *bool `json:"block_on_outdated_branch"`
DismissStaleApprovals *bool `json:"dismiss_stale_approvals"`
RequireSignedCommits *bool `json:"require_signed_commits"`
ProtectedFilePatterns *string `json:"protected_file_patterns"`
}
// ListBranchProtectionsOptions list branch protection options

View File

@ -42,11 +42,12 @@ type RepoCommit struct {
// Commit contains information generated from a Git commit.
type Commit struct {
*CommitMeta
HTMLURL string `json:"html_url"`
RepoCommit *RepoCommit `json:"commit"`
Author *User `json:"author"`
Committer *User `json:"committer"`
Parents []*CommitMeta `json:"parents"`
HTMLURL string `json:"html_url"`
RepoCommit *RepoCommit `json:"commit"`
Author *User `json:"author"`
Committer *User `json:"committer"`
Parents []*CommitMeta `json:"parents"`
Files []*CommitAffectedFiles `json:"files"`
}
// CommitDateOptions store dates for GIT_AUTHOR_DATE and GIT_COMMITTER_DATE
@ -55,6 +56,11 @@ type CommitDateOptions struct {
Committer time.Time `json:"committer"`
}
// CommitAffectedFiles store information about files affected by the commit
type CommitAffectedFiles struct {
Filename string `json:"filename"`
}
// GetSingleCommit returns a single commit
func (c *Client) GetSingleCommit(user, repo, commitID string) (*Commit, *Response, error) {
commit := new(Commit)

View File

@ -9,6 +9,8 @@ import (
"bytes"
"encoding/json"
"fmt"
"net/url"
"strings"
)
// FileOptions options for all file APIs
@ -23,6 +25,8 @@ type FileOptions struct {
Author Identity `json:"author"`
Committer Identity `json:"committer"`
Dates CommitDateOptions `json:"dates"`
// Add a Signed-off-by trailer by the committer at the end of the commit log message.
Signoff bool `json:"signoff"`
}
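
A small sketch of the new `Signoff` flag in use; it assumes (outside this diff) that `CreateFileOptions` embeds `FileOptions` and carries a base64-encoded `Content` field, as in the existing file API:

```go
package main

import (
	"encoding/base64"
	"log"

	"code.gitea.io/sdk/gitea"
)

func main() {
	client, err := gitea.NewClient("https://gitea.example.com", gitea.SetToken("TOKEN"))
	if err != nil {
		log.Fatal(err)
	}

	// Content is assumed to be the base64-encoded file body; Signoff asks
	// the server to append a Signed-off-by trailer to the commit message.
	opt := gitea.CreateFileOptions{
		Content: base64.StdEncoding.EncodeToString([]byte("hello\n")),
	}
	opt.Message = "add hello.txt"
	opt.Signoff = true

	if _, _, err := client.CreateFile("owner", "repo", "hello.txt", opt); err != nil {
		log.Fatal(err)
	}
}
```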
// CreateFileOptions options for creating files
@ -112,22 +116,63 @@ type FileDeleteResponse struct {
Verification *PayloadCommitVerification `json:"verification"`
}
// GetFile downloads a file of repository, ref can be branch/tag/commit.
// e.g.: ref -> master, tree -> macaron.go(no leading slash)
func (c *Client) GetFile(user, repo, ref, tree string) ([]byte, *Response, error) {
return c.getResponse("GET", fmt.Sprintf("/repos/%s/%s/raw/%s/%s", user, repo, ref, tree), nil, nil)
// pathEscapeSegments escapes segments of a path while not escaping forward slash
func pathEscapeSegments(path string) string {
slice := strings.Split(path, "/")
for index := range slice {
slice[index] = url.PathEscape(slice[index])
}
escapedPath := strings.Join(slice, "/")
return escapedPath
}
// GetContents get the metadata and contents (if a file) of an entry in a repository, or a list of entries if a dir
// GetFile downloads a file of repository, ref can be branch/tag/commit.
// e.g.: ref -> master, filepath -> README.md (no leading slash)
func (c *Client) GetFile(owner, repo, ref, filepath string) ([]byte, *Response, error) {
filepath = pathEscapeSegments(filepath)
if c.checkServerVersionGreaterThanOrEqual(version1_14_0) != nil {
ref = pathEscapeSegments(ref)
return c.getResponse("GET", fmt.Sprintf("/repos/%s/%s/raw/%s/%s", owner, repo, ref, filepath), nil, nil)
}
return c.getResponse("GET", fmt.Sprintf("/repos/%s/%s/raw/%s?ref=%s", owner, repo, filepath, url.QueryEscape(ref)), nil, nil)
}
// GetContents get the metadata and contents of a file in a repository
// ref is optional
func (c *Client) GetContents(owner, repo, ref, filepath string) (*ContentsResponse, *Response, error) {
data, resp, err := c.getDirOrFileContents(owner, repo, ref, filepath)
if err != nil {
return nil, resp, err
}
cr := new(ContentsResponse)
resp, err := c.getParsedResponse("GET", fmt.Sprintf("/repos/%s/%s/contents/%s?ref=%s", owner, repo, filepath, ref), jsonHeader, nil, cr)
if json.Unmarshal(data, &cr) != nil {
return nil, resp, fmt.Errorf("expect file, got directory")
}
return cr, resp, err
}
// ListContents gets a list of entries in a dir
// ref is optional
func (c *Client) ListContents(owner, repo, ref, filepath string) ([]*ContentsResponse, *Response, error) {
data, resp, err := c.getDirOrFileContents(owner, repo, ref, filepath)
if err != nil {
return nil, resp, err
}
crl := make([]*ContentsResponse, 0)
if json.Unmarshal(data, &crl) != nil {
return nil, resp, fmt.Errorf("expect directory, got file")
}
return crl, resp, err
}
func (c *Client) getDirOrFileContents(owner, repo, ref, filepath string) ([]byte, *Response, error) {
filepath = pathEscapeSegments(strings.TrimPrefix(filepath, "/"))
return c.getResponse("GET", fmt.Sprintf("/repos/%s/%s/contents/%s?ref=%s", owner, repo, filepath, url.QueryEscape(ref)), jsonHeader, nil)
}
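
A minimal sketch of the file-vs-directory split introduced above: `GetContents` expects a single file entry, `ListContents` expects a directory listing, and both share `getDirOrFileContents`. Client setup and the owner/repo/path values are placeholders:

```go
package main

import (
	"fmt"
	"log"

	"code.gitea.io/sdk/gitea"
)

func main() {
	client, err := gitea.NewClient("https://gitea.example.com", gitea.SetToken("TOKEN"))
	if err != nil {
		log.Fatal(err)
	}

	// A single file: GetContents succeeds; ListContents on the same path
	// would fail with "expect directory, got file".
	file, _, err := client.GetContents("owner", "repo", "master", "README.md")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("file entry: %+v\n", file)

	// A directory: ListContents returns one ContentsResponse per entry.
	entries, _, err := client.ListContents("owner", "repo", "master", "docs")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("entries in docs/:", len(entries))
}
```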
// CreateFile create a file in a repository
func (c *Client) CreateFile(owner, repo, filepath string, opt CreateFileOptions) (*FileResponse, *Response, error) {
filepath = pathEscapeSegments(filepath)
var err error
if opt.BranchName, err = c.setDefaultBranchForOldVersions(owner, repo, opt.BranchName); err != nil {
return nil, nil, err
@ -144,6 +189,7 @@ func (c *Client) CreateFile(owner, repo, filepath string, opt CreateFileOptions)
// UpdateFile update a file in a repository
func (c *Client) UpdateFile(owner, repo, filepath string, opt UpdateFileOptions) (*FileResponse, *Response, error) {
filepath = pathEscapeSegments(filepath)
var err error
if opt.BranchName, err = c.setDefaultBranchForOldVersions(owner, repo, opt.BranchName); err != nil {
return nil, nil, err
@ -160,6 +206,7 @@ func (c *Client) UpdateFile(owner, repo, filepath string, opt UpdateFileOptions)
// DeleteFile delete a file from repository
func (c *Client) DeleteFile(owner, repo, filepath string, opt DeleteFileOptions) (*Response, error) {
filepath = pathEscapeSegments(filepath)
var err error
if opt.BranchName, err = c.setDefaultBranchForOldVersions(owner, repo, opt.BranchName); err != nil {
return nil, err

View File

@ -22,10 +22,8 @@ const (
GitServiceGitlab GitServiceType = "gitlab"
// GitServiceGitea represents a gitea service
GitServiceGitea GitServiceType = "gitea"
// Not supported jet
// // GitServiceGogs represents a gogs service
// GitServiceGogs GitServiceType = "gogs"
// GitServiceGogs represents a gogs service
GitServiceGogs GitServiceType = "gogs"
)
// MigrateRepoOption options for migrating a repository from an external service
@ -33,21 +31,22 @@ type MigrateRepoOption struct {
RepoName string `json:"repo_name"`
RepoOwner string `json:"repo_owner"`
// deprecated use RepoOwner
RepoOwnerID int64 `json:"uid"`
CloneAddr string `json:"clone_addr"`
Service GitServiceType `json:"service"`
AuthUsername string `json:"auth_username"`
AuthPassword string `json:"auth_password"`
AuthToken string `json:"auth_token"`
Mirror bool `json:"mirror"`
Private bool `json:"private"`
Description string `json:"description"`
Wiki bool `json:"wiki"`
Milestones bool `json:"milestones"`
Labels bool `json:"labels"`
Issues bool `json:"issues"`
PullRequests bool `json:"pull_requests"`
Releases bool `json:"releases"`
RepoOwnerID int64 `json:"uid"`
CloneAddr string `json:"clone_addr"`
Service GitServiceType `json:"service"`
AuthUsername string `json:"auth_username"`
AuthPassword string `json:"auth_password"`
AuthToken string `json:"auth_token"`
Mirror bool `json:"mirror"`
Private bool `json:"private"`
Description string `json:"description"`
Wiki bool `json:"wiki"`
Milestones bool `json:"milestones"`
Labels bool `json:"labels"`
Issues bool `json:"issues"`
PullRequests bool `json:"pull_requests"`
Releases bool `json:"releases"`
MirrorInterval string `json:"mirror_interval"`
}
// Validate the MigrateRepoOption struct
@ -67,17 +66,24 @@ func (opt *MigrateRepoOption) Validate(c *Client) error {
switch opt.Service {
case GitServiceGithub:
if len(opt.AuthToken) == 0 {
return fmt.Errorf("github require token authentication")
return fmt.Errorf("github requires token authentication")
}
case GitServiceGitlab, GitServiceGitea:
if len(opt.AuthToken) == 0 {
return fmt.Errorf("%s require token authentication", opt.Service)
return fmt.Errorf("%s requires token authentication", opt.Service)
}
// Gitlab is supported since 1.12.0, but the API can't handle it until 1.13.0
// https://github.com/go-gitea/gitea/pull/12672
if c.checkServerVersionGreaterThanOrEqual(version1_13_0) != nil {
return fmt.Errorf("migrate from service %s need gitea >= 1.13.0", opt.Service)
}
case GitServiceGogs:
if len(opt.AuthToken) == 0 {
return fmt.Errorf("gogs requires token authentication")
}
if c.checkServerVersionGreaterThanOrEqual(version1_14_0) != nil {
return fmt.Errorf("migrate from service gogs need gitea >= 1.14.0")
}
}
return nil
}
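
A hedged sketch of a Gogs migration matching the new validation branch above (auth token required, Gitea server >= 1.14.0); `MigrateRepo` is assumed to accept this options struct, and all addresses and tokens are placeholders:

```go
package main

import (
	"log"

	"code.gitea.io/sdk/gitea"
)

func main() {
	client, err := gitea.NewClient("https://gitea.example.com", gitea.SetToken("TOKEN"))
	if err != nil {
		log.Fatal(err)
	}

	// Per Validate above, Gogs sources need token authentication and a
	// Gitea server of at least 1.14.0.
	_, _, err = client.MigrateRepo(gitea.MigrateRepoOption{
		Service:        gitea.GitServiceGogs,
		CloneAddr:      "https://gogs.example.com/owner/repo.git",
		AuthToken:      "GOGS_TOKEN",
		RepoOwner:      "owner",
		RepoName:       "repo",
		Mirror:         true,
		MirrorInterval: "8h0m0s",
	})
	if err != nil {
		log.Fatal(err)
	}
}
```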

81
vendor/code.gitea.io/sdk/gitea/repo_stars.go generated vendored Normal file
View File

@ -0,0 +1,81 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package gitea
import (
"fmt"
"net/http"
)
// ListStargazersOptions options for listing a repository's stargazers
type ListStargazersOptions struct {
ListOptions
}
// ListRepoStargazers lists a repository's stargazers
func (c *Client) ListRepoStargazers(user, repo string, opt ListStargazersOptions) ([]*User, *Response, error) {
opt.setDefaults()
stargazers := make([]*User, 0, opt.PageSize)
resp, err := c.getParsedResponse("GET", fmt.Sprintf("/repos/%s/%s/stargazers?%s", user, repo, opt.getURLQuery().Encode()), nil, nil, &stargazers)
return stargazers, resp, err
}
// GetStarredRepos returns the repos that the given user has starred
func (c *Client) GetStarredRepos(user string) ([]*Repository, *Response, error) {
repos := make([]*Repository, 0, 10)
resp, err := c.getParsedResponse("GET", fmt.Sprintf("/users/%s/starred", user), jsonHeader, nil, &repos)
return repos, resp, err
}
// GetMyStarredRepos returns the repos that the authenticated user has starred
func (c *Client) GetMyStarredRepos() ([]*Repository, *Response, error) {
repos := make([]*Repository, 0, 10)
resp, err := c.getParsedResponse("GET", "/user/starred", jsonHeader, nil, &repos)
return repos, resp, err
}
// IsRepoStarring returns whether the authenticated user has starred the repo or not
func (c *Client) IsRepoStarring(user, repo string) (bool, *Response, error) {
_, resp, err := c.getResponse("GET", fmt.Sprintf("/user/starred/%s/%s", user, repo), jsonHeader, nil)
if resp != nil {
switch resp.StatusCode {
case http.StatusNotFound:
return false, resp, nil
case http.StatusNoContent:
return true, resp, nil
default:
return false, resp, fmt.Errorf("unexpected status code '%d'", resp.StatusCode)
}
}
return false, nil, err
}
// StarRepo stars the specified repo as the authenticated user
func (c *Client) StarRepo(user, repo string) (*Response, error) {
_, resp, err := c.getResponse("PUT", fmt.Sprintf("/user/starred/%s/%s", user, repo), jsonHeader, nil)
if resp != nil {
switch resp.StatusCode {
case http.StatusNoContent:
return resp, nil
default:
return resp, fmt.Errorf("unexpected status code '%d'", resp.StatusCode)
}
}
return nil, err
}
// UnStarRepo removes the authenticated user's star from the specified repo
func (c *Client) UnStarRepo(user, repo string) (*Response, error) {
_, resp, err := c.getResponse("DELETE", fmt.Sprintf("/user/starred/%s/%s", user, repo), jsonHeader, nil)
if resp != nil {
switch resp.StatusCode {
case http.StatusNoContent:
return resp, nil
default:
return resp, fmt.Errorf("unexpected status code '%d'", resp.StatusCode)
}
}
return nil, err
}
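
A short sketch exercising the star helpers defined in the new file above (`IsRepoStarring` probes for 204/404, `StarRepo`/`UnStarRepo` toggle, `ListRepoStargazers` pages through users); client setup and repo names are placeholders:

```go
package main

import (
	"fmt"
	"log"

	"code.gitea.io/sdk/gitea"
)

func main() {
	client, err := gitea.NewClient("https://gitea.example.com", gitea.SetToken("TOKEN"))
	if err != nil {
		log.Fatal(err)
	}

	starred, _, err := client.IsRepoStarring("owner", "repo")
	if err != nil {
		log.Fatal(err)
	}

	// Toggle the star based on the 204/404 probe above.
	if starred {
		_, err = client.UnStarRepo("owner", "repo")
	} else {
		_, err = client.StarRepo("owner", "repo")
	}
	if err != nil {
		log.Fatal(err)
	}

	gazers, _, err := client.ListRepoStargazers("owner", "repo", gitea.ListStargazersOptions{})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("stargazers:", len(gazers))
}
```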

View File

@ -29,3 +29,14 @@ func (c *Client) ListRepoTags(user, repo string, opt ListRepoTagsOptions) ([]*Ta
resp, err := c.getParsedResponse("GET", fmt.Sprintf("/repos/%s/%s/tags?%s", user, repo, opt.getURLQuery().Encode()), nil, nil, &tags)
return tags, resp, err
}
// DeleteTag deletes a tag from a repository, if no release refers to it
func (c *Client) DeleteTag(user, repo string, tag string) (*Response, error) {
if err := c.checkServerVersionGreaterThanOrEqual(version1_14_0); err != nil {
return nil, err
}
_, resp, err := c.getResponse("DELETE",
fmt.Sprintf("/repos/%s/%s/tags/%s", user, repo, tag),
nil, nil)
return resp, err
}
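
A minimal usage sketch for the new `DeleteTag` helper; note the 1.14.0 version gate above. Owner, repo, and tag values are placeholders:

```go
package main

import (
	"log"

	"code.gitea.io/sdk/gitea"
)

func main() {
	client, err := gitea.NewClient("https://gitea.example.com", gitea.SetToken("TOKEN"))
	if err != nil {
		log.Fatal(err)
	}

	// Fails with a version error on servers older than 1.14.0, and with
	// an API error if a release still references the tag.
	if _, err := client.DeleteTag("owner", "repo", "v0.1.0"); err != nil {
		log.Fatal(err)
	}
}
```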

View File

@ -6,13 +6,15 @@ package gitea
// GlobalUISettings represents the global UI settings of a gitea instance which are exposed by the API
type GlobalUISettings struct {
DefaultTheme string `json:"default_theme"`
AllowedReactions []string `json:"allowed_reactions"`
}
// GlobalRepoSettings represents the global repository settings of a gitea instance which are exposed by the API
type GlobalRepoSettings struct {
MirrorsDisabled bool `json:"mirrors_disabled"`
HTTPGitDisabled bool `json:"http_git_disabled"`
MirrorsDisabled bool `json:"mirrors_disabled"`
HTTPGitDisabled bool `json:"http_git_disabled"`
MigrationsDisabled bool `json:"migrations_disabled"`
}
// GlobalAPISettings contains global API settings exposed by the API

View File

@ -27,12 +27,15 @@ type ListAccessTokensOptions struct {
// ListAccessTokens lists all the access tokens of user
func (c *Client) ListAccessTokens(opts ListAccessTokensOptions) ([]*AccessToken, *Response, error) {
if len(c.username) == 0 {
c.mutex.RLock()
username := c.username
c.mutex.RUnlock()
if len(username) == 0 {
return nil, nil, fmt.Errorf("\"username\" not set: only BasicAuth allowed")
}
opts.setDefaults()
tokens := make([]*AccessToken, 0, opts.PageSize)
resp, err := c.getParsedResponse("GET", fmt.Sprintf("/users/%s/tokens?%s", c.username, opts.getURLQuery().Encode()), jsonHeader, nil, &tokens)
resp, err := c.getParsedResponse("GET", fmt.Sprintf("/users/%s/tokens?%s", username, opts.getURLQuery().Encode()), jsonHeader, nil, &tokens)
return tokens, resp, err
}
@ -43,7 +46,10 @@ type CreateAccessTokenOption struct {
// CreateAccessToken create one access token with options
func (c *Client) CreateAccessToken(opt CreateAccessTokenOption) (*AccessToken, *Response, error) {
if len(c.username) == 0 {
c.mutex.RLock()
username := c.username
c.mutex.RUnlock()
if len(username) == 0 {
return nil, nil, fmt.Errorf("\"username\" not set: only BasicAuth allowed")
}
body, err := json.Marshal(&opt)
@ -51,13 +57,16 @@ func (c *Client) CreateAccessToken(opt CreateAccessTokenOption) (*AccessToken, *
return nil, nil, err
}
t := new(AccessToken)
resp, err := c.getParsedResponse("POST", fmt.Sprintf("/users/%s/tokens", c.username), jsonHeader, bytes.NewReader(body), t)
resp, err := c.getParsedResponse("POST", fmt.Sprintf("/users/%s/tokens", username), jsonHeader, bytes.NewReader(body), t)
return t, resp, err
}
// DeleteAccessToken delete token, identified by ID and if not available by name
func (c *Client) DeleteAccessToken(value interface{}) (*Response, error) {
if len(c.username) == 0 {
c.mutex.RLock()
username := c.username
c.mutex.RUnlock()
if len(username) == 0 {
return nil, fmt.Errorf("\"username\" not set: only BasicAuth allowed")
}
@ -75,6 +84,6 @@ func (c *Client) DeleteAccessToken(value interface{}) (*Response, error) {
return nil, fmt.Errorf("only string and int64 supported")
}
_, resp, err := c.getResponse("DELETE", fmt.Sprintf("/users/%s/tokens/%s", c.username, token), jsonHeader, nil)
_, resp, err := c.getResponse("DELETE", fmt.Sprintf("/users/%s/tokens/%s", username, token), jsonHeader, nil)
return resp, err
}
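
For reference, a hedged sketch of the token endpoints that the guarded username read above serves; the `SetBasicAuth` client option and the `Name` fields on the token types are assumptions about the SDK, and all credentials are placeholders:

```go
package main

import (
	"fmt"
	"log"

	"code.gitea.io/sdk/gitea"
)

func main() {
	// The token endpoints require BasicAuth so the client has a username
	// from which to build the /users/{name}/tokens path.
	client, err := gitea.NewClient("https://gitea.example.com",
		gitea.SetBasicAuth("someuser", "somepassword"))
	if err != nil {
		log.Fatal(err)
	}

	token, _, err := client.CreateAccessToken(gitea.CreateAccessTokenOption{Name: "tea-cli"})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("created token:", token.Name)

	tokens, _, err := client.ListAccessTokens(gitea.ListAccessTokensOptions{})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("token count:", len(tokens))
}
```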

View File

@ -31,7 +31,10 @@ func (c *Client) CheckServerVersionConstraint(constraint string) error {
return err
}
if !check.Check(c.serverVersion) {
return fmt.Errorf("gitea server at %s does not satisfy version constraint %s", c.url, constraint)
c.mutex.RLock()
url := c.url
c.mutex.RUnlock()
return fmt.Errorf("gitea server at %s does not satisfy version constraint %s", url, constraint)
}
return nil
}
@ -51,7 +54,10 @@ func (c *Client) checkServerVersionGreaterThanOrEqual(v *version.Version) error
}
if !c.serverVersion.GreaterThanOrEqual(v) {
return fmt.Errorf("gitea server at %s is older than %s", c.url, v.Original())
c.mutex.RLock()
url := c.url
c.mutex.RUnlock()
return fmt.Errorf("gitea server at %s is older than %s", url, v.Original())
}
return nil
}

View File

@ -105,7 +105,8 @@ func (i *Input) OnChange(key rune, config *PromptConfig) (bool, error) {
}
} else if key == terminal.KeyDelete || key == terminal.KeyBackspace {
if i.answer != "" {
i.answer = i.answer[0 : len(i.answer)-1]
runeAnswer := []rune(i.answer)
i.answer = string(runeAnswer[0 : len(runeAnswer)-1])
}
} else if key >= terminal.KeySpace {
i.answer += string(key)

View File

@ -113,7 +113,8 @@ func (m *MultiSelect) OnChange(key rune, config *PromptConfig) {
m.filter = ""
} else if key == terminal.KeyDelete || key == terminal.KeyBackspace {
if m.filter != "" {
m.filter = m.filter[0 : len(m.filter)-1]
runeFilter := []rune(m.filter)
m.filter = string(runeFilter[0 : len(runeFilter)-1])
}
} else if key >= terminal.KeySpace {
m.filter += string(key)

View File

@ -114,8 +114,9 @@ func (s *Select) OnChange(key rune, config *PromptConfig) bool {
} else if key == terminal.KeyDelete || key == terminal.KeyBackspace {
// if there is content in the filter to delete
if s.filter != "" {
runeFilter := []rune(s.filter)
// subtract a line from the current filter
s.filter = s.filter[0 : len(s.filter)-1]
s.filter = string(runeFilter[0 : len(runeFilter)-1])
// we removed the last value in the filter
}
} else if key >= terminal.KeySpace {
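
A standalone sketch (not from the survey code) of why the backspace handlers in the Input, MultiSelect, and Select prompts above now slice runes instead of bytes: byte slicing can cut a multi-byte UTF-8 character in half, while rune slicing always removes a whole character:

```go
package main

import "fmt"

func main() {
	answer := "café" // "é" is two bytes in UTF-8

	// Byte slicing (the old behaviour) drops only the last byte and
	// leaves a dangling half of "é" behind.
	byByte := answer[:len(answer)-1]

	// Rune slicing (the new behaviour) drops the last character cleanly.
	runes := []rune(answer)
	byRune := string(runes[:len(runes)-1])

	fmt.Printf("byte slice: %q\n", byByte) // "caf\xc3" (invalid UTF-8 tail)
	fmt.Printf("rune slice: %q\n", byRune) // "caf"
}
```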

View File

@ -18,18 +18,21 @@ func TransformString(f func(s string) string) Transformer {
return func(ans interface{}) interface{} {
// if the answer value passed in is the zero value of the appropriate type
if isZero(reflect.ValueOf(ans)) {
// skip this `Transformer` by returning a nil value.
// skip this `Transformer` by returning a zero value of string.
// The original answer will be not affected,
// see survey.go#L125.
return nil
// A zero value of string should be returned to be handled by
// next Transformer in a composed Transformer,
// see transform.go#L75
return ""
}
// "ans" is never nil here, so we don't have to check that
// see survey.go#L97 for more.
// see survey.go#L338 for more.
// Make sure that the answer's value is of type string.
s, ok := ans.(string)
if !ok {
return nil
return ""
}
return f(s)

View File

@ -3,7 +3,7 @@ module github.com/Microsoft/go-winio
go 1.12
require (
github.com/pkg/errors v0.8.1
github.com/pkg/errors v0.9.1
github.com/sirupsen/logrus v1.4.1
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3
)

View File

@ -2,8 +2,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/sirupsen/logrus v1.4.1 h1:GL2rEmy6nsikmW0r8opw9JIRScdMF5hA8cOYLH7In1k=
@ -12,7 +12,5 @@ github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+
github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b h1:ag/x1USPSsqHud38I9BAC88qdNLDHHtQ4mlgQIZPPNA=
golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3 h1:7TYNF4UdlohbFwpNH04CoPMp1cHUZgO1Ebq5r2hIjfo=
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=

View File

@ -429,10 +429,10 @@ type PipeConfig struct {
// when the pipe is in message mode.
MessageMode bool
// InputBufferSize specifies the size the input buffer, in bytes.
// InputBufferSize specifies the size of the input buffer, in bytes.
InputBufferSize int32
// OutputBufferSize specifies the size the input buffer, in bytes.
// OutputBufferSize specifies the size of the output buffer, in bytes.
OutputBufferSize int32
}

161
vendor/github.com/adrg/xdg/README.md generated vendored
View File

@ -1,21 +1,50 @@
xdg
===
<h1 align="center">
<div>
<img src="https://raw.githubusercontent.com/adrg/adrg.github.io/master/assets/projects/xdg/logo.png" height="80px" alt="xdg logo"/>
</div>
</h1>
[![Build Status](https://github.com/adrg/xdg/workflows/CI/badge.svg)](https://github.com/adrg/xdg/actions?query=workflow%3ACI)
[![Code coverage](https://codecov.io/gh/adrg/xdg/branch/master/graphs/badge.svg?branch=master)](https://codecov.io/gh/adrg/xdg)
[![pkg.go.dev documentation](https://pkg.go.dev/badge/github.com/adrg/xdg)](https://pkg.go.dev/github.com/adrg/xdg)
[![MIT license](https://img.shields.io/badge/license-MIT-red.svg?style=flat-square)](https://opensource.org/licenses/MIT)
[![Go report card](https://goreportcard.com/badge/github.com/adrg/xdg)](https://goreportcard.com/report/github.com/adrg/xdg)
[![GitHub issues](https://img.shields.io/github/issues/adrg/xdg)](https://github.com/adrg/xdg/issues)
[![Buy me a coffee](https://img.shields.io/static/v1.svg?label=%20&message=Buy%20me%20a%20coffee&color=FF813F&logo=buy%20me%20a%20coffee&logoColor=white)](https://www.buymeacoffee.com/adrg)
[![GitHub stars](https://img.shields.io/github/stars/adrg/xdg?style=social)](https://github.com/adrg/xdg/stargazers)
<h4 align="center">Go implementation of the XDG Base Directory Specification and XDG user directories.</h4>
<p align="center">
<a href="https://github.com/adrg/xdg/actions?query=workflow%3ACI">
<img alt="Build status" src="https://github.com/adrg/xdg/workflows/CI/badge.svg">
</a>
<a href="https://app.codecov.io/gh/adrg/xdg">
<img alt="Code coverage" src="https://codecov.io/gh/adrg/xdg/branch/master/graphs/badge.svg?branch=master">
</a>
<a href="https://pkg.go.dev/github.com/adrg/xdg">
<img alt="pkg.go.dev documentation" src="https://img.shields.io/badge/go.dev-reference-007d9c?logo=go&logoColor=white">
</a>
<a href="https://opensource.org/licenses/MIT" rel="nofollow">
<img alt="MIT license" src="https://img.shields.io/github/license/adrg/xdg">
</a>
<br />
<a href="https://goreportcard.com/report/github.com/adrg/xdg">
<img alt="Go report card" src="https://goreportcard.com/badge/github.com/adrg/xdg">
</a>
<a href="https://github.com/avelino/awesome-go#configuration">
<img alt="Awesome Go" src="https://awesome.re/mentioned-badge.svg">
</a>
<a href="https://github.com/adrg/xdg/graphs/contributors">
<img alt="GitHub contributors" src="https://img.shields.io/github/contributors/adrg/xdg" />
</a>
<a href="https://github.com/adrg/xdg/issues">
<img alt="GitHub open issues" src="https://img.shields.io/github/issues-raw/adrg/xdg">
</a>
<a href="https://ko-fi.com/T6T72WATK">
<img alt="Buy me a coffee" src="https://img.shields.io/static/v1.svg?label=%20&message=Buy%20me%20a%20coffee&color=579fbf&logo=buy%20me%20a%20coffee&logoColor=white">
</a>
</p>
Provides an implementation of the [XDG Base Directory Specification](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html).
The specification defines a set of standard paths for storing application files,
including data and configuration files. For portability and flexibility reasons,
applications should use the XDG defined locations instead of hardcoding paths.
The package also includes the locations of well known [user directories](https://wiki.archlinux.org/index.php/XDG_user_directories).
The current implementation supports Windows, Mac OS and most flavors of Unix.
The package also includes the locations of well known [user directories](https://wiki.archlinux.org/index.php/XDG_user_directories)
and an implementation of the [state directory](https://wiki.debian.org/XDGBaseDirectorySpecification#Proposal:_STATE_directory) proposal.
Windows, macOS and most flavors of Unix are supported.
Full documentation can be found at: https://pkg.go.dev/github.com/adrg/xdg.
@ -29,19 +58,19 @@ present in the environment.
#### XDG Base Directory
| | Unix | Mac OS | Windows |
| :--- | :--- | :----- | :--- |
| XDG_DATA_HOME | `~/.local/share` | `~/Library/Application Support` | `%LOCALAPPDATA%` |
| XDG_DATA_DIRS | `/usr/local/share`<br/>`/usr/share` | `/Library/Application Support` | `%APPDATA%\Roaming`<br/>`%PROGRAMDATA%` |
| XDG_CONFIG_HOME | `~/.config` | `~/Library/Preferences` | `%LOCALAPPDATA%` |
| XDG_CONFIG_DIRS | `/etc/xdg` | `/Library/Preferences` | `%PROGRAMDATA%` |
| XDG_CACHE_HOME | `~/.cache` | `~/Library/Caches` | `%LOCALAPPDATA%\cache` |
| XDG_RUNTIME_DIR | `/run/user/UID` | `~/Library/Application Support` | `%LOCALAPPDATA%` |
| | Unix | macOS | Windows |
| :-------------- | :---------------------------------- | :------------------------------------------------------------------------------------ | :-------------------------------------- |
| XDG_DATA_HOME | `~/.local/share` | `~/Library/Application Support` | `%LOCALAPPDATA%` |
| XDG_DATA_DIRS | `/usr/local/share`<br/>`/usr/share` | `/Library/Application Support` | `%APPDATA%\Roaming`<br/>`%PROGRAMDATA%` |
| XDG_CONFIG_HOME | `~/.config` | `~/Library/Application Support` | `%LOCALAPPDATA%` |
| XDG_CONFIG_DIRS | `/etc/xdg` | `~/Library/Preferences`<br/>`/Library/Application Support`<br/>`/Library/Preferences` | `%PROGRAMDATA%` |
| XDG_CACHE_HOME | `~/.cache` | `~/Library/Caches` | `%LOCALAPPDATA%\cache` |
| XDG_RUNTIME_DIR | `/run/user/UID` | `~/Library/Application Support` | `%LOCALAPPDATA%` |
#### XDG user directories
| | Unix | Mac OS | Windows |
| :--- | :--- | :----- | :--- |
| | Unix | macOS | Windows |
| :------------------ | :------------ | :------------ | :------------------------ |
| XDG_DESKTOP_DIR | `~/Desktop` | `~/Desktop` | `%USERPROFILE%/Desktop` |
| XDG_DOWNLOAD_DIR | `~/Downloads` | `~/Downloads` | `%USERPROFILE%/Downloads` |
| XDG_DOCUMENTS_DIR | `~/Documents` | `~/Documents` | `%USERPROFILE%/Documents` |
@ -53,43 +82,50 @@ present in the environment.
#### Non-standard directories
State directory
```
Unix
• ~/.local/state
macOS
• ~/Library/Application Support
Windows
• %LOCALAPPDATA%
```
Application directories
```
Unix:
- $XDG_DATA_HOME/applications
- ~/.local/share/applications
- /usr/local/share/applications
- /usr/share/applications
- $XDG_DATA_DIRS/applications
Mac OS:
- /Applications
Windows:
- %APPDATA%\Roaming\Microsoft\Windows\Start Menu\Programs
Unix
• $XDG_DATA_HOME/applications
• ~/.local/share/applications
• /usr/local/share/applications
• /usr/share/applications
• $XDG_DATA_DIRS/applications
macOS
• /Applications
Windows
• %APPDATA%\Roaming\Microsoft\Windows\Start Menu\Programs
```
Font Directories
Font directories
```
Unix:
- $XDG_DATA_HOME/fonts
- ~/.fonts
- ~/.local/share/fonts
- /usr/local/share/fonts
- /usr/share/fonts
- $XDG_DATA_DIRS/fonts
Mac OS:
- ~/Library/Fonts
- /Library/Fonts
- /System/Library/Fonts
- /Network/Library/Fonts
Windows:
- %windir%\Fonts
- %LOCALAPPDATA%\Microsoft\Windows\Fonts
Unix
• $XDG_DATA_HOME/fonts
• ~/.fonts
• ~/.local/share/fonts
• /usr/local/share/fonts
• /usr/share/fonts
• $XDG_DATA_DIRS/fonts
macOS
• ~/Library/Fonts
• /Library/Fonts
• /System/Library/Fonts
• /Network/Library/Fonts
Windows
• %windir%\Fonts
• %LOCALAPPDATA%\Microsoft\Windows\Fonts
```
## Usage
@ -115,6 +151,7 @@ func main() {
log.Println("Runtime directory:", xdg.RuntimeDir)
// Non-standard directories.
log.Println("Home state directory:", xdg.StateHome)
log.Println("Application directories:", xdg.ApplicationDirs)
log.Println("Font directories:", xdg.FontDirs)
@ -132,6 +169,7 @@ func main() {
// xdg.DataFile()
// xdg.CacheFile()
// xdg.RuntimeFile()
// xdg.StateFile()
// Finding application config files.
// SearchConfigFile takes one parameter which must contain the name of
@ -147,6 +185,7 @@ func main() {
// xdg.SearchDataFile()
// xdg.SearchCacheFile()
// xdg.SearchRuntimeFile()
// xdg.SearchStateFile()
}
```
@ -181,31 +220,27 @@ func main() {
## Contributing
Contributions in the form of pull requests, issues or just general feedback,
are always welcome.
See [CONTRIBUTING.MD](https://github.com/adrg/xdg/blob/master/CONTRIBUTING.md).
are always welcome.
See [CONTRIBUTING.MD](CONTRIBUTING.md).
**Contributors**:
[adrg](https://github.com/adrg),
[wichert](https://github.com/wichert),
[bouncepaw](https://github.com/bouncepaw),
[gabriel-vasile](https://github.com/gabriel-vasile).
[gabriel-vasile](https://github.com/gabriel-vasile),
[KalleDK](https://github.com/KalleDK).
## References
For more information see:
* [XDG Base Directory Specification](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html)
* [XDG user directories](https://wiki.archlinux.org/index.php/XDG_user_directories)
## Buy me a coffee
If you found this project useful and want to support it, consider buying me a coffee.
<a href="https://www.buymeacoffee.com/adrg">
<img src="https://cdn.buymeacoffee.com/buttons/v2/arial-orange.png" alt="Buy Me A Coffee" height="42px">
</a>
* [XDG state directory proposal](https://wiki.debian.org/XDGBaseDirectorySpecification#Proposal:_STATE_directory)
* [XDG_STATE_HOME proposal](https://lists.freedesktop.org/archives/xdg/2016-December/013803.html)
## License
Copyright (c) 2014 Adrian-George Bostan.
This project is licensed under the [MIT license](https://opensource.org/licenses/MIT).
See [LICENSE](https://github.com/adrg/xdg/blob/master/LICENSE) for more details.
See [LICENSE](LICENSE) for more details.

View File

@ -1,7 +1,5 @@
package xdg
import "os"
// XDG Base Directory environment variables.
const (
envDataHome = "XDG_DATA_HOME"
@ -10,6 +8,7 @@ const (
envConfigDirs = "XDG_CONFIG_DIRS"
envCacheHome = "XDG_CACHE_HOME"
envRuntimeDir = "XDG_RUNTIME_DIR"
envStateHome = "XDG_STATE_HOME"
)
type baseDirectories struct {
@ -21,6 +20,7 @@ type baseDirectories struct {
runtime string
// Non-standard directories.
stateHome string
fonts []string
applications []string
}
@ -38,29 +38,13 @@ func (bd baseDirectories) cacheFile(relPath string) (string, error) {
}
func (bd baseDirectories) runtimeFile(relPath string) (string, error) {
fi, err := os.Lstat(bd.runtime)
if err != nil {
if os.IsNotExist(err) {
return createPath(relPath, []string{bd.runtime})
}
return "", err
}
if fi.IsDir() {
// The runtime directory must be owned by the user.
if err = chown(bd.runtime, os.Getuid(), os.Getgid()); err != nil {
return "", err
}
} else {
// For security reasons, the runtime directory cannot be a symlink.
if err = os.Remove(bd.runtime); err != nil {
return "", err
}
}
return createPath(relPath, []string{bd.runtime})
}
func (bd baseDirectories) stateFile(relPath string) (string, error) {
return createPath(relPath, []string{bd.stateHome})
}
func (bd baseDirectories) searchDataFile(relPath string) (string, error) {
return searchFile(relPath, append([]string{bd.dataHome}, bd.data...))
}
@ -76,3 +60,7 @@ func (bd baseDirectories) searchCacheFile(relPath string) (string, error) {
func (bd baseDirectories) searchRuntimeFile(relPath string) (string, error) {
return searchFile(relPath, []string{bd.runtime})
}
func (bd baseDirectories) searchStateFile(relPath string) (string, error) {
return searchFile(relPath, []string{bd.stateHome})
}

1
vendor/github.com/adrg/xdg/go.sum generated vendored
View File

@ -5,6 +5,7 @@ github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZN
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

View File

@ -5,15 +5,23 @@ import (
)
func initBaseDirs(home string) {
homeAppSupport := filepath.Join(home, "Library", "Application Support")
rootAppSupport := "/Library/Application Support"
// Initialize base directories.
baseDirs.dataHome = xdgPath(envDataHome, filepath.Join(home, "Library", "Application Support"))
baseDirs.data = xdgPaths(envDataDirs, "/Library/Application Support")
baseDirs.configHome = xdgPath(envConfigHome, filepath.Join(home, "Library", "Preferences"))
baseDirs.config = xdgPaths(envConfigDirs, "/Library/Preferences")
baseDirs.dataHome = xdgPath(envDataHome, homeAppSupport)
baseDirs.data = xdgPaths(envDataDirs, rootAppSupport)
baseDirs.configHome = xdgPath(envConfigHome, homeAppSupport)
baseDirs.config = xdgPaths(envConfigDirs,
filepath.Join(home, "Library", "Preferences"),
rootAppSupport,
"/Library/Preferences",
)
baseDirs.cacheHome = xdgPath(envCacheHome, filepath.Join(home, "Library", "Caches"))
baseDirs.runtime = xdgPath(envRuntimeDir, filepath.Join(home, "Library", "Application Support"))
baseDirs.runtime = xdgPath(envRuntimeDir, homeAppSupport)
// Initialize non-standard directories.
baseDirs.stateHome = xdgPath(envStateHome, homeAppSupport)
baseDirs.applications = []string{
"/Applications",
}

View File

@ -18,6 +18,7 @@ func initBaseDirs(home string) {
baseDirs.runtime = xdgPath(envRuntimeDir, filepath.Join("/run/user", strconv.Itoa(os.Getuid())))
// Initialize non-standard directories.
baseDirs.stateHome = xdgPath(envStateHome, filepath.Join(home, ".local", "state"))
appDirs := []string{
filepath.Join(baseDirs.dataHome, "applications"),
filepath.Join(home, ".local/share/applications"),

View File

@ -43,6 +43,7 @@ func initBaseDirs(home string) {
baseDirs.runtime = xdgPath(envRuntimeDir, localAppDataDir)
// Initialize non-standard directories.
baseDirs.stateHome = xdgPath(envStateHome, localAppDataDir)
baseDirs.applications = []string{
filepath.Join(roamingAppDataDir, "Microsoft", "Windows", "Start Menu", "Programs"),
}

View File

@ -33,14 +33,6 @@ func homeDir() string {
return ""
}
func chown(name string, uid, gid int) error {
if goOS := runtime.GOOS; goOS == "windows" || goOS == "plan9" {
return nil
}
return os.Chown(name, uid, gid)
}
func exists(path string) bool {
_, err := os.Stat(path)
return err == nil || os.IsExist(err)

55
vendor/github.com/adrg/xdg/xdg.go generated vendored
View File

@ -12,6 +12,10 @@ flavors of Unix.
For more information regarding the XDG user directories see:
https://wiki.archlinux.org/index.php/XDG_user_directories
For more information regarding the XDG state directory proposal see:
https://wiki.debian.org/XDGBaseDirectorySpecification#Proposal:_STATE_directory
https://lists.freedesktop.org/archives/xdg/2016-December/013803.html
*/
package xdg
@ -21,14 +25,14 @@ var (
// DataHome defines the base directory relative to which user-specific
// data files should be stored. This directory is defined by the
// environment variable $XDG_DATA_HOME. If this variable is not set,
// $XDG_DATA_HOME environment variable. If the variable is not set,
// a default equal to $HOME/.local/share should be used.
DataHome string
// DataDirs defines the preference-ordered set of base directories to
// search for data files in addition to the DataHome base directory.
// This set of directories is defined by the environment variable
// $XDG_DATA_DIRS. If this variable is not set, the default directories
// This set of directories is defined by the $XDG_DATA_DIRS environment
// variable. If the variable is not set, the default directories
// to be used are /usr/local/share and /usr/share, in that order. The
// DataHome directory is considered more important than any of the
// directories defined by DataDirs. Therefore, user data files should be
@ -37,30 +41,30 @@ var (
// ConfigHome defines the base directory relative to which user-specific
// configuration files should be written. This directory is defined by
// the environment variable $XDG_CONFIG_HOME. If this variable is not
// the $XDG_CONFIG_HOME environment variable. If the variable is not
// set, a default equal to $HOME/.config should be used.
ConfigHome string
// ConfigDirs defines the preference-ordered set of base directories to
// search for configuration files in addition to the ConfigHome base
// directory. This set of directories is defined by the environment
// variable $XDG_CONFIG_DIRS. If this variable is not set, a default
// equal to /etc/xdg should be used. The ConfigHome directory is
// considered more important than any of the directories defined by
// ConfigDirs. Therefore, user config files should be written
// relative to the ConfigHome directory, if possible.
// directory. This set of directories is defined by the $XDG_CONFIG_DIRS
// environment variable. If the variable is not set, a default equal
// to /etc/xdg should be used. The ConfigHome directory is considered
// more important than any of the directories defined by ConfigDirs.
// Therefore, user config files should be written relative to the
// ConfigHome directory, if possible.
ConfigDirs []string
// CacheHome defines the base directory relative to which user-specific
// non-essential (cached) data should be written. This directory is
// defined by the environment variable $XDG_CACHE_HOME. If this variable
// defined by the $XDG_CACHE_HOME environment variable. If the variable
// is not set, a default equal to $HOME/.cache should be used.
CacheHome string
// RuntimeDir defines the base directory relative to which user-specific
// non-essential runtime files and other file objects (such as sockets,
// named pipes, etc.) should be stored. This directory is defined by the
// environment variable $XDG_RUNTIME_DIR. If this variable is not set,
// $XDG_RUNTIME_DIR environment variable. If the variable is not set,
// applications should fall back to a replacement directory with similar
// capabilities. Applications should use this directory for communication
// and synchronization purposes and should not place larger files in it,
@ -68,6 +72,12 @@ var (
// swapped out to disk.
RuntimeDir string
// StateHome defines the base directory relative to which user-specific
// volatile data files should be stored. This directory is defined by
// the non-standard $XDG_STATE_HOME environment variable. If the variable
// is not set, a default equal to ~/.local/state should be used.
StateHome string
// UserDirs defines the locations of well known user directories.
UserDirs UserDirectories
@ -96,6 +106,7 @@ func Reload() {
ConfigDirs = baseDirs.config
CacheHome = baseDirs.cacheHome
RuntimeDir = baseDirs.runtime
StateHome = baseDirs.stateHome
FontDirs = baseDirs.fonts
ApplicationDirs = baseDirs.applications
@ -143,6 +154,18 @@ func RuntimeFile(relPath string) (string, error) {
return baseDirs.runtimeFile(relPath)
}
// StateFile returns a suitable location for the specified state file. State
// files are usually volatile data files, not suitable to be stored relative
// to the $XDG_DATA_HOME directory.
// The relPath parameter must contain the name of the state file, and
// optionally, a set of parent directories (e.g. appname/app.state).
// If the specified directories do not exist, they will be created relative
// to the base state directory. On failure, an error containing the
// attempted paths is returned.
func StateFile(relPath string) (string, error) {
return baseDirs.stateFile(relPath)
}
// SearchDataFile searches for specified file in the data search paths.
// The relPath parameter must contain the name of the data file, and
// optionally, a set of parent directories (e.g. appname/app.data). If the
@ -175,6 +198,14 @@ func SearchRuntimeFile(relPath string) (string, error) {
return baseDirs.searchRuntimeFile(relPath)
}
// SearchStateFile searches for the specified file in the state search path.
// The relPath parameter must contain the name of the state file, and
// optionally, a set of parent directories (e.g. appname/app.state). If the
// file cannot be found, an error specifying the searched path is returned.
func SearchStateFile(relPath string) (string, error) {
return baseDirs.searchStateFile(relPath)
}
func init() {
Reload()
}
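
A small sketch using the state-directory helpers documented above (`StateHome`, `StateFile`, `SearchStateFile`); the `tea/app.state` path is a hypothetical placeholder:

```go
package main

import (
	"io/ioutil"
	"log"

	"github.com/adrg/xdg"
)

func main() {
	log.Println("State home:", xdg.StateHome)

	// Resolve (and create parent directories for) a state file relative
	// to the base state directory.
	statePath, err := xdg.StateFile("tea/app.state")
	if err != nil {
		log.Fatal(err)
	}
	if err := ioutil.WriteFile(statePath, []byte("last-run\n"), 0600); err != nil {
		log.Fatal(err)
	}

	// Later, look the file up again through the state search path.
	found, err := xdg.SearchStateFile("tea/app.state")
	if err != nil {
		log.Fatal(err)
	}
	log.Println("Found state file at:", found)
}
```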

View File

@ -20,11 +20,6 @@ linters:
- wsl
- gomnd
- gocognit
- goerr113
- nolintlint
- testpackage
- godot
- nestif
linters-settings:
govet:

View File

@ -4,7 +4,7 @@ go:
- "1.13.x"
script:
- go test -v ./...
- curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s v1.26.0
- curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s v1.22.2
- ./bin/golangci-lint run
- git clean -fdx .
after_success:

View File

@ -30,14 +30,14 @@ var Awk = internal.Register(MustNewLexer(
"root": {
{`^(?=\s|/)`, Text, Push("slashstartsregex")},
Include("commentsandwhitespace"),
{`\+\+|--|\|\||&&|in\b|\$|!?~|\|&|(\*\*|[-<>+*%\^/!=|])=?`, Operator, Push("slashstartsregex")},
{`\+\+|--|\|\||&&|in\b|\$|!?~|(\*\*|[-<>+*%\^/!=|])=?`, Operator, Push("slashstartsregex")},
{`[{(\[;,]`, Punctuation, Push("slashstartsregex")},
{`[})\].]`, Punctuation, nil},
{`(break|continue|do|while|exit|for|if|else|return|switch|case|default)\b`, Keyword, Push("slashstartsregex")},
{`(break|continue|do|while|exit|for|if|else|return)\b`, Keyword, Push("slashstartsregex")},
{`function\b`, KeywordDeclaration, Push("slashstartsregex")},
{`(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|length|match|split|patsplit|sprintf|sub|substr|tolower|toupper|close|fflush|getline|next(file)|print|printf|strftime|systime|mktime|delete|system|strtonum|and|compl|lshift|or|rshift|asorti?|isarray|bindtextdomain|dcn?gettext|@(include|load|namespace))\b`, KeywordReserved, nil},
{`(ARGC|ARGIND|ARGV|BEGIN(FILE)?|BINMODE|CONVFMT|ENVIRON|END(FILE)?|ERRNO|FIELDWIDTHS|FILENAME|FNR|FPAT|FS|IGNORECASE|LINT|NF|NR|OFMT|OFS|ORS|PROCINFO|RLENGTH|RS|RSTART|RT|SUBSEP|TEXTDOMAIN)\b`, NameBuiltin, nil},
{`[@$a-zA-Z_]\w*`, NameOther, nil},
{`(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|length|match|split|sprintf|sub|substr|tolower|toupper|close|fflush|getline|next|nextfile|print|printf|strftime|systime|delete|system)\b`, KeywordReserved, nil},
{`(ARGC|ARGIND|ARGV|BEGIN|CONVFMT|ENVIRON|END|ERRNO|FIELDWIDTHS|FILENAME|FNR|FS|IGNORECASE|NF|NR|OFMT|OFS|ORFS|RLENGTH|RS|RSTART|RT|SUBSEP)\b`, NameBuiltin, nil},
{`[$a-zA-Z_]\w*`, NameOther, nil},
{`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil},
{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
{`[0-9]+`, LiteralNumberInteger, nil},

View File

@ -36,7 +36,7 @@ var Bash = internal.Register(MustNewLexer(
{`\b(if|fi|else|while|do|done|for|then|return|function|case|select|continue|until|esac|elif)(\s*)\b`, ByGroups(Keyword, Text), nil},
{"\\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|complete|declare|dirs|disown|echo|enable|eval|exec|exit|export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|shopt|source|suspend|test|time|times|trap|true|type|typeset|ulimit|umask|unalias|unset|wait)(?=[\\s)`])", NameBuiltin, nil},
{`\A#!.+\n`, CommentPreproc, nil},
{`#.*(\S|$)`, CommentSingle, nil},
{`#.*\S`, CommentSingle, nil},
{`\\[\w\W]`, LiteralStringEscape, nil},
{`(\b\w+)(\s*)(\+?=)`, ByGroups(NameVariable, Text, Operator), nil},
{`[\[\]{}()=]`, Operator, nil},

View File

@ -1,206 +0,0 @@
package c
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// caddyfileCommon are the rules common to both of the lexer variants
var caddyfileCommon = Rules{
"site_block_common": {
// Import keyword
{`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil},
// Matcher definition
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
// Matcher token stub for docs
{`\[\<matcher\>\]`, NameDecorator, Push("matcher")},
// These cannot have matchers but may have things that look like
// matchers in their arguments, so we just parse as a subdirective.
{`try_files`, Keyword, Push("subdirective")},
// These are special, they can nest more directives
{`handle_errors|handle|route|handle_path|not`, Keyword, Push("nested_directive")},
// Any other directive
{`[^\s#]+`, Keyword, Push("directive")},
Include("base"),
},
"matcher": {
{`\{`, Punctuation, Push("block")},
// Not can be one-liner
{`not`, Keyword, Push("deep_not_matcher")},
// Any other same-line matcher
{`[^\s#]+`, Keyword, Push("arguments")},
// Terminators
{`\n`, Text, Pop(1)},
{`\}`, Punctuation, Pop(1)},
Include("base"),
},
"block": {
{`\}`, Punctuation, Pop(2)},
// Not can be one-liner
{`not`, Keyword, Push("not_matcher")},
// Any other subdirective
{`[^\s#]+`, Keyword, Push("subdirective")},
Include("base"),
},
"nested_block": {
{`\}`, Punctuation, Pop(2)},
// Matcher definition
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
// Something that starts with literally < is probably a docs stub
{`\<[^#]+\>`, Keyword, Push("nested_directive")},
// Any other directive
{`[^\s#]+`, Keyword, Push("nested_directive")},
Include("base"),
},
"not_matcher": {
{`\}`, Punctuation, Pop(2)},
{`\{(?=\s)`, Punctuation, Push("block")},
{`[^\s#]+`, Keyword, Push("arguments")},
{`\s+`, Text, nil},
},
"deep_not_matcher": {
{`\}`, Punctuation, Pop(2)},
{`\{(?=\s)`, Punctuation, Push("block")},
{`[^\s#]+`, Keyword, Push("deep_subdirective")},
{`\s+`, Text, nil},
},
"directive": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("matcher_token"),
Include("comments_pop_1"),
{`\n`, Text, Pop(1)},
Include("base"),
},
"nested_directive": {
{`\{(?=\s)`, Punctuation, Push("nested_block")},
Include("matcher_token"),
Include("comments_pop_1"),
{`\n`, Text, Pop(1)},
Include("base"),
},
"subdirective": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("comments_pop_1"),
{`\n`, Text, Pop(1)},
Include("base"),
},
"arguments": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("comments_pop_2"),
{`\\\n`, Text, nil}, // Skip escaped newlines
{`\n`, Text, Pop(2)},
Include("base"),
},
"deep_subdirective": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("comments_pop_3"),
{`\n`, Text, Pop(3)},
Include("base"),
},
"matcher_token": {
{`@[^\s]+`, NameDecorator, Push("arguments")}, // Named matcher
{`/[^\s]+`, NameDecorator, Push("arguments")}, // Path matcher
{`\*`, NameDecorator, Push("arguments")}, // Wildcard path matcher
{`\[\<matcher\>\]`, NameDecorator, Push("arguments")}, // Matcher token stub for docs
},
"comments": {
{`^#.*\n`, CommentSingle, nil}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, nil}, // Comment preceded by whitespace
},
"comments_pop_1": {
{`^#.*\n`, CommentSingle, Pop(1)}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, Pop(1)}, // Comment preceded by whitespace
},
"comments_pop_2": {
{`^#.*\n`, CommentSingle, Pop(2)}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, Pop(2)}, // Comment preceded by whitespace
},
"comments_pop_3": {
{`^#.*\n`, CommentSingle, Pop(3)}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, Pop(3)}, // Comment preceded by whitespace
},
"base": {
Include("comments"),
{`(on|off|first|last|before|after|internal|strip_prefix|strip_suffix|replace)\b`, NameConstant, nil},
{`(https?://)?([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Name, Punctuation, LiteralNumberInteger), nil},
{`[a-z-]+/[a-z-+]+`, LiteralString, nil},
{`[0-9]+[km]?\b`, LiteralNumberInteger, nil},
{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, // Placeholder
{`\[(?=[^#{}$]+\])`, Punctuation, nil},
{`\]|\|`, Punctuation, nil},
{`[^\s#{}$\]]+`, LiteralString, nil},
{`/[^\s#]*`, Name, nil},
{`\s+`, Text, nil},
},
}
// Caddyfile lexer.
var Caddyfile = internal.Register(MustNewLexer(
&Config{
Name: "Caddyfile",
Aliases: []string{"caddyfile", "caddy"},
Filenames: []string{"Caddyfile*"},
MimeTypes: []string{},
},
Rules{
"root": {
Include("comments"),
// Global options block
{`^\s*(\{)\s*$`, ByGroups(Punctuation), Push("globals")},
// Snippets
{`(\([^\s#]+\))(\s*)(\{)`, ByGroups(NameVariableAnonymous, Text, Punctuation), Push("snippet")},
// Site label
{`[^#{(\s,]+`, GenericHeading, Push("label")},
// Site label with placeholder
{`\{[\w+.\$-]+\}`, LiteralStringEscape, Push("label")},
{`\s+`, Text, nil},
},
"globals": {
{`\}`, Punctuation, Pop(1)},
{`[^\s#]+`, Keyword, Push("directive")},
Include("base"),
},
"snippet": {
{`\}`, Punctuation, Pop(1)},
// Matcher definition
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
// Any directive
{`[^\s#]+`, Keyword, Push("directive")},
Include("base"),
},
"label": {
// Allow multiple labels, comma separated, newlines after
// a comma means another label is coming
{`,\s*\n?`, Text, nil},
{` `, Text, nil},
// Site label with placeholder
{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil},
// Site label
{`[^#{(\s,]+`, GenericHeading, nil},
// Comment after non-block label (hack because comments end in \n)
{`#.*\n`, CommentSingle, Push("site_block")},
// Note: if \n, we'll never pop out of the site_block, it's valid
{`\{(?=\s)|\n`, Punctuation, Push("site_block")},
},
"site_block": {
{`\}`, Punctuation, Pop(2)},
Include("site_block_common"),
},
}.Merge(caddyfileCommon),
))
// Caddyfile directive-only lexer.
var CaddyfileDirectives = internal.Register(MustNewLexer(
&Config{
Name: "Caddyfile Directives",
Aliases: []string{"caddyfile-directives", "caddyfile-d", "caddy-d"},
Filenames: []string{},
MimeTypes: []string{},
},
Rules{
// Same as "site_block" in Caddyfile
"root": {
Include("site_block_common"),
},
}.Merge(caddyfileCommon),
))

View File

@ -1,12 +1,15 @@
package circular
import (
"strings"
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/h"
"github.com/alecthomas/chroma/lexers/internal"
)
// PHP lexer for pure PHP code (not embedded in HTML).
var PHP = internal.Register(MustNewLexer(
// PHP lexer.
var PHP = internal.Register(DelegatingLexer(h.HTML, MustNewLexer(
&Config{
Name: "PHP",
Aliases: []string{"php", "php3", "php4", "php5"},
@ -16,65 +19,73 @@ var PHP = internal.Register(MustNewLexer(
CaseInsensitive: true,
EnsureNL: true,
},
phpCommonRules.Rename("php", "root"),
))
var phpCommonRules = Rules{
"php": {
{`\?>`, CommentPreproc, Pop(1)},
{`(<<<)([\'"]?)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)(\2\n.*?\n\s*)(\3)(;?)(\n)`, ByGroups(LiteralString, LiteralString, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, Punctuation, Text), nil},
{`\s+`, Text, nil},
{`#.*?\n`, CommentSingle, nil},
{`//.*?\n`, CommentSingle, nil},
{`/\*\*/`, CommentMultiline, nil},
{`/\*\*.*?\*/`, LiteralStringDoc, nil},
{`/\*.*?\*/`, CommentMultiline, nil},
{`(->|::)(\s*)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Operator, Text, NameAttribute), nil},
{`[~!%^&*+=|:.<>/@-]+`, Operator, nil},
{`\?`, Operator, nil},
{`[\[\]{}();,]+`, Punctuation, nil},
{`(class)(\s+)`, ByGroups(Keyword, Text), Push("classname")},
{`(function)(\s*)(?=\()`, ByGroups(Keyword, Text), nil},
{`(function)(\s+)(&?)(\s*)`, ByGroups(Keyword, Text, Operator, Text), Push("functionname")},
{`(const)(\s+)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Keyword, Text, NameConstant), nil},
{`(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|FALSE|print|for|require|continue|foreach|require_once|declare|return|default|static|do|switch|die|stdClass|echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|virtual|endfor|include_once|while|endforeach|global|endif|list|endswitch|new|endwhile|not|array|E_ALL|NULL|final|php_user_filter|interface|implements|public|private|protected|abstract|clone|try|catch|throw|this|use|namespace|trait|yield|finally)\b`, Keyword, nil},
{`(true|false|null)\b`, KeywordConstant, nil},
Include("magicconstants"),
{`\$\{\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*\}`, NameVariable, nil},
{`\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameVariable, nil},
{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameOther, nil},
{`(\d+\.\d*|\d*\.\d+)(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil},
{`\d+e[+-]?[0-9]+`, LiteralNumberFloat, nil},
{`0[0-7]+`, LiteralNumberOct, nil},
{`0x[a-f0-9]+`, LiteralNumberHex, nil},
{`\d+`, LiteralNumberInteger, nil},
{`0b[01]+`, LiteralNumberBin, nil},
{`'([^'\\]*(?:\\.[^'\\]*)*)'`, LiteralStringSingle, nil},
{"`([^`\\\\]*(?:\\\\.[^`\\\\]*)*)`", LiteralStringBacktick, nil},
{`"`, LiteralStringDouble, Push("string")},
Rules{
"root": {
{`<\?(php)?`, CommentPreproc, Push("php")},
{`[^<]+`, Other, nil},
{`<`, Other, nil},
},
"php": {
{`\?>`, CommentPreproc, Pop(1)},
{`(<<<)([\'"]?)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)(\2\n.*?\n\s*)(\3)(;?)(\n)`, ByGroups(LiteralString, LiteralString, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, Punctuation, Text), nil},
{`\s+`, Text, nil},
{`#.*?\n`, CommentSingle, nil},
{`//.*?\n`, CommentSingle, nil},
{`/\*\*/`, CommentMultiline, nil},
{`/\*\*.*?\*/`, LiteralStringDoc, nil},
{`/\*.*?\*/`, CommentMultiline, nil},
{`(->|::)(\s*)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Operator, Text, NameAttribute), nil},
{`[~!%^&*+=|:.<>/@-]+`, Operator, nil},
{`\?`, Operator, nil},
{`[\[\]{}();,]+`, Punctuation, nil},
{`(class)(\s+)`, ByGroups(Keyword, Text), Push("classname")},
{`(function)(\s*)(?=\()`, ByGroups(Keyword, Text), nil},
{`(function)(\s+)(&?)(\s*)`, ByGroups(Keyword, Text, Operator, Text), Push("functionname")},
{`(const)(\s+)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Keyword, Text, NameConstant), nil},
{`(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|FALSE|print|for|require|continue|foreach|require_once|declare|return|default|static|do|switch|die|stdClass|echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|virtual|endfor|include_once|while|endforeach|global|endif|list|endswitch|new|endwhile|not|array|E_ALL|NULL|final|php_user_filter|interface|implements|public|private|protected|abstract|clone|try|catch|throw|this|use|namespace|trait|yield|finally)\b`, Keyword, nil},
{`(true|false|null)\b`, KeywordConstant, nil},
Include("magicconstants"),
{`\$\{\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*\}`, NameVariable, nil},
{`\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameVariable, nil},
{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameOther, nil},
{`(\d+\.\d*|\d*\.\d+)(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil},
{`\d+e[+-]?[0-9]+`, LiteralNumberFloat, nil},
{`0[0-7]+`, LiteralNumberOct, nil},
{`0x[a-f0-9]+`, LiteralNumberHex, nil},
{`\d+`, LiteralNumberInteger, nil},
{`0b[01]+`, LiteralNumberBin, nil},
{`'([^'\\]*(?:\\.[^'\\]*)*)'`, LiteralStringSingle, nil},
{"`([^`\\\\]*(?:\\\\.[^`\\\\]*)*)`", LiteralStringBacktick, nil},
{`"`, LiteralStringDouble, Push("string")},
},
"magicfuncs": {
{Words(``, `\b`, `__construct`, `__destruct`, `__call`, `__callStatic`, `__get`, `__set`, `__isset`, `__unset`, `__sleep`, `__wakeup`, `__toString`, `__invoke`, `__set_state`, `__clone`, `__debugInfo`), NameFunctionMagic, nil},
},
"magicconstants": {
{Words(``, `\b`, `__LINE__`, `__FILE__`, `__DIR__`, `__FUNCTION__`, `__CLASS__`, `__TRAIT__`, `__METHOD__`, `__NAMESPACE__`), NameConstant, nil},
},
"classname": {
{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameClass, Pop(1)},
},
"functionname": {
Include("magicfuncs"),
{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameFunction, Pop(1)},
Default(Pop(1)),
},
"string": {
{`"`, LiteralStringDouble, Pop(1)},
{`[^{$"\\]+`, LiteralStringDouble, nil},
{`\\([nrt"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})`, LiteralStringEscape, nil},
{`\$(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*(\[\S+?\]|->(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)?`, LiteralStringInterpol, nil},
{`(\{\$\{)(.*?)(\}\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil},
{`(\{)(\$.*?)(\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil},
{`(\$\{)(\S+)(\})`, ByGroups(LiteralStringInterpol, NameVariable, LiteralStringInterpol), nil},
{`[${\\]`, LiteralStringDouble, nil},
},
},
"magicfuncs": {
{Words(``, `\b`, `__construct`, `__destruct`, `__call`, `__callStatic`, `__get`, `__set`, `__isset`, `__unset`, `__sleep`, `__wakeup`, `__toString`, `__invoke`, `__set_state`, `__clone`, `__debugInfo`), NameFunctionMagic, nil},
},
"magicconstants": {
{Words(``, `\b`, `__LINE__`, `__FILE__`, `__DIR__`, `__FUNCTION__`, `__CLASS__`, `__TRAIT__`, `__METHOD__`, `__NAMESPACE__`), NameConstant, nil},
},
"classname": {
{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameClass, Pop(1)},
},
"functionname": {
Include("magicfuncs"),
{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameFunction, Pop(1)},
Default(Pop(1)),
},
"string": {
{`"`, LiteralStringDouble, Pop(1)},
{`[^{$"\\]+`, LiteralStringDouble, nil},
{`\\([nrt"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})`, LiteralStringEscape, nil},
{`\$(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*(\[\S+?\]|->(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)?`, LiteralStringInterpol, nil},
{`(\{\$\{)(.*?)(\}\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil},
{`(\{)(\$.*?)(\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil},
{`(\$\{)(\S+)(\})`, ByGroups(LiteralStringInterpol, NameVariable, LiteralStringInterpol), nil},
{`[${\\]`, LiteralStringDouble, nil},
},
}
).SetAnalyser(func(text string) float32 {
if strings.Contains(text, "<?php") {
return 0.5
}
return 0.0
})))
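The SetAnalyser hook above is what chroma's content sniffing consults: any text containing "<?php" gives this lexer a 0.5 score. A rough sketch of how that score is typically used (lexers.Analyse and lexers.Fallback are registry helpers; the sample input is made up):

package main

import (
	"fmt"

	"github.com/alecthomas/chroma/lexers"
)

func main() {
	source := "<?php echo strtoupper('hi'); ?>"
	// Analyse asks each registered lexer's analyser to score the text and
	// returns the highest-scoring one; the analyser above scores 0.5 for
	// anything containing "<?php".
	lexer := lexers.Analyse(source)
	if lexer == nil {
		lexer = lexers.Fallback // plain-text fallback when nothing matches
	}
	fmt.Println(lexer.Config().Name) // expected: "PHP"
}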

@ -1,34 +0,0 @@
package circular
import (
"strings"
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/h"
"github.com/alecthomas/chroma/lexers/internal"
)
// PHTML lexer is PHP in HTML.
var PHTML = internal.Register(DelegatingLexer(h.HTML, MustNewLexer(
&Config{
Name: "PHTML",
Aliases: []string{"phtml"},
Filenames: []string{"*.phtml"},
MimeTypes: []string{"application/x-php", "application/x-httpd-php", "application/x-httpd-php3", "application/x-httpd-php4", "application/x-httpd-php5"},
DotAll: true,
CaseInsensitive: true,
EnsureNL: true,
},
Rules{
"root": {
{`<\?(php)?`, CommentPreproc, Push("php")},
{`[^<]+`, Other, nil},
{`<`, Other, nil},
},
}.Merge(phpCommonRules),
).SetAnalyser(func(text string) float32 {
if strings.Contains(text, "<?php") {
return 0.5
}
return 0.0
})))

@ -28,13 +28,6 @@ var Elixir = internal.Register(MustNewLexer(
{`:"`, LiteralStringSymbol, Push("string_double_atom")},
{`:'`, LiteralStringSymbol, Push("string_single_atom")},
{`((?:\.\.\.|<<>>|%\{\}|%|\{\})|(?:(?:\.\.\.|[a-z_]\w*[!?]?)|[A-Z]\w*(?:\.[A-Z]\w*)*|(?:\<\<\<|\>\>\>|\|\|\||\&\&\&|\^\^\^|\~\~\~|\=\=\=|\!\=\=|\~\>\>|\<\~\>|\|\~\>|\<\|\>|\=\=|\!\=|\<\=|\>\=|\&\&|\|\||\<\>|\+\+|\-\-|\|\>|\=\~|\-\>|\<\-|\||\.|\=|\~\>|\<\~|\<|\>|\+|\-|\*|\/|\!|\^|\&)))(:)(?=\s|\n)`, ByGroups(LiteralStringSymbol, Punctuation), nil},
{`(fn|do|end|after|else|rescue|catch)\b`, Keyword, nil},
{`(not|and|or|when|in)\b`, OperatorWord, nil},
{`(case|cond|for|if|unless|try|receive|raise|quote|unquote|unquote_splicing|throw|super|while)\b`, Keyword, nil},
{`(def|defp|defmodule|defprotocol|defmacro|defmacrop|defdelegate|defexception|defstruct|defimpl|defcallback)\b`, KeywordDeclaration, nil},
{`(import|require|use|alias)\b`, KeywordNamespace, nil},
{`(nil|true|false)\b`, NameConstant, nil},
{`(_|__MODULE__|__DIR__|__ENV__|__CALLER__)\b`, NamePseudo, nil},
{`@(?:\.\.\.|[a-z_]\w*[!?]?)`, NameAttribute, nil},
{`(?:\.\.\.|[a-z_]\w*[!?]?)`, Name, nil},
{`(%?)([A-Z]\w*(?:\.[A-Z]\w*)*)`, ByGroups(Punctuation, NameClass), nil},

@ -15,7 +15,6 @@ var Go = internal.Register(MustNewLexer(
Aliases: []string{"go", "golang"},
Filenames: []string{"*.go"},
MimeTypes: []string{"text/x-gosrc"},
EnsureNL: true,
},
Rules{
"root": {

@ -19,8 +19,8 @@ var HTTP = internal.Register(httpBodyContentTypeLexer(MustNewLexer(
},
Rules{
"root": {
{`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)([12]\.[01])(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")},
{`(HTTP)(/)([12]\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")},
{`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)(1\.[01])(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")},
{`(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")},
},
"headers": {
{`([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpHeaderBlock), nil},

@ -10,7 +10,7 @@ var Ini = internal.Register(MustNewLexer(
&Config{
Name: "INI",
Aliases: []string{"ini", "cfg", "dosini"},
Filenames: []string{"*.ini", "*.cfg", "*.inf", ".gitconfig", ".editorconfig"},
Filenames: []string{"*.ini", "*.cfg", "*.inf", ".gitconfig"},
MimeTypes: []string{"text/x-ini", "text/inf"},
},
Rules{

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

@ -24,71 +24,32 @@ var Kotlin = internal.Register(MustNewLexer(
{`//[^\n]*\n?`, CommentSingle, nil},
{`/[*].*?[*]/`, CommentMultiline, nil},
{`\n`, Text, nil},
{`!==|!in|!is|===`, Operator, nil},
{`%=|&&|\*=|\+\+|\+=|--|-=|->|\.\.|\/=|::|<=|==|>=|!!|!=|\|\||\?[:.]`, Operator, nil},
{`[~!%^&*()+=|\[\]:;,.<>\/?-]`, Punctuation, nil},
{`::|!!|\?[:.]`, Operator, nil},
{`[~!%^&*()+=|\[\]:;,.<>/?-]`, Punctuation, nil},
{`[{}]`, Punctuation, nil},
{`"""`, LiteralString, Push("rawstring")},
{`"`, LiteralStringDouble, Push("string")},
{`(')(\\u[0-9a-fA-F]{4})(')`, ByGroups(LiteralStringChar, LiteralStringEscape, LiteralStringChar), nil},
{`"""[^"]*"""`, LiteralString, nil},
{`"(\\\\|\\"|[^"\n])*["\n]`, LiteralString, nil},
{`'\\.'|'[^\\]'`, LiteralStringChar, nil},
{`0[xX][0-9a-fA-F]+[Uu]?[Ll]?|[0-9]+(\.[0-9]*)?([eE][+-][0-9]+)?[fF]?[Uu]?[Ll]?`, LiteralNumber, nil},
{`(companion)(\s+)(object)`, ByGroups(Keyword, Text, Keyword), nil},
{`(class|interface|object)(\s+)`, ByGroups(Keyword, Text), Push("class")},
{`(package|import)(\s+)`, ByGroups(Keyword, Text), Push("package")},
{`(val|var)(\s+)`, ByGroups(Keyword, Text), Push("property")},
{`(fun)(\s+)`, ByGroups(Keyword, Text), Push("function")},
{`(abstract|actual|annotation|as|as\?|break|by|catch|class|companion|const|constructor|continue|crossinline|data|delegate|do|dynamic|else|enum|expect|external|false|field|file|final|finally|for|fun|get|if|import|in|infix|init|inline|inner|interface|internal|is|it|lateinit|noinline|null|object|open|operator|out|override|package|param|private|property|protected|public|receiver|reified|return|sealed|set|setparam|super|suspend|tailrec|this|throw|true|try|typealias|typeof|val|var|vararg|when|where|while)\b`, Keyword, nil},
{`@[` + kotlinIdentifier + `]+`, NameDecorator, nil},
{`[` + kotlinIdentifier + `]+`, Name, nil},
{`(fun)(\s+)(<[^>]*>\s+)?`, ByGroups(Keyword, Text, Text), Push("function")},
{`(abstract|actual|annotation|as|break|by|catch|class|companion|const|constructor|continue|crossinline|data|do|dynamic|else|enum|expect|external|false|final|finally|for|fun|get|if|import|in|infix|inline|inner|interface|internal|is|lateinit|noinline|null|object|open|operator|out|override|package|private|protected|public|reified|return|sealed|set|super|suspend|tailrec|this|throw|true|try|val|var|vararg|when|where|while)\b`, Keyword, nil},
{"(@?[" + kotlinIdentifier + "]*`)", Name, nil},
},
"package": {
{`\S+`, NameNamespace, Pop(1)},
},
"class": {
// \x60 is the back tick character (`)
{`\x60[^\x60]+?\x60`, NameClass, Pop(1)},
{`[` + kotlinIdentifier + `]+`, NameClass, Pop(1)},
{"(@?[" + kotlinIdentifier + "]*`)", NameClass, Pop(1)},
},
"property": {
{`\x60[^\x60]+?\x60`, NameProperty, Pop(1)},
{`[` + kotlinIdentifier + `]+`, NameProperty, Pop(1)},
},
"generics-specification": {
{`<`, Punctuation, Push("generics-specification")}, // required for generics inside generics e.g. <T : List<Int> >
{`>`, Punctuation, Pop(1)},
{`[,:*?]`, Punctuation, nil},
{`(in|out|reified)`, Keyword, nil},
{`\x60[^\x60]+?\x60`, NameClass, nil},
{`[` + kotlinIdentifier + `]+`, NameClass, nil},
{`\s+`, Text, nil},
{"(@?[" + kotlinIdentifier + " ]*`)", NameProperty, Pop(1)},
},
"function": {
{`<`, Punctuation, Push("generics-specification")},
{`\x60[^\x60]+?\x60`, NameFunction, Pop(1)},
{`[` + kotlinIdentifier + `]+`, NameFunction, Pop(1)},
{`\s+`, Text, nil},
},
"rawstring": {
// raw strings don't allow character escaping
{`"""`, LiteralString, Pop(1)},
{`(?:[^$"]+|\"{1,2}[^"])+`, LiteralString, nil},
Include("string-interpol"),
// remaining dollar signs are just a string
{`\$`, LiteralString, nil},
},
"string": {
{`\\[tbnr'"\\\$]`, LiteralStringEscape, nil},
{`\\u[0-9a-fA-F]{4}`, LiteralStringEscape, nil},
{`"`, LiteralStringDouble, Pop(1)},
Include("string-interpol"),
{`[^\n\\"$]+`, LiteralStringDouble, nil},
// remaining dollar signs are just a string
{`\$`, LiteralStringDouble, nil},
},
"string-interpol": {
{`\$[` + kotlinIdentifier + `]+`, LiteralStringInterpol, nil},
{`\${[^}\n]*}`, LiteralStringInterpol, nil},
{"(@?[" + kotlinIdentifier + " ]*`)", NameFunction, Pop(1)},
},
},
))

@ -32,7 +32,6 @@ import (
_ "github.com/alecthomas/chroma/lexers/w"
_ "github.com/alecthomas/chroma/lexers/x"
_ "github.com/alecthomas/chroma/lexers/y"
_ "github.com/alecthomas/chroma/lexers/z"
)
// Registry of Lexers.

@ -1,59 +0,0 @@
package p
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Pony lexer.
var Pony = internal.Register(MustNewLexer(
&Config{
Name: "Pony",
Aliases: []string{"pony"},
Filenames: []string{"*.pony"},
MimeTypes: []string{},
},
Rules{
"root": {
{`\n`, Text, nil},
{`[^\S\n]+`, Text, nil},
{`//.*\n`, CommentSingle, nil},
{`/\*`, CommentMultiline, Push("nested_comment")},
{`"""(?:.|\n)*?"""`, LiteralStringDoc, nil},
{`"`, LiteralString, Push("string")},
{`\'.*\'`, LiteralStringChar, nil},
{`=>|[]{}:().~;,|&!^?[]`, Punctuation, nil},
{Words(``, `\b`, `addressof`, `and`, `as`, `consume`, `digestof`, `is`, `isnt`, `not`, `or`), OperatorWord, nil},
{`!=|==|<<|>>|[-+/*%=<>]`, Operator, nil},
{Words(``, `\b`, `box`, `break`, `compile_error`, `compile_intrinsic`, `continue`, `do`, `else`, `elseif`, `embed`, `end`, `error`, `for`, `if`, `ifdef`, `in`, `iso`, `lambda`, `let`, `match`, `object`, `recover`, `ref`, `repeat`, `return`, `tag`, `then`, `this`, `trn`, `try`, `until`, `use`, `var`, `val`, `where`, `while`, `with`, `#any`, `#read`, `#send`, `#share`), Keyword, nil},
{`(actor|class|struct|primitive|interface|trait|type)((?:\s)+)`, ByGroups(Keyword, Text), Push("typename")},
{`(new|fun|be)((?:\s)+)`, ByGroups(Keyword, Text), Push("methodname")},
{Words(``, `\b`, `U8`, `U16`, `U32`, `U64`, `ULong`, `USize`, `U128`, `Unsigned`, `Stringable`, `String`, `StringBytes`, `StringRunes`, `InputNotify`, `InputStream`, `Stdin`, `ByteSeq`, `ByteSeqIter`, `OutStream`, `StdStream`, `SourceLoc`, `I8`, `I16`, `I32`, `I64`, `ILong`, `ISize`, `I128`, `Signed`, `Seq`, `RuntimeOptions`, `Real`, `Integer`, `SignedInteger`, `UnsignedInteger`, `FloatingPoint`, `Number`, `Int`, `ReadSeq`, `ReadElement`, `Pointer`, `Platform`, `NullablePointer`, `None`, `Iterator`, `F32`, `F64`, `Float`, `Env`, `DoNotOptimise`, `DisposableActor`, `Less`, `Equal`, `Greater`, `Compare`, `HasEq`, `Equatable`, `Comparable`, `Bool`, `AsioEventID`, `AsioEventNotify`, `AsioEvent`, `Array`, `ArrayKeys`, `ArrayValues`, `ArrayPairs`, `Any`, `AmbientAuth`), KeywordType, nil},
{`_?[A-Z]\w*`, NameClass, nil},
{`string\(\)`, NameOther, nil},
{`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+`, LiteralNumberFloat, nil},
{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
{`\d+`, LiteralNumberInteger, nil},
{`(true|false)\b`, Keyword, nil},
{`_\d*`, Name, nil},
{`_?[a-z][\w\'_]*`, Name, nil},
},
"typename": {
{`(iso|trn|ref|val|box|tag)?((?:\s)*)(_?[A-Z]\w*)`, ByGroups(Keyword, Text, NameClass), Pop(1)},
},
"methodname": {
{`(iso|trn|ref|val|box|tag)?((?:\s)*)(_?[a-z]\w*)`, ByGroups(Keyword, Text, NameFunction), Pop(1)},
},
"nested_comment": {
{`[^*/]+`, CommentMultiline, nil},
{`/\*`, CommentMultiline, Push()},
{`\*/`, CommentMultiline, Pop(1)},
{`[*/]`, CommentMultiline, nil},
},
"string": {
{`"`, LiteralString, Pop(1)},
{`\\"`, LiteralString, nil},
{`[^\\"]+`, LiteralString, nil},
},
},
))

@ -22,7 +22,7 @@ var TOML = internal.Register(MustNewLexer(
{`[+-]?[0-9](_?\d)*`, LiteralNumberInteger, nil},
{`"(\\\\|\\"|[^"])*"`, StringDouble, nil},
{`'(\\\\|\\'|[^'])*'`, StringSingle, nil},
{`[.,=\[\]{}]`, Punctuation, nil},
{`[.,=\[\]]`, Punctuation, nil},
{`[^\W\d]\w*`, NameOther, nil},
},
},

@ -38,14 +38,14 @@ var TypeScript = internal.Register(MustNewLexer(
{`\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?`, Operator, Push("slashstartsregex")},
{`[{(\[;,]`, Punctuation, Push("slashstartsregex")},
{`[})\].]`, Punctuation, nil},
{`(for|in|of|while|do|break|return|yield|continue|switch|case|default|if|else|throw|try|catch|finally|new|delete|typeof|instanceof|keyof|asserts|is|infer|await|void|this)\b`, Keyword, Push("slashstartsregex")},
{`(for|in|while|do|break|return|continue|switch|case|default|if|else|throw|try|catch|finally|new|delete|typeof|instanceof|void|this)\b`, Keyword, Push("slashstartsregex")},
{`(var|let|with|function)\b`, KeywordDeclaration, Push("slashstartsregex")},
{`(abstract|async|boolean|class|const|debugger|enum|export|extends|from|get|global|goto|implements|import|interface|namespace|package|private|protected|public|readonly|require|set|static|super|type)\b`, KeywordReserved, nil},
{`(abstract|boolean|byte|char|class|const|debugger|double|enum|export|extends|final|float|goto|implements|import|int|interface|long|native|package|private|protected|public|short|static|super|synchronized|throws|transient|volatile)\b`, KeywordReserved, nil},
{`(true|false|null|NaN|Infinity|undefined)\b`, KeywordConstant, nil},
{`(Array|Boolean|Date|Error|Function|Math|Number|Object|Packages|RegExp|String|decodeURI|decodeURIComponent|encodeURI|encodeURIComponent|eval|isFinite|isNaN|parseFloat|parseInt|document|this|window)\b`, NameBuiltin, nil},
{`(Array|Boolean|Date|Error|Function|Math|netscape|Number|Object|Packages|RegExp|String|sun|decodeURI|decodeURIComponent|encodeURI|encodeURIComponent|Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|window)\b`, NameBuiltin, nil},
{`\b(module)(\s*)(\s*[\w?.$][\w?.$]*)(\s*)`, ByGroups(KeywordReserved, Text, NameOther, Text), Push("slashstartsregex")},
{`\b(string|bool|number|any|never|object|symbol|unique|unknown|bigint)\b`, KeywordType, nil},
{`\b(constructor|declare|interface|as)\b`, KeywordReserved, nil},
{`\b(string|bool|number)\b`, KeywordType, nil},
{`\b(constructor|declare|interface|as|AS)\b`, KeywordReserved, nil},
{`(super)(\s*)(\([\w,?.$\s]+\s*\))`, ByGroups(KeywordReserved, Text), Push("slashstartsregex")},
{`([a-zA-Z_?.$][\w?.$]*)\(\) \{`, NameOther, Push("slashstartsregex")},
{`([\w?.$][\w?.$]*)(\s*:\s*)([\w?.$][\w?.$]*)`, ByGroups(NameOther, Text, KeywordType), nil},

@ -15,36 +15,32 @@ var YAML = internal.Register(MustNewLexer(
Rules{
"root": {
Include("whitespace"),
{`^---`, NameNamespace, nil},
{`^\.\.\.`, NameNamespace, nil},
{`^---`, Text, nil},
{`[\n?]?\s*- `, Text, nil},
{`#.*$`, Comment, nil},
{`!![^\s]+`, CommentPreproc, nil},
{`&[^\s]+`, CommentPreproc, nil},
{`\*[^\s]+`, CommentPreproc, nil},
{`^%include\s+[^\n\r]+`, CommentPreproc, nil},
{`([>|+-]\s+)(\s+)((?:(?:.*?$)(?:[\n\r]*?)?)*)`, ByGroups(StringDoc, StringDoc, StringDoc), nil},
Include("key"),
Include("value"),
{`[?:,\[\]]`, Punctuation, nil},
{`.`, Text, nil},
},
"value": {
{`([>|](?:[+-])?)(\n(^ {1,})(?:.*\n*(?:^\3 *).*)*)`, ByGroups(Punctuation, StringDoc, Whitespace), nil},
{Words(``, `\b`, "true", "True", "TRUE", "false", "False", "FALSE", "null",
"y", "Y", "yes", "Yes", "YES", "n", "N", "no", "No", "NO",
"on", "On", "ON", "off", "Off", "OFF"), KeywordConstant, nil},
{Words(``, `\b`, "true", "false", "null"), KeywordConstant, nil},
{`"(?:\\.|[^"])*"`, StringDouble, nil},
{`'(?:\\.|[^'])*'`, StringSingle, nil},
{`\d\d\d\d-\d\d-\d\d([T ]\d\d:\d\d:\d\d(\.\d+)?(Z|\s+[-+]\d+)?)?`, LiteralDate, nil},
{`\b[+\-]?(0x[\da-f]+|0o[0-7]+|(\d+\.?\d*|\.?\d+)(e[\+\-]?\d+)?|\.inf|\.nan)\b`, Number, nil},
{`([^\{\}\[\]\?,\:\!\-\*&\@].*)( )+(#.*)`, ByGroups(Literal, Whitespace, Comment), nil},
{`[^\{\}\[\]\?,\:\!\-\*&\@].*`, Literal, nil},
{`\b[\w]+\b`, Text, nil},
},
"key": {
{`"[^"\n].*": `, NameTag, nil},
{`(-)( )([^"\n{]*)(:)( )`, ByGroups(Punctuation, Whitespace, NameTag, Punctuation, Whitespace), nil},
{`([^"\n{]*)(:)( )`, ByGroups(NameTag, Punctuation, Whitespace), nil},
{`([^"\n{]*)(:)(\n)`, ByGroups(NameTag, Punctuation, Whitespace), nil},
{`"[^"\n].*": `, Keyword, nil},
{`(-)( )([^"\n{]*)(:)( )`, ByGroups(Punctuation, Whitespace, Keyword, Punctuation, Whitespace), nil},
{`([^"\n{]*)(:)( )`, ByGroups(Keyword, Punctuation, Whitespace), nil},
{`([^"\n{]*)(:)(\n)`, ByGroups(Keyword, Punctuation, Whitespace), nil},
},
"whitespace": {
{`\s+`, Whitespace, nil},

@ -1,54 +0,0 @@
package z
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Zig lexer.
var Zig = internal.Register(MustNewLexer(
&Config{
Name: "Zig",
Aliases: []string{"zig"},
Filenames: []string{"*.zig"},
MimeTypes: []string{"text/zig"},
},
Rules{
"root": {
{`\n`, TextWhitespace, nil},
{`\s+`, TextWhitespace, nil},
{`//.*?\n`, CommentSingle, nil},
{Words(``, `\b`, `break`, `return`, `continue`, `asm`, `defer`, `errdefer`, `unreachable`, `try`, `catch`, `async`, `await`, `suspend`, `resume`, `cancel`), Keyword, nil},
{Words(``, `\b`, `const`, `var`, `extern`, `packed`, `export`, `pub`, `noalias`, `inline`, `comptime`, `nakedcc`, `stdcallcc`, `volatile`, `allowzero`, `align`, `linksection`, `threadlocal`), KeywordReserved, nil},
{Words(``, `\b`, `struct`, `enum`, `union`, `error`), Keyword, nil},
{Words(``, `\b`, `while`, `for`), Keyword, nil},
{Words(``, `\b`, `bool`, `f16`, `f32`, `f64`, `f128`, `void`, `noreturn`, `type`, `anyerror`, `promise`, `i0`, `u0`, `isize`, `usize`, `comptime_int`, `comptime_float`, `c_short`, `c_ushort`, `c_int`, `c_uint`, `c_long`, `c_ulong`, `c_longlong`, `c_ulonglong`, `c_longdouble`, `c_voidi8`, `u8`, `i16`, `u16`, `i32`, `u32`, `i64`, `u64`, `i128`, `u128`), KeywordType, nil},
{Words(``, `\b`, `true`, `false`, `null`, `undefined`), KeywordConstant, nil},
{Words(``, `\b`, `if`, `else`, `switch`, `and`, `or`, `orelse`), Keyword, nil},
{Words(``, `\b`, `fn`, `usingnamespace`, `test`), Keyword, nil},
{`0x[0-9a-fA-F]+\.[0-9a-fA-F]+([pP][\-+]?[0-9a-fA-F]+)?`, LiteralNumberFloat, nil},
{`0x[0-9a-fA-F]+\.?[pP][\-+]?[0-9a-fA-F]+`, LiteralNumberFloat, nil},
{`[0-9]+\.[0-9]+([eE][-+]?[0-9]+)?`, LiteralNumberFloat, nil},
{`[0-9]+\.?[eE][-+]?[0-9]+`, LiteralNumberFloat, nil},
{`0b[01]+`, LiteralNumberBin, nil},
{`0o[0-7]+`, LiteralNumberOct, nil},
{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
{`[0-9]+`, LiteralNumberInteger, nil},
{`@[a-zA-Z_]\w*`, NameBuiltin, nil},
{`[a-zA-Z_]\w*`, Name, nil},
{`\'\\\'\'`, LiteralStringEscape, nil},
{`\'\\(|x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])\'`, LiteralStringEscape, nil},
{`\'[^\\\']\'`, LiteralString, nil},
{`\\\\[^\n]*`, LiteralStringHeredoc, nil},
{`c\\\\[^\n]*`, LiteralStringHeredoc, nil},
{`c?"`, LiteralString, Push("string")},
{`[+%=><|^!?/\-*&~:]`, Operator, nil},
{`[{}()\[\],.;]`, Punctuation, nil},
},
"string": {
{`\\(x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])`, LiteralStringEscape, nil},
{`[^\\"\n]+`, LiteralString, nil},
{`"`, LiteralString, Pop(1)},
},
},
))

@ -6,7 +6,6 @@ import (
"regexp"
"strings"
"sync"
"time"
"unicode/utf8"
"github.com/dlclark/regexp2"
@ -161,14 +160,6 @@ func Tokenise(lexer Lexer, options *TokeniseOptions, text string) ([]Token, erro
// Rules maps from state to a sequence of Rules.
type Rules map[string][]Rule
// Rename clones rules then a rule.
func (r Rules) Rename(old, new string) Rules {
r = r.Clone()
r[new] = r[old]
delete(r, old)
return r
}
// Clone returns a clone of the Rules.
func (r Rules) Clone() Rules {
out := map[string][]Rule{}
@ -179,15 +170,6 @@ func (r Rules) Clone() Rules {
return out
}
// Merge creates a clone of "r" then merges "rules" into the clone.
func (r Rules) Merge(rules Rules) Rules {
out := r.Clone()
for k, v := range rules.Clone() {
out[k] = v
}
return out
}
// MustNewLexer creates a new Lexer or panics.
func MustNewLexer(config *Config, rules Rules) *RegexLexer {
lexer, err := NewLexer(config, rules)
@ -394,7 +376,6 @@ func (r *RegexLexer) maybeCompile() (err error) {
if err != nil {
return fmt.Errorf("failed to compile rule %s.%d: %s", state, i, err)
}
rule.Regexp.MatchTimeout = time.Millisecond * 250
}
}
}

@ -65,11 +65,11 @@ var examples = []string{
"Mon, 02 Jan 2006 15:04:05 MST",
"Tue, 11 Jul 2017 16:28:13 +0200 (CEST)",
"Mon, 02 Jan 2006 15:04:05 -0700",
"Thu, 4 Jan 2018 17:53:36 +0000",
"Mon 30 Sep 2018 09:09:09 PM UTC",
"Mon Aug 10 15:44:11 UTC+0100 2015",
"Thu, 4 Jan 2018 17:53:36 +0000",
"Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time)",
"Sun, 3 Jan 2021 00:12:23 +0800 (GMT+08:00)",
"September 17, 2012 10:09am",
"September 17, 2012 at 10:09am PST-08",
"September 17, 2012, 10:10:09",
@ -77,11 +77,15 @@ var examples = []string{
"October 7th, 1970",
"12 Feb 2006, 19:17",
"12 Feb 2006 19:17",
"14 May 2019 19:11:40.164",
"7 oct 70",
"7 oct 1970",
"03 February 2013",
"1 July 2013",
"2013-Feb-03",
// dd/Mon/yyy alpha Months
"06/Jan/2008:15:04:05 -0700",
"06/Jan/2008 15:04:05 -0700",
// mm/dd/yy
"3/31/2014",
"03/31/2014",
@ -123,7 +127,10 @@ var examples = []string{
"2006-01-02T15:04:05+0000",
"2009-08-12T22:15:09-07:00",
"2009-08-12T22:15:09",
"2009-08-12T22:15:09.988",
"2009-08-12T22:15:09Z",
"2017-07-19T03:21:51:897+0100",
"2019-05-29T08:41-04", // no seconds, 2 digit TZ offset
// yyyy-mm-dd hh:mm:ss
"2014-04-26 17:24:37.3186369",
"2012-08-03 18:31:59.257000000",
@ -147,6 +154,8 @@ var examples = []string{
"2014-04",
"2014",
"2014-05-11 08:20:13,787",
// yyyy-mm-dd-07:00
"2020-07-20+08:00",
// mm.dd.yy
"3.31.2014",
"03.31.2014",
@ -156,6 +165,9 @@ var examples = []string{
// yyyymmdd and similar
"20140601",
"20140722105203",
// yymmdd hh:mm:yy mysql log
// 080313 05:21:55 mysqld started
"171113 14:14:20",
// unix seconds, ms, micro, nano
"1332151919",
"1384216367189",
@ -214,6 +226,7 @@ func main() {
| Mon Aug 10 15:44:11 UTC+0100 2015 | 2015-08-10 15:44:11 +0000 UTC |
| Thu, 4 Jan 2018 17:53:36 +0000 | 2018-01-04 17:53:36 +0000 UTC |
| Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time) | 2015-07-03 18:04:07 +0100 GMT |
| Sun, 3 Jan 2021 00:12:23 +0800 (GMT+08:00) | 2021-01-03 00:12:23 +0800 +0800 |
| September 17, 2012 10:09am | 2012-09-17 10:09:00 +0000 UTC |
| September 17, 2012 at 10:09am PST-08 | 2012-09-17 10:09:00 -0800 PST |
| September 17, 2012, 10:10:09 | 2012-09-17 10:10:09 +0000 UTC |
@ -221,11 +234,14 @@ func main() {
| October 7th, 1970 | 1970-10-07 00:00:00 +0000 UTC |
| 12 Feb 2006, 19:17 | 2006-02-12 19:17:00 +0000 UTC |
| 12 Feb 2006 19:17 | 2006-02-12 19:17:00 +0000 UTC |
| 14 May 2019 19:11:40.164 | 2019-05-14 19:11:40.164 +0000 UTC |
| 7 oct 70 | 1970-10-07 00:00:00 +0000 UTC |
| 7 oct 1970 | 1970-10-07 00:00:00 +0000 UTC |
| 03 February 2013 | 2013-02-03 00:00:00 +0000 UTC |
| 1 July 2013 | 2013-07-01 00:00:00 +0000 UTC |
| 2013-Feb-03 | 2013-02-03 00:00:00 +0000 UTC |
| 06/Jan/2008:15:04:05 -0700 | 2008-01-06 15:04:05 -0700 -0700 |
| 06/Jan/2008 15:04:05 -0700 | 2008-01-06 15:04:05 -0700 -0700 |
| 3/31/2014 | 2014-03-31 00:00:00 +0000 UTC |
| 03/31/2014 | 2014-03-31 00:00:00 +0000 UTC |
| 08/21/71 | 1971-08-21 00:00:00 +0000 UTC |
@ -250,11 +266,22 @@ func main() {
| 2014/4/02 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
| 2012/03/19 10:11:59 | 2012-03-19 10:11:59 +0000 UTC |
| 2012/03/19 10:11:59.3186369 | 2012-03-19 10:11:59.3186369 +0000 UTC |
| 2014:3:31 | 2014-03-31 00:00:00 +0000 UTC |
| 2014:03:31 | 2014-03-31 00:00:00 +0000 UTC |
| 2014:4:8 22:05 | 2014-04-08 22:05:00 +0000 UTC |
| 2014:04:08 22:05 | 2014-04-08 22:05:00 +0000 UTC |
| 2014:04:2 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
| 2014:4:02 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
| 2012:03:19 10:11:59 | 2012-03-19 10:11:59 +0000 UTC |
| 2012:03:19 10:11:59.3186369 | 2012-03-19 10:11:59.3186369 +0000 UTC |
| 2014年04月08日 | 2014-04-08 00:00:00 +0000 UTC |
| 2006-01-02T15:04:05+0000 | 2006-01-02 15:04:05 +0000 UTC |
| 2009-08-12T22:15:09-07:00 | 2009-08-12 22:15:09 -0700 -0700 |
| 2009-08-12T22:15:09 | 2009-08-12 22:15:09 +0000 UTC |
| 2009-08-12T22:15:09.988 | 2009-08-12 22:15:09.988 +0000 UTC |
| 2009-08-12T22:15:09Z | 2009-08-12 22:15:09 +0000 UTC |
| 2017-07-19T03:21:51:897+0100 | 2017-07-19 03:21:51.897 +0100 +0100 |
| 2019-05-29T08:41-04 | 2019-05-29 08:41:00 -0400 -0400 |
| 2014-04-26 17:24:37.3186369 | 2014-04-26 17:24:37.3186369 +0000 UTC |
| 2012-08-03 18:31:59.257000000 | 2012-08-03 18:31:59.257 +0000 UTC |
| 2014-04-26 17:24:37.123 | 2014-04-26 17:24:37.123 +0000 UTC |
@ -277,6 +304,7 @@ func main() {
| 2014-04 | 2014-04-01 00:00:00 +0000 UTC |
| 2014 | 2014-01-01 00:00:00 +0000 UTC |
| 2014-05-11 08:20:13,787 | 2014-05-11 08:20:13.787 +0000 UTC |
| 2020-07-20+08:00 | 2020-07-20 00:00:00 +0800 +0800 |
| 3.31.2014 | 2014-03-31 00:00:00 +0000 UTC |
| 03.31.2014 | 2014-03-31 00:00:00 +0000 UTC |
| 08.21.71 | 1971-08-21 00:00:00 +0000 UTC |
@ -284,6 +312,7 @@ func main() {
| 2014.03.30 | 2014-03-30 00:00:00 +0000 UTC |
| 20140601 | 2014-06-01 00:00:00 +0000 UTC |
| 20140722105203 | 2014-07-22 10:52:03 +0000 UTC |
| 171113 14:14:20 | 2017-11-13 14:14:20 +0000 UTC |
| 1332151919 | 2012-03-19 10:11:59 +0000 UTC |
| 1384216367189 | 2013-11-12 00:32:47.189 +0000 UTC |
| 1384216367111222 | 2013-11-12 00:32:47.111222 +0000 UTC |

@ -3,7 +3,7 @@ module github.com/araddon/dateparse
go 1.12
require (
github.com/mattn/go-runewidth v0.0.9 // indirect
github.com/mattn/go-runewidth v0.0.10 // indirect
github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4
github.com/stretchr/testify v1.6.1
github.com/stretchr/testify v1.7.0
)

@ -1,14 +1,17 @@
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/mattn/go-runewidth v0.0.10 h1:CoZ3S2P7pvtP45xOtBw+/mDL2z0RKI576gSkzRRpdGg=
github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/rivo/uniseg v0.1.0 h1:+2KBaVoUmb9XzDsrx/Ct0W/EYOSFf/nWTauy++DprtY=
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4 h1:8qmTC5ByIXO3GP/IzBkxcZ/99VITvnIETDhdFz/om7A=
github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4/go.mod h1:C1a7PQSMz9NShzorzCiG2fk9+xuCgLkPeCvMHYR2OWg=
github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=

@ -55,37 +55,41 @@ type timeState uint8
const (
dateStart dateState = iota // 0
dateDigit
dateDigitSt
dateYearDash
dateYearDashAlphaDash
dateYearDashDash
dateYearDashDashWs // 5
dateYearDashDashT
dateYearDashDashOffset
dateDigitDash
dateDigitDashAlpha
dateDigitDashAlphaDash
dateDigitDot // 10
dateDigitDashAlphaDash // 10
dateDigitDot
dateDigitDotDot
dateDigitSlash
dateDigitYearSlash
dateDigitSlashAlpha // 15
dateDigitColon
dateDigitChineseYear
dateDigitChineseYearWs // 15
dateDigitChineseYearWs
dateDigitWs
dateDigitWsMoYear
dateDigitWsMoYear // 20
dateDigitWsMolong
dateAlpha
dateAlphaWs // 20
dateAlphaWs
dateAlphaWsDigit
dateAlphaWsDigitMore
dateAlphaWsDigitMore // 25
dateAlphaWsDigitMoreWs
dateAlphaWsDigitMoreWsYear
dateAlphaWsMonth // 25
dateAlphaWsMonth
dateAlphaWsDigitYearmaybe
dateAlphaWsMonthMore
dateAlphaWsMonthSuffix
dateAlphaWsMore
dateAlphaWsAtTime // 30
dateAlphaWsAtTime
dateAlphaWsAlpha
dateAlphaWsAlphaYearmaybe
dateAlphaWsAlphaYearmaybe // 35
dateAlphaPeriodWsDigit
dateWeekdayComma
dateWeekdayAbbrevComma
@ -267,7 +271,7 @@ iterRunes:
i += (bytesConsumed - 1)
}
//gou.Debugf("i=%d r=%s state=%d %s", i, string(r), p.stateDate, datestr)
// gou.Debugf("i=%d r=%s state=%d %s", i, string(r), p.stateDate, datestr)
switch p.stateDate {
case dateStart:
if unicode.IsDigit(r) {
@ -295,17 +299,37 @@ iterRunes:
p.stateDate = dateDigitDash
}
case '/':
// 08/May/2005
// 03/31/2005
// 2014/02/24
p.stateDate = dateDigitSlash
if i == 4 {
p.yearlen = i
// 2014/02/24 - Year first /
p.yearlen = i // since it was start of datestr, i=len
p.moi = i + 1
p.setYear()
p.stateDate = dateDigitYearSlash
} else {
// Either Ambiguous dd/mm vs mm/dd OR dd/month/yy
// 08/May/2005
// 03/31/2005
// 31/03/2005
if i+2 < len(p.datestr) && unicode.IsLetter(rune(datestr[i+1])) {
// 08/May/2005
p.stateDate = dateDigitSlashAlpha
p.moi = i + 1
p.daylen = 2
p.dayi = 0
p.setDay()
continue
}
// Ambiguous dd/mm vs mm/dd the bane of date-parsing
// 03/31/2005
// 31/03/2005
p.ambiguousMD = true
if p.preferMonthFirst {
if p.molen == 0 {
// 03/31/2005
p.molen = i
p.setMonth()
p.dayi = i + 1
@ -317,6 +341,7 @@ iterRunes:
p.moi = i + 1
}
}
}
case ':':
@ -363,9 +388,13 @@ iterRunes:
// 02 Jan 2018 23:59:34
// 12 Feb 2006, 19:17
// 12 Feb 2006, 19:17:22
p.stateDate = dateDigitWs
p.dayi = 0
p.daylen = i
if i == 6 {
p.stateDate = dateDigitSt
} else {
p.stateDate = dateDigitWs
p.dayi = 0
p.daylen = i
}
case '年':
// Chinese Year
p.stateDate = dateDigitChineseYear
@ -376,9 +405,15 @@ iterRunes:
}
p.part1Len = i
case dateDigitSt:
p.set(0, "060102")
i = i - 1
p.stateTime = timeStart
break iterRunes
case dateYearDash:
// dateYearDashDashT
// 2006-01-02T15:04:05Z07:00
// 2020-08-17T17:00:00:000+0100
// dateYearDashDashWs
// 2013-04-01 22:43:22
// dateYearDashAlphaDash
@ -400,7 +435,14 @@ iterRunes:
// 2006-01-02T15:04:05Z07:00
// dateYearDashDashWs
// 2013-04-01 22:43:22
// dateYearDashDashOffset
// 2020-07-20+00:00
switch r {
case '+', '-':
p.offseti = i
p.daylen = i - p.dayi
p.stateDate = dateYearDashDashOffset
p.setDay()
case ' ':
p.daylen = i - p.dayi
p.stateDate = dateYearDashDashWs
@ -414,6 +456,21 @@ iterRunes:
p.setDay()
break iterRunes
}
case dateYearDashDashT:
// dateYearDashDashT
// 2006-01-02T15:04:05Z07:00
// 2020-08-17T17:00:00:000+0100
case dateYearDashDashOffset:
// 2020-07-20+00:00
switch r {
case ':':
p.set(p.offseti, "-07:00")
// case ' ':
// return nil, unknownErr(datestr)
}
case dateYearDashAlphaDash:
// 2013-Feb-03
switch r {
@ -446,7 +503,7 @@ iterRunes:
case dateDigitDashAlphaDash:
// 13-Feb-03 ambiguous
// 28-Feb-03 ambiguous
// 29-Jun-2016
// 29-Jun-2016 dd-month(alpha)-yyyy
switch r {
case ' ':
// we need to find if this was 4 digits, aka year
@ -476,8 +533,49 @@ iterRunes:
break iterRunes
}
case dateDigitSlash:
case dateDigitYearSlash:
// 2014/07/10 06:55:38.156283
// I honestly don't know if this format ever shows up as yyyy/
switch r {
case ' ', ':':
p.stateTime = timeStart
if p.daylen == 0 {
p.daylen = i - p.dayi
p.setDay()
}
break iterRunes
case '/':
if p.molen == 0 {
p.molen = i - p.moi
p.setMonth()
p.dayi = i + 1
}
}
case dateDigitSlashAlpha:
// 06/May/2008
switch r {
case '/':
// |
// 06/May/2008
if p.molen == 0 {
p.set(p.moi, "Jan")
p.yeari = i + 1
}
// We aren't breaking because we are going to re-use this case
// to find where the date starts, and possible time begins
case ' ', ':':
p.stateTime = timeStart
if p.yearlen == 0 {
p.yearlen = i - p.yeari
p.setYear()
}
break iterRunes
}
case dateDigitSlash:
// 03/19/2012 10:11:59
// 04/2/2014 03:00:37
// 3/1/2012 10:11:59
@ -488,25 +586,9 @@ iterRunes:
// 1/2/06
switch r {
case ' ':
p.stateTime = timeStart
if p.yearlen == 0 {
p.yearlen = i - p.yeari
p.setYear()
} else if p.daylen == 0 {
p.daylen = i - p.dayi
p.setDay()
}
break iterRunes
case '/':
if p.yearlen > 0 {
// 2014/07/10 06:55:38.156283
if p.molen == 0 {
p.molen = i - p.moi
p.setMonth()
p.dayi = i + 1
}
} else if p.preferMonthFirst {
// This is the 2nd / so now we should know start pts of all of the dd, mm, yy
if p.preferMonthFirst {
if p.daylen == 0 {
p.daylen = i - p.dayi
p.setDay()
@ -519,6 +601,15 @@ iterRunes:
p.yeari = i + 1
}
}
// Note no break, we are going to pass by and re-enter this dateDigitSlash
// and look for ending (space) or not (just date)
case ' ':
p.stateTime = timeStart
if p.yearlen == 0 {
p.yearlen = i - p.yeari
p.setYear()
}
break iterRunes
}
case dateDigitColon:
@ -718,8 +809,7 @@ iterRunes:
case r == ',':
// Mon, 02 Jan 2006
// p.moi = 0
// p.molen = i
if i == 3 {
p.stateDate = dateWeekdayAbbrevComma
p.set(0, "Mon")
@ -1039,7 +1129,7 @@ iterRunes:
for ; i < len(datestr); i++ {
r := rune(datestr[i])
//gou.Debugf("%d %s %d iterTimeRunes %s %s", i, string(r), p.stateTime, p.ds(), p.ts())
// gou.Debugf("i=%d r=%s state=%d iterTimeRunes %s %s", i, string(r), p.stateTime, p.ds(), p.ts())
switch p.stateTime {
case timeStart:
@ -1096,7 +1186,12 @@ iterRunes:
// 22:18+0530
p.minlen = i - p.mini
} else {
p.seclen = i - p.seci
if p.seclen == 0 {
p.seclen = i - p.seci
}
if p.msi > 0 && p.mslen == 0 {
p.mslen = i - p.msi
}
}
p.offseti = i
case '.':
@ -1154,6 +1249,19 @@ iterRunes:
} else if p.seci == 0 {
p.seci = i + 1
p.minlen = i - p.mini
} else if p.seci > 0 {
// 18:31:59:257 ms uses colon, wtf
p.seclen = i - p.seci
p.set(p.seci, "05")
p.msi = i + 1
// gross, gross, gross. manipulating the datestr is horrible.
// https://github.com/araddon/dateparse/issues/117
// Could not get the parsing to work using golang time.Parse() without
// replacing that colon with period.
p.set(i, ".")
datestr = datestr[0:i] + "." + datestr[i+1:]
p.datestr = datestr
}
}
case timeOffset:
@ -1201,7 +1309,6 @@ iterRunes:
// 17:57:51 MST 2009
p.tzi = i
p.stateTime = timeWsAlpha
//break iterTimeRunes
} else if unicode.IsDigit(r) {
// 00:12:00 2008
p.stateTime = timeWsYear
@ -1231,6 +1338,7 @@ iterRunes:
p.offseti = i
case ' ':
// 17:57:51 MST 2009
// 17:57:51 MST
p.tzlen = i - p.tzi
if p.tzlen == 4 {
p.set(p.tzi, " MST")
@ -1333,7 +1441,7 @@ iterRunes:
p.trimExtra()
break
}
case '+', '-':
case '+', '-', '(':
// This really doesn't seem valid, but for some reason when round-tripping a go date
// their is an extra +03 printed out. seems like go bug to me, but, parsing anyway.
// 00:00:00 +0300 +03
@ -1350,6 +1458,7 @@ iterRunes:
p.setYear()
}
case unicode.IsLetter(r):
// 15:04:05 -0700 MST
if p.tzi == 0 {
p.tzi = i
}
@ -1535,6 +1644,17 @@ iterRunes:
}
switch p.stateTime {
case timeWsAlpha:
switch len(p.datestr) - p.tzi {
case 3:
// 13:31:51.999 +01:00 CET
p.set(p.tzi, "MST")
case 4:
p.set(p.tzi, "MST")
p.extra = len(p.datestr) - 1
p.trimExtra()
}
case timeWsAlphaWs:
p.yearlen = i - p.yeari
p.setYear()
@ -1554,13 +1674,34 @@ iterRunes:
case timePeriod:
p.mslen = i - p.msi
case timeOffset:
// 19:55:00+0100
p.set(p.offseti, "-0700")
switch len(p.datestr) - p.offseti {
case 0, 1, 2, 4:
return p, fmt.Errorf("TZ offset not recognized %q near %q (must be 2 or 4 digits optional colon)", datestr, string(datestr[p.offseti:]))
case 3:
// 19:55:00+01
p.set(p.offseti, "-07")
case 5:
// 19:55:00+0100
p.set(p.offseti, "-0700")
}
case timeWsOffset:
p.set(p.offseti, "-0700")
case timeWsOffsetWs:
// 17:57:51 -0700 2009
// 00:12:00 +0000 UTC
if p.tzi > 0 {
switch len(p.datestr) - p.tzi {
case 3:
// 13:31:51.999 +01:00 CET
p.set(p.tzi, "MST")
case 4:
// 13:31:51.999 +01:00 CEST
p.set(p.tzi, "MST ")
}
}
case timeWsOffsetColon:
// 17:57:51 -07:00
p.set(p.offseti, "-07:00")
@ -1638,6 +1779,9 @@ iterRunes:
p.t = &t
return p, nil
}
case dateDigitSt:
// 171113 14:14:20
return p, nil
case dateYearDash:
// 2006-01
@ -1650,6 +1794,16 @@ iterRunes:
// 2006-01-2
return p, nil
case dateYearDashDashOffset:
/// 2020-07-20+00:00
switch len(p.datestr) - p.offseti {
case 5:
p.set(p.offseti, "-0700")
case 6:
p.set(p.offseti, "-07:00")
}
return p, nil
case dateYearDashAlphaDash:
// 2013-Feb-03
// 2013-Feb-3
@ -1757,6 +1911,13 @@ iterRunes:
// 3/1/2014
// 10/13/2014
// 01/02/2006
return p, nil
case dateDigitSlashAlpha:
// 03/Jun/2014
return p, nil
case dateDigitYearSlash:
// 2014/10/13
return p, nil
@ -2002,10 +2163,12 @@ func (p *parser) parse() (time.Time, error) {
p.format = p.format[p.skip:]
p.datestr = p.datestr[p.skip:]
}
//gou.Debugf("parse %q AS %q", p.datestr, string(p.format))
if p.loc == nil {
// gou.Debugf("parse layout=%q input=%q \ntx, err := time.Parse(%q, %q)", string(p.format), p.datestr, string(p.format), p.datestr)
return time.Parse(string(p.format), p.datestr)
}
//gou.Debugf("parse layout=%q input=%q \ntx, err := time.ParseInLocation(%q, %q, %v)", string(p.format), p.datestr, string(p.format), p.datestr, p.loc)
return time.ParseInLocation(string(p.format), p.datestr, p.loc)
}
func isDay(alpha string) bool {

@ -1,60 +0,0 @@
package css
import "fmt"
// Declaration represents a parsed style property
type Declaration struct {
Property string
Value string
Important bool
}
// NewDeclaration instanciates a new Declaration
func NewDeclaration() *Declaration {
return &Declaration{}
}
// Returns string representation of the Declaration
func (decl *Declaration) String() string {
return decl.StringWithImportant(true)
}
// StringWithImportant returns string representation with optional !important part
func (decl *Declaration) StringWithImportant(option bool) string {
result := fmt.Sprintf("%s: %s", decl.Property, decl.Value)
if option && decl.Important {
result += " !important"
}
result += ";"
return result
}
// Equal returns true if both Declarations are equals
func (decl *Declaration) Equal(other *Declaration) bool {
return (decl.Property == other.Property) && (decl.Value == other.Value) && (decl.Important == other.Important)
}
//
// DeclarationsByProperty
//
// DeclarationsByProperty represents sortable style declarations
type DeclarationsByProperty []*Declaration
// Implements sort.Interface
func (declarations DeclarationsByProperty) Len() int {
return len(declarations)
}
// Implements sort.Interface
func (declarations DeclarationsByProperty) Swap(i, j int) {
declarations[i], declarations[j] = declarations[j], declarations[i]
}
// Implements sort.Interface
func (declarations DeclarationsByProperty) Less(i, j int) bool {
return declarations[i].Property < declarations[j].Property
}

@ -1,230 +0,0 @@
package css
import (
"fmt"
"strings"
)
const (
indentSpace = 2
)
// RuleKind represents a Rule kind
type RuleKind int
// Rule kinds
const (
QualifiedRule RuleKind = iota
AtRule
)
// At Rules than have Rules inside their block instead of Declarations
var atRulesWithRulesBlock = []string{
"@document", "@font-feature-values", "@keyframes", "@media", "@supports",
}
// Rule represents a parsed CSS rule
type Rule struct {
Kind RuleKind
// At Rule name (eg: "@media")
Name string
// Raw prelude
Prelude string
// Qualified Rule selectors parsed from prelude
Selectors []string
// Style properties
Declarations []*Declaration
// At Rule embedded rules
Rules []*Rule
// Current rule embedding level
EmbedLevel int
}
// NewRule instanciates a new Rule
func NewRule(kind RuleKind) *Rule {
return &Rule{
Kind: kind,
}
}
// Returns string representation of rule kind
func (kind RuleKind) String() string {
switch kind {
case QualifiedRule:
return "Qualified Rule"
case AtRule:
return "At Rule"
default:
return "WAT"
}
}
// EmbedsRules returns true if this rule embeds another rules
func (rule *Rule) EmbedsRules() bool {
if rule.Kind == AtRule {
for _, atRuleName := range atRulesWithRulesBlock {
if rule.Name == atRuleName {
return true
}
}
}
return false
}
// Equal returns true if both rules are equals
func (rule *Rule) Equal(other *Rule) bool {
if (rule.Kind != other.Kind) ||
(rule.Prelude != other.Prelude) ||
(rule.Name != other.Name) {
return false
}
if (len(rule.Selectors) != len(other.Selectors)) ||
(len(rule.Declarations) != len(other.Declarations)) ||
(len(rule.Rules) != len(other.Rules)) {
return false
}
for i, sel := range rule.Selectors {
if sel != other.Selectors[i] {
return false
}
}
for i, decl := range rule.Declarations {
if !decl.Equal(other.Declarations[i]) {
return false
}
}
for i, rule := range rule.Rules {
if !rule.Equal(other.Rules[i]) {
return false
}
}
return true
}
// Diff returns a string representation of rules differences
func (rule *Rule) Diff(other *Rule) []string {
result := []string{}
if rule.Kind != other.Kind {
result = append(result, fmt.Sprintf("Kind: %s | %s", rule.Kind.String(), other.Kind.String()))
}
if rule.Prelude != other.Prelude {
result = append(result, fmt.Sprintf("Prelude: \"%s\" | \"%s\"", rule.Prelude, other.Prelude))
}
if rule.Name != other.Name {
result = append(result, fmt.Sprintf("Name: \"%s\" | \"%s\"", rule.Name, other.Name))
}
if len(rule.Selectors) != len(other.Selectors) {
result = append(result, fmt.Sprintf("Selectors: %v | %v", strings.Join(rule.Selectors, ", "), strings.Join(other.Selectors, ", ")))
} else {
for i, sel := range rule.Selectors {
if sel != other.Selectors[i] {
result = append(result, fmt.Sprintf("Selector: \"%s\" | \"%s\"", sel, other.Selectors[i]))
}
}
}
if len(rule.Declarations) != len(other.Declarations) {
result = append(result, fmt.Sprintf("Declarations Nb: %d | %d", len(rule.Declarations), len(other.Declarations)))
} else {
for i, decl := range rule.Declarations {
if !decl.Equal(other.Declarations[i]) {
result = append(result, fmt.Sprintf("Declaration: \"%s\" | \"%s\"", decl.String(), other.Declarations[i].String()))
}
}
}
if len(rule.Rules) != len(other.Rules) {
result = append(result, fmt.Sprintf("Rules Nb: %d | %d", len(rule.Rules), len(other.Rules)))
} else {
for i, rule := range rule.Rules {
if !rule.Equal(other.Rules[i]) {
result = append(result, fmt.Sprintf("Rule: \"%s\" | \"%s\"", rule.String(), other.Rules[i].String()))
}
}
}
return result
}
// Returns the string representation of a rule
func (rule *Rule) String() string {
result := ""
if rule.Kind == QualifiedRule {
for i, sel := range rule.Selectors {
if i != 0 {
result += ", "
}
result += sel
}
} else {
// AtRule
result += fmt.Sprintf("%s", rule.Name)
if rule.Prelude != "" {
if result != "" {
result += " "
}
result += fmt.Sprintf("%s", rule.Prelude)
}
}
if (len(rule.Declarations) == 0) && (len(rule.Rules) == 0) {
result += ";"
} else {
result += " {\n"
if rule.EmbedsRules() {
for _, subRule := range rule.Rules {
result += fmt.Sprintf("%s%s\n", rule.indent(), subRule.String())
}
} else {
for _, decl := range rule.Declarations {
result += fmt.Sprintf("%s%s\n", rule.indent(), decl.String())
}
}
result += fmt.Sprintf("%s}", rule.indentEndBlock())
}
return result
}
// Returns identation spaces for declarations and rules
func (rule *Rule) indent() string {
result := ""
for i := 0; i < ((rule.EmbedLevel + 1) * indentSpace); i++ {
result += " "
}
return result
}
// Returns identation spaces for end of block character
func (rule *Rule) indentEndBlock() string {
result := ""
for i := 0; i < (rule.EmbedLevel * indentSpace); i++ {
result += " "
}
return result
}

@ -1,25 +0,0 @@
package css
// Stylesheet represents a parsed stylesheet
type Stylesheet struct {
Rules []*Rule
}
// NewStylesheet instanciate a new Stylesheet
func NewStylesheet() *Stylesheet {
return &Stylesheet{}
}
// Returns string representation of the Stylesheet
func (sheet *Stylesheet) String() string {
result := ""
for _, rule := range sheet.Rules {
if result != "" {
result += "\n"
}
result += rule.String()
}
return result
}

@ -1,22 +0,0 @@
The MIT License (MIT)
Copyright (c) 2015 Aymerick JEHANNE
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@ -1,409 +0,0 @@
package parser
import (
"errors"
"fmt"
"regexp"
"strings"
"github.com/gorilla/css/scanner"
"github.com/aymerick/douceur/css"
)
const (
importantSuffixRegexp = `(?i)\s*!important\s*$`
)
var (
importantRegexp *regexp.Regexp
)
// Parser represents a CSS parser
type Parser struct {
scan *scanner.Scanner // Tokenizer
// Tokens parsed but not consumed yet
tokens []*scanner.Token
// Rule embedding level
embedLevel int
}
func init() {
importantRegexp = regexp.MustCompile(importantSuffixRegexp)
}
// NewParser instanciates a new parser
func NewParser(txt string) *Parser {
return &Parser{
scan: scanner.New(txt),
}
}
// Parse parses a whole stylesheet
func Parse(text string) (*css.Stylesheet, error) {
result, err := NewParser(text).ParseStylesheet()
if err != nil {
return nil, err
}
return result, nil
}
// ParseDeclarations parses CSS declarations
func ParseDeclarations(text string) ([]*css.Declaration, error) {
result, err := NewParser(text).ParseDeclarations()
if err != nil {
return nil, err
}
return result, nil
}
// ParseStylesheet parses a stylesheet
func (parser *Parser) ParseStylesheet() (*css.Stylesheet, error) {
result := css.NewStylesheet()
// Parse BOM
if _, err := parser.parseBOM(); err != nil {
return result, err
}
// Parse list of rules
rules, err := parser.ParseRules()
if err != nil {
return result, err
}
result.Rules = rules
return result, nil
}
// ParseRules parses a list of rules
func (parser *Parser) ParseRules() ([]*css.Rule, error) {
result := []*css.Rule{}
inBlock := false
if parser.tokenChar("{") {
// parsing a block of rules
inBlock = true
parser.embedLevel++
parser.shiftToken()
}
for parser.tokenParsable() {
if parser.tokenIgnorable() {
parser.shiftToken()
} else if parser.tokenChar("}") {
if !inBlock {
errMsg := fmt.Sprintf("Unexpected } character: %s", parser.nextToken().String())
return result, errors.New(errMsg)
}
parser.shiftToken()
parser.embedLevel--
// finished
break
} else {
rule, err := parser.ParseRule()
if err != nil {
return result, err
}
rule.EmbedLevel = parser.embedLevel
result = append(result, rule)
}
}
return result, parser.err()
}
// ParseRule parses a rule
func (parser *Parser) ParseRule() (*css.Rule, error) {
if parser.tokenAtKeyword() {
return parser.parseAtRule()
}
return parser.parseQualifiedRule()
}
// ParseDeclarations parses a list of declarations
func (parser *Parser) ParseDeclarations() ([]*css.Declaration, error) {
result := []*css.Declaration{}
if parser.tokenChar("{") {
parser.shiftToken()
}
for parser.tokenParsable() {
if parser.tokenIgnorable() {
parser.shiftToken()
} else if parser.tokenChar("}") {
// end of block
parser.shiftToken()
break
} else {
declaration, err := parser.ParseDeclaration()
if err != nil {
return result, err
}
result = append(result, declaration)
}
}
return result, parser.err()
}
// ParseDeclaration parses a declaration
func (parser *Parser) ParseDeclaration() (*css.Declaration, error) {
result := css.NewDeclaration()
curValue := ""
for parser.tokenParsable() {
if parser.tokenChar(":") {
result.Property = strings.TrimSpace(curValue)
curValue = ""
parser.shiftToken()
} else if parser.tokenChar(";") || parser.tokenChar("}") {
if result.Property == "" {
errMsg := fmt.Sprintf("Unexpected ; character: %s", parser.nextToken().String())
return result, errors.New(errMsg)
}
if importantRegexp.MatchString(curValue) {
result.Important = true
curValue = importantRegexp.ReplaceAllString(curValue, "")
}
result.Value = strings.TrimSpace(curValue)
if parser.tokenChar(";") {
parser.shiftToken()
}
// finished
break
} else {
token := parser.shiftToken()
curValue += token.Value
}
}
// log.Printf("[parsed] Declaration: %s", result.String())
return result, parser.err()
}
// Parse an At Rule
func (parser *Parser) parseAtRule() (*css.Rule, error) {
// parse rule name (eg: "@import")
token := parser.shiftToken()
result := css.NewRule(css.AtRule)
result.Name = token.Value
for parser.tokenParsable() {
if parser.tokenChar(";") {
parser.shiftToken()
// finished
break
} else if parser.tokenChar("{") {
if result.EmbedsRules() {
// parse rules block
rules, err := parser.ParseRules()
if err != nil {
return result, err
}
result.Rules = rules
} else {
// parse declarations block
declarations, err := parser.ParseDeclarations()
if err != nil {
return result, err
}
result.Declarations = declarations
}
// finished
break
} else {
// parse prelude
prelude, err := parser.parsePrelude()
if err != nil {
return result, err
}
result.Prelude = prelude
}
}
// log.Printf("[parsed] Rule: %s", result.String())
return result, parser.err()
}
// Parse a Qualified Rule
func (parser *Parser) parseQualifiedRule() (*css.Rule, error) {
result := css.NewRule(css.QualifiedRule)
for parser.tokenParsable() {
if parser.tokenChar("{") {
if result.Prelude == "" {
errMsg := fmt.Sprintf("Unexpected { character: %s", parser.nextToken().String())
return result, errors.New(errMsg)
}
// parse declarations block
declarations, err := parser.ParseDeclarations()
if err != nil {
return result, err
}
result.Declarations = declarations
// finished
break
} else {
// parse prelude
prelude, err := parser.parsePrelude()
if err != nil {
return result, err
}
result.Prelude = prelude
}
}
result.Selectors = strings.Split(result.Prelude, ",")
for i, sel := range result.Selectors {
result.Selectors[i] = strings.TrimSpace(sel)
}
// log.Printf("[parsed] Rule: %s", result.String())
return result, parser.err()
}
// Parse Rule prelude
func (parser *Parser) parsePrelude() (string, error) {
result := ""
for parser.tokenParsable() && !parser.tokenEndOfPrelude() {
token := parser.shiftToken()
result += token.Value
}
result = strings.TrimSpace(result)
// log.Printf("[parsed] prelude: %s", result)
return result, parser.err()
}
// Parse BOM
func (parser *Parser) parseBOM() (bool, error) {
if parser.nextToken().Type == scanner.TokenBOM {
parser.shiftToken()
return true, nil
}
return false, parser.err()
}
// Returns next token without removing it from tokens buffer
func (parser *Parser) nextToken() *scanner.Token {
if len(parser.tokens) == 0 {
// fetch next token
nextToken := parser.scan.Next()
// log.Printf("[token] %s => %v", nextToken.Type.String(), nextToken.Value)
// queue it
parser.tokens = append(parser.tokens, nextToken)
}
return parser.tokens[0]
}
// Returns next token and remove it from the tokens buffer
func (parser *Parser) shiftToken() *scanner.Token {
var result *scanner.Token
result, parser.tokens = parser.tokens[0], parser.tokens[1:]
return result
}
// Returns tokenizer error, or nil if no error
func (parser *Parser) err() error {
if parser.tokenError() {
token := parser.nextToken()
return fmt.Errorf("Tokenizer error: %s", token.String())
}
return nil
}
// Returns true if next token is Error
func (parser *Parser) tokenError() bool {
return parser.nextToken().Type == scanner.TokenError
}
// Returns true if next token is EOF
func (parser *Parser) tokenEOF() bool {
return parser.nextToken().Type == scanner.TokenEOF
}
// Returns true if next token is a whitespace
func (parser *Parser) tokenWS() bool {
return parser.nextToken().Type == scanner.TokenS
}
// Returns true if next token is a comment
func (parser *Parser) tokenComment() bool {
return parser.nextToken().Type == scanner.TokenComment
}
// Returns true if next token is a CDO or a CDC
func (parser *Parser) tokenCDOorCDC() bool {
switch parser.nextToken().Type {
case scanner.TokenCDO, scanner.TokenCDC:
return true
default:
return false
}
}
// Returns true if next token is ignorable
func (parser *Parser) tokenIgnorable() bool {
return parser.tokenWS() || parser.tokenComment() || parser.tokenCDOorCDC()
}
// Returns true if next token is parsable
func (parser *Parser) tokenParsable() bool {
return !parser.tokenEOF() && !parser.tokenError()
}
// Returns true if next token is an At Rule keyword
func (parser *Parser) tokenAtKeyword() bool {
return parser.nextToken().Type == scanner.TokenAtKeyword
}
// Returns true if next token is given character
func (parser *Parser) tokenChar(value string) bool {
token := parser.nextToken()
return (token.Type == scanner.TokenChar) && (token.Value == value)
}
// Returns true if next token marks the end of a prelude
func (parser *Parser) tokenEndOfPrelude() bool {
return parser.tokenChar(";") || parser.tokenChar("{")
}

View File

@ -1,5 +1,5 @@
language: go
go:
- 1.9
- 1.5
- tip

View File

@ -43,8 +43,8 @@ The __last__ capture is embedded in each group, so `g.String()` will return the
| Category | regexp | regexp2 |
| --- | --- | --- |
| Catastrophic backtracking possible | no, constant execution time guarantees | yes, if your pattern is at risk you can use the `re.MatchTimeout` field |
| Python-style capture groups `(?P<name>re)` | yes | no (yes in RE2 compat mode) |
| .NET-style capture groups `(?<name>re)` or `(?'name're)` | no | yes |
| Python-style capture groups `(P<name>re)` | yes | no |
| .NET-style capture groups `(<name>re)` or `('name're)` | no | yes |
| comments `(?#comment)` | no | yes |
| branch numbering reset `(?\|a\|b)` | no | no |
| possessive match `(?>re)` | no | yes |
@ -54,15 +54,14 @@ The __last__ capture is embedded in each group, so `g.String()` will return the
| negative lookbehind `(?<!re)` | no | yes |
| back reference `\1` | no | yes |
| named back reference `\k'name'` | no | yes |
| named ascii character class `[[:foo:]]`| yes | no (yes in RE2 compat mode) |
| conditionals `(?(expr)yes\|no)` | no | yes |
| named ascii character class `[[:foo:]]`| yes | no |
| conditionals `((expr)yes\|no)` | no | yes |
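For orientation, a minimal sketch of the .NET-style named groups from the table above in use; `FindStringMatch`, `GroupByName`, and the `None` option are assumed from regexp2's public API and are not shown in this diff:
```go
package main

import (
	"fmt"

	"github.com/dlclark/regexp2"
)

func main() {
	// .NET-style named capture groups, which stdlib regexp spells (?P<name>re).
	re := regexp2.MustCompile(`(?<year>\d{4})-(?<month>\d{2})`, regexp2.None)
	if m, _ := re.FindStringMatch("released 2021-03"); m != nil {
		fmt.Println(m.GroupByName("year").String())  // 2021
		fmt.Println(m.GroupByName("month").String()) // 03
	}
}
```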
## RE2 compatibility mode
The default behavior of `regexp2` is to match the .NET regexp engine; however, the `RE2` option is provided to change the parsing to increase compatibility with RE2. Using the `RE2` option when compiling a regexp will not take away any features, but will change the following behaviors:
* add support for named ascii character classes (e.g. `[[:foo:]]`)
* add support for python-style capture groups (e.g. `(P<name>re)`)
* change singleline behavior for `$` to only match end of string (like RE2) (see [#24](https://github.com/dlclark/regexp2/issues/24))
```go
re := regexp2.MustCompile(`Your RE2-compatible pattern`, regexp2.RE2)
if isMatch, _ := re.MatchString(`Something to match`); isMatch {

View File

@ -235,14 +235,17 @@ func (re *Regexp) getRunesAndStart(s string, startAt int) ([]rune, int) {
ret[i] = r
i++
}
if startAt == len(s) {
runeIdx = i
}
return ret[:i], runeIdx
}
func getRunes(s string) []rune {
return []rune(s)
ret := make([]rune, len(s))
i := 0
for _, r := range s {
ret[i] = r
i++
}
return ret[:i]
}
// MatchRunes return true if the runes matches the regex

View File

@ -566,22 +566,9 @@ func (r *runner) execute() error {
continue
case syntax.EndZ:
rchars := r.rightchars()
if rchars > 1 {
if r.rightchars() > 1 || r.rightchars() == 1 && r.charAt(r.textPos()) != '\n' {
break
}
// RE2 and EcmaScript define $ as "asserts position at the end of the string"
// PCRE/.NET adds "or before the line terminator right at the end of the string (if any)"
if (r.re.options & (RE2 | ECMAScript)) != 0 {
// RE2/Ecmascript mode
if rchars > 0 {
break
}
} else if rchars == 1 && r.charAt(r.textPos()) != '\n' {
// "regular" mode
break
}
r.advance(0)
continue
@ -951,8 +938,8 @@ func (r *runner) advance(i int) {
}
func (r *runner) goTo(newpos int) {
// when branching backward or in place, ensure storage
if newpos <= r.codepos {
// when branching backward, ensure storage
if newpos < r.codepos {
r.ensureStorage()
}

View File

@ -1250,10 +1250,10 @@ func (p *parser) scanBasicBackslash(scanOnly bool) (*regexNode, error) {
return nil, nil
}
if p.isCaptureSlot(capnum) {
if p.useOptionE() || p.isCaptureSlot(capnum) {
return newRegexNodeM(ntRef, p.options, capnum), nil
}
if capnum <= 9 && !p.useOptionE() {
if capnum <= 9 {
return nil, p.getErr(ErrUndefinedBackRef, capnum)
}
@ -1648,7 +1648,7 @@ func (p *parser) scanOptions() {
}
// Scans \ code for escape codes that map to single unicode chars.
func (p *parser) scanCharEscape() (r rune, err error) {
func (p *parser) scanCharEscape() (rune, error) {
ch := p.moveRightGetChar()
@ -1657,22 +1657,16 @@ func (p *parser) scanCharEscape() (r rune, err error) {
return p.scanOctal(), nil
}
pos := p.textpos()
switch ch {
case 'x':
// support for \x{HEX} syntax from Perl and PCRE
if p.charsRight() > 0 && p.rightChar(0) == '{' {
if p.useOptionE() {
return ch, nil
}
p.moveRight(1)
return p.scanHexUntilBrace()
} else {
r, err = p.scanHex(2)
}
return p.scanHex(2)
case 'u':
r, err = p.scanHex(4)
return p.scanHex(4)
case 'a':
return '\u0007', nil
case 'b':
@ -1690,18 +1684,13 @@ func (p *parser) scanCharEscape() (r rune, err error) {
case 'v':
return '\u000B', nil
case 'c':
r, err = p.scanControl()
return p.scanControl()
default:
if !p.useOptionE() && IsWordChar(ch) {
return 0, p.getErr(ErrUnrecognizedEscape, string(ch))
}
return ch, nil
}
if err != nil && p.useOptionE() {
p.textto(pos)
return ch, nil
}
return
}
// Grabs and converts an ascii control character
@ -1818,12 +1807,12 @@ func (p *parser) scanOctal() rune {
//we know the first char is good because the caller had to check
i := 0
d := int(p.rightChar(0) - '0')
for c > 0 && d <= 7 && d >= 0 {
if i >= 0x20 && p.useOptionE() {
break
}
for c > 0 && d <= 7 {
i *= 8
i += d
if p.useOptionE() && i >= 0x20 {
break
}
c--
p.moveRight(1)

View File

@ -1,27 +0,0 @@
Copyright (c) 2013, Gorilla web toolkit
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
Neither the name of the {organization} nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@ -1,33 +0,0 @@
// Copyright 2012 The Gorilla Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
/*
Package gorilla/css/scanner generates tokens for a CSS3 input.
It follows the CSS3 specification located at:
http://www.w3.org/TR/css3-syntax/
To use it, create a new scanner for a given CSS string and call Next() until
the token returned has type TokenEOF or TokenError:
s := scanner.New(myCSS)
for {
token := s.Next()
if token.Type == scanner.TokenEOF || token.Type == scanner.TokenError {
break
}
// Do something with the token...
}
Following the CSS3 specification, an error can only occur when the scanner
finds an unclosed quote or unclosed comment. In these cases the text becomes
"untokenizable". Everything else is tokenizable and it is up to a parser
to make sense of the token stream (or ignore nonsensical token sequences).
Note: the scanner doesn't perform lexical analysis or, in other words, it
doesn't care about the token context. It is intended to be used by a
lexer or parser.
*/
package scanner

View File

@ -1,356 +0,0 @@
// Copyright 2012 The Gorilla Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package scanner
import (
"fmt"
"regexp"
"strings"
"unicode"
"unicode/utf8"
)
// tokenType identifies the type of lexical tokens.
type tokenType int
// String returns a string representation of the token type.
func (t tokenType) String() string {
return tokenNames[t]
}
// Token represents a token and the corresponding string.
type Token struct {
Type tokenType
Value string
Line int
Column int
}
// String returns a string representation of the token.
func (t *Token) String() string {
if len(t.Value) > 10 {
return fmt.Sprintf("%s (line: %d, column: %d): %.10q...",
t.Type, t.Line, t.Column, t.Value)
}
return fmt.Sprintf("%s (line: %d, column: %d): %q",
t.Type, t.Line, t.Column, t.Value)
}
// All tokens -----------------------------------------------------------------
// The complete list of tokens in CSS3.
const (
// Scanner flags.
TokenError tokenType = iota
TokenEOF
// From now on, only tokens from the CSS specification.
TokenIdent
TokenAtKeyword
TokenString
TokenHash
TokenNumber
TokenPercentage
TokenDimension
TokenURI
TokenUnicodeRange
TokenCDO
TokenCDC
TokenS
TokenComment
TokenFunction
TokenIncludes
TokenDashMatch
TokenPrefixMatch
TokenSuffixMatch
TokenSubstringMatch
TokenChar
TokenBOM
)
// tokenNames maps tokenType's to their names. Used for conversion to string.
var tokenNames = map[tokenType]string{
TokenError: "error",
TokenEOF: "EOF",
TokenIdent: "IDENT",
TokenAtKeyword: "ATKEYWORD",
TokenString: "STRING",
TokenHash: "HASH",
TokenNumber: "NUMBER",
TokenPercentage: "PERCENTAGE",
TokenDimension: "DIMENSION",
TokenURI: "URI",
TokenUnicodeRange: "UNICODE-RANGE",
TokenCDO: "CDO",
TokenCDC: "CDC",
TokenS: "S",
TokenComment: "COMMENT",
TokenFunction: "FUNCTION",
TokenIncludes: "INCLUDES",
TokenDashMatch: "DASHMATCH",
TokenPrefixMatch: "PREFIXMATCH",
TokenSuffixMatch: "SUFFIXMATCH",
TokenSubstringMatch: "SUBSTRINGMATCH",
TokenChar: "CHAR",
TokenBOM: "BOM",
}
// Macros and productions -----------------------------------------------------
// http://www.w3.org/TR/css3-syntax/#tokenization
var macroRegexp = regexp.MustCompile(`\{[a-z]+\}`)
// macros maps macro names to patterns to be expanded.
var macros = map[string]string{
// must be escaped: `\.+*?()|[]{}^$`
"ident": `-?{nmstart}{nmchar}*`,
"name": `{nmchar}+`,
"nmstart": `[a-zA-Z_]|{nonascii}|{escape}`,
"nonascii": "[\u0080-\uD7FF\uE000-\uFFFD\U00010000-\U0010FFFF]",
"unicode": `\\[0-9a-fA-F]{1,6}{wc}?`,
"escape": "{unicode}|\\\\[\u0020-\u007E\u0080-\uD7FF\uE000-\uFFFD\U00010000-\U0010FFFF]",
"nmchar": `[a-zA-Z0-9_-]|{nonascii}|{escape}`,
"num": `[0-9]*\.[0-9]+|[0-9]+`,
"string": `"(?:{stringchar}|')*"|'(?:{stringchar}|")*'`,
"stringchar": `{urlchar}|[ ]|\\{nl}`,
"nl": `[\n\r\f]|\r\n`,
"w": `{wc}*`,
"wc": `[\t\n\f\r ]`,
// urlchar should accept [(ascii characters minus those that need escaping)|{nonascii}|{escape}]
// ASCII characters range = `[\u0020-\u007e]`
// Skip space \u0020 = `[\u0021-\u007e]`
// Skip quotation mark \0022 = `[\u0021\u0023-\u007e]`
// Skip apostrophe \u0027 = `[\u0021\u0023-\u0026\u0028-\u007e]`
// Skip reverse solidus \u005c = `[\u0021\u0023-\u0026\u0028-\u005b\u005d\u007e]`
// Finally, the left square bracket (\u005b) and right (\u005d) needs escaping themselves
"urlchar": "[\u0021\u0023-\u0026\u0028-\\\u005b\\\u005d-\u007E]|{nonascii}|{escape}",
}
// productions maps the list of tokens to patterns to be expanded.
var productions = map[tokenType]string{
// Unused regexps (matched using other methods) are commented out.
TokenIdent: `{ident}`,
TokenAtKeyword: `@{ident}`,
TokenString: `{string}`,
TokenHash: `#{name}`,
TokenNumber: `{num}`,
TokenPercentage: `{num}%`,
TokenDimension: `{num}{ident}`,
TokenURI: `url\({w}(?:{string}|{urlchar}*?){w}\)`,
TokenUnicodeRange: `U\+[0-9A-F\?]{1,6}(?:-[0-9A-F]{1,6})?`,
//TokenCDO: `<!--`,
TokenCDC: `-->`,
TokenS: `{wc}+`,
TokenComment: `/\*[^\*]*[\*]+(?:[^/][^\*]*[\*]+)*/`,
TokenFunction: `{ident}\(`,
//TokenIncludes: `~=`,
//TokenDashMatch: `\|=`,
//TokenPrefixMatch: `\^=`,
//TokenSuffixMatch: `\$=`,
//TokenSubstringMatch: `\*=`,
//TokenChar: `[^"']`,
//TokenBOM: "\uFEFF",
}
// matchers maps the list of tokens to compiled regular expressions.
//
// The map is filled on init() using the macros and productions defined in
// the CSS specification.
var matchers = map[tokenType]*regexp.Regexp{}
// matchOrder is the order to test regexps when first-char shortcuts
// can't be used.
var matchOrder = []tokenType{
TokenURI,
TokenFunction,
TokenUnicodeRange,
TokenIdent,
TokenDimension,
TokenPercentage,
TokenNumber,
TokenCDC,
}
func init() {
// replace macros and compile regexps for productions.
replaceMacro := func(s string) string {
return "(?:" + macros[s[1:len(s)-1]] + ")"
}
for t, s := range productions {
for macroRegexp.MatchString(s) {
s = macroRegexp.ReplaceAllStringFunc(s, replaceMacro)
}
matchers[t] = regexp.MustCompile("^(?:" + s + ")")
}
}
// Scanner --------------------------------------------------------------------
// New returns a new CSS scanner for the given input.
func New(input string) *Scanner {
// Normalize newlines.
input = strings.Replace(input, "\r\n", "\n", -1)
return &Scanner{
input: input,
row: 1,
col: 1,
}
}
// Scanner scans an input and emits tokens following the CSS3 specification.
type Scanner struct {
input string
pos int
row int
col int
err *Token
}
// Next returns the next token from the input.
//
// At the end of the input the token type is TokenEOF.
//
// If the input can't be tokenized the token type is TokenError. This occurs
// in case of unclosed quotation marks or comments.
func (s *Scanner) Next() *Token {
if s.err != nil {
return s.err
}
if s.pos >= len(s.input) {
s.err = &Token{TokenEOF, "", s.row, s.col}
return s.err
}
if s.pos == 0 {
// Test BOM only once, at the beginning of the file.
if strings.HasPrefix(s.input, "\uFEFF") {
return s.emitSimple(TokenBOM, "\uFEFF")
}
}
// There's a lot we can guess based on the first byte so we'll take a
// shortcut before testing multiple regexps.
input := s.input[s.pos:]
switch input[0] {
case '\t', '\n', '\f', '\r', ' ':
// Whitespace.
return s.emitToken(TokenS, matchers[TokenS].FindString(input))
case '.':
// Dot is too common to not have a quick check.
// We'll test if this is a Char; if it is followed by a number it is a
// dimension/percentage/number, and this will be matched later.
if len(input) > 1 && !unicode.IsDigit(rune(input[1])) {
return s.emitSimple(TokenChar, ".")
}
case '#':
// Another common one: Hash or Char.
if match := matchers[TokenHash].FindString(input); match != "" {
return s.emitToken(TokenHash, match)
}
return s.emitSimple(TokenChar, "#")
case '@':
// Another common one: AtKeyword or Char.
if match := matchers[TokenAtKeyword].FindString(input); match != "" {
return s.emitSimple(TokenAtKeyword, match)
}
return s.emitSimple(TokenChar, "@")
case ':', ',', ';', '%', '&', '+', '=', '>', '(', ')', '[', ']', '{', '}':
// More common chars.
return s.emitSimple(TokenChar, string(input[0]))
case '"', '\'':
// String or error.
match := matchers[TokenString].FindString(input)
if match != "" {
return s.emitToken(TokenString, match)
}
s.err = &Token{TokenError, "unclosed quotation mark", s.row, s.col}
return s.err
case '/':
// Comment, error or Char.
if len(input) > 1 && input[1] == '*' {
match := matchers[TokenComment].FindString(input)
if match != "" {
return s.emitToken(TokenComment, match)
} else {
s.err = &Token{TokenError, "unclosed comment", s.row, s.col}
return s.err
}
}
return s.emitSimple(TokenChar, "/")
case '~':
// Includes or Char.
return s.emitPrefixOrChar(TokenIncludes, "~=")
case '|':
// DashMatch or Char.
return s.emitPrefixOrChar(TokenDashMatch, "|=")
case '^':
// PrefixMatch or Char.
return s.emitPrefixOrChar(TokenPrefixMatch, "^=")
case '$':
// SuffixMatch or Char.
return s.emitPrefixOrChar(TokenSuffixMatch, "$=")
case '*':
// SubstringMatch or Char.
return s.emitPrefixOrChar(TokenSubstringMatch, "*=")
case '<':
// CDO or Char.
return s.emitPrefixOrChar(TokenCDO, "<!--")
}
// Test all regexps, in order.
for _, token := range matchOrder {
if match := matchers[token].FindString(input); match != "" {
return s.emitToken(token, match)
}
}
// We already handled unclosed quotation marks and comments,
// so this can only be a Char.
r, width := utf8.DecodeRuneInString(input)
token := &Token{TokenChar, string(r), s.row, s.col}
s.col += width
s.pos += width
return token
}
// updatePosition updates input coordinates based on the consumed text.
func (s *Scanner) updatePosition(text string) {
width := utf8.RuneCountInString(text)
lines := strings.Count(text, "\n")
s.row += lines
if lines == 0 {
s.col += width
} else {
s.col = utf8.RuneCountInString(text[strings.LastIndex(text, "\n"):])
}
s.pos += len(text) // while col is a rune index, pos is a byte index
}
// emitToken returns a Token for the string v and updates the scanner position.
func (s *Scanner) emitToken(t tokenType, v string) *Token {
token := &Token{t, v, s.row, s.col}
s.updatePosition(v)
return token
}
// emitSimple returns a Token for the string v and updates the scanner
// position in a simplified manner.
//
// The string is known to have only ASCII characters and to not have a newline.
func (s *Scanner) emitSimple(t tokenType, v string) *Token {
token := &Token{t, v, s.row, s.col}
s.col += len(v)
s.pos += len(v)
return token
}
// emitPrefixOrChar returns a Token for type t if the current position
// matches the given prefix. Otherwise it returns a Char token using the
// first character from the prefix.
//
// The prefix is known to have only ASCII characters and to not have a newline.
func (s *Scanner) emitPrefixOrChar(t tokenType, prefix string) *Token {
if strings.HasPrefix(s.input[s.pos:], prefix) {
return s.emitSimple(t, prefix)
}
return s.emitSimple(TokenChar, string(prefix[0]))
}

View File

@ -1,28 +1,101 @@
# Compiled Object files, Static and Dynamic libs (Shared Objects)
*.o
*.a
*.so
# Created by https://www.toptal.com/developers/gitignore/api/code,go,linux,macos,windows
# Edit at https://www.toptal.com/developers/gitignore?templates=code,go,linux,macos,windows
# Folders
_obj
_test
# Vim swap files
.*.sw?
# Architecture specific extensions/prefixes
*.[568vq]
[568vq].out
*.cgo1.go
*.cgo2.c
_cgo_defun.c
_cgo_gotypes.go
_cgo_export.*
_testmain.go
### Code ###
.vscode/*
!.vscode/tasks.json
!.vscode/launch.json
*.code-workspace
### Go ###
# Binaries for programs and plugins
*.exe
*.exe~
*.dll
*.so
*.dylib
# Code coverage stuff
coverage.out
# Test binary, built with `go test -c`
*.test
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
# Dependency directories (remove the comment below to include it)
# vendor/
### Go Patch ###
/vendor/
/Godeps/
### Linux ###
*~
# temporary files which can be created if a process still has a handle open of a deleted file
.fuse_hidden*
# KDE directory preferences
.directory
# Linux trash folder which might appear on any partition or disk
.Trash-*
# .nfs files are created when an open file is removed but is still being accessed
.nfs*
### macOS ###
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two \r
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
### Windows ###
# Windows thumbnail cache files
Thumbs.db
Thumbs.db:encryptable
ehthumbs.db
ehthumbs_vista.db
# Dump file
*.stackdump
# Folder config file
[Dd]esktop.ini
# Recycle Bin used on file shares
$RECYCLE.BIN/
# Windows Installer files
*.cab
*.msi
*.msix
*.msm
*.msp
# Windows shortcuts
*.lnk
# End of https://www.toptal.com/developers/gitignore/api/code,go,linux,macos,windows

View File

@ -1,7 +0,0 @@
language: go
install:
- go get golang.org/x/tools/cmd/cover
- go get github.com/mattn/goveralls
script:
- go test -v -covermode=count -coverprofile=coverage.out
- if [[ "$TRAVIS_PULL_REQUEST" = "false" ]]; then $HOME/gopath/bin/goveralls -coverprofile=coverage.out -service=travis-ci -repotoken $COVERALLS_TOKEN; fi

42
vendor/github.com/lucasb-eyer/go-colorful/CHANGELOG.md generated vendored Normal file
View File

@ -0,0 +1,42 @@
# Changelog
All notable changes to this project will be documented in this file.
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
The format of this file is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
but only releases after v1.0.3 properly adhere to it.
## [1.2.0] - 2021-01-27
### Added
- HSLuv and HPLuv color spaces (#41, #51)
- CIE LCh(uv) color space, called `LuvLCh` in code (#51)
- JSON and envconfig serialization support for `HexColor` (#42)
- `DistanceLinearRGB` (#53)
### Fixed
- RGB to/from XYZ conversion is more accurate (#51)
- A bug in `XYZToLuvWhiteRef` that only applied to very small values was fixed (#51)
- `BlendHCL` output is clamped so that it's not invalid (#46)
- Properly documented `DistanceCIE76` (#40)
- Some small godoc fixes
## [1.0.3] - 2019-11-11
- Remove SQLMock dependency
## [1.0.2] - 2019-04-07
- Fixes SQLMock dependency
## [1.0.1] - 2019-03-24
- Adds support for Go Modules
## [1.0.0] - 2018-05-26
- API Breaking change in `MakeColor`: instead of `panic`ing when alpha is zero, it now returns a secondary, boolean return value indicating success. See [the color.Color interface](#the-colorcolor-interface) section and [this FAQ entry](#q-why-would-makecolor-ever-fail) for details.
## [0.9.0] - 2018-05-26
- Initial version number after having ignored versioning for a long time :)

View File

@ -1,9 +1,9 @@
go-colorful
===========
A library for playing with colors in go (golang).
[![Build Status](https://travis-ci.org/lucasb-eyer/go-colorful.svg?branch=master)](https://travis-ci.org/lucasb-eyer/go-colorful)
[![Coverage Status](https://coveralls.io/repos/github/lucasb-eyer/go-colorful/badge.svg?branch=master)](https://coveralls.io/github/lucasb-eyer/go-colorful?branch=master)
[![go reportcard](https://goreportcard.com/badge/github.com/lucasb-eyer/go-colorful)](https://goreportcard.com/report/github.com/lucasb-eyer/go-colorful)
A library for playing with colors in Go. Supports Go 1.13 onwards.
Why?
====
@ -30,6 +30,9 @@ Go-Colorful stores colors in RGB and provides methods from converting these to v
- **CIE-L\*a\*b\*:** A *perceptually uniform* color space, i.e. distances are meaningful. L\* in [0..1] and a\*, b\* almost in [-1..1].
- **CIE-L\*u\*v\*:** Very similar to CIE-L\*a\*b\*, there is [no consensus](http://en.wikipedia.org/wiki/CIELUV#Historical_background) on which one is "better".
- **CIE-L\*C\*h° (HCL):** This is generally the [most useful](http://vis4.net/blog/posts/avoid-equidistant-hsv-colors/) one; CIE-L\*a\*b\* space in polar coordinates, i.e. a *better* HSV. H° is in [0..360], C\* almost in [-1..1] and L\* as in CIE-L\*a\*b\*.
- **CIE LCh(uv):** Called `LuvLCh` in code, this is a cylindrical transformation of the CIE-L\*u\*v\* color space. Like HCL above: H° is in [0..360], C\* almost in [-1..1] and L\* as in CIE-L\*u\*v\*.
- **HSLuv:** The better alternative to HSL, see [here](https://www.hsluv.org/) and [here](https://www.kuon.ch/post/2020-03-08-hsluv/). Hue in [0..360], Saturation and Luminance in [0..1].
- **HPLuv:** A variant of HSLuv. The color space is smoother, but only pastel colors can be included. Because the valid colors are limited, it's easy to get invalid Saturation values way above 1.0, indicating the color can't be represented in HPLuv because it's not pastel.
For the colorspaces where it makes sense (XYZ, Lab, Luv, HCl), the
[D65](http://en.wikipedia.org/wiki/Illuminant_D65) is used as reference white
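As a quick illustration of the color spaces listed above, a small sketch built only from conversion methods that appear elsewhere in this diff (`Hex`, `Hcl`, and the newly added `HSLuv`):
```go
package main

import (
	"fmt"

	colorful "github.com/lucasb-eyer/go-colorful"
)

func main() {
	// Parse an sRGB hex color, then look at it in two of the spaces above.
	c, err := colorful.Hex("#517ab8")
	if err != nil {
		panic(err)
	}
	h, chroma, l := c.Hcl() // CIE-L*C*h°: H in [0..360], C and L roughly in [0..1]
	hh, s, ll := c.HSLuv()  // HSLuv: H in [0..360], S and L in [0..1]
	fmt.Printf("HCL:   h=%.1f c=%.3f l=%.3f\n", h, chroma, l)
	fmt.Printf("HSLuv: h=%.1f s=%.3f l=%.3f\n", hh, s, ll)
}
```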
@ -248,14 +251,14 @@ func main() {
//c2, _ := colorful.Hex("#1E3140")
for i := 0 ; i < blocks ; i++ {
draw.Draw(img, image.Rect(i*blockw, 0,(i+1)*blockw, 40), &image.Uniform{c1.BlendHsv(c2, float64(i)/float64(blocks-1))}, image.ZP, draw.Src)
draw.Draw(img, image.Rect(i*blockw, 40,(i+1)*blockw, 80), &image.Uniform{c1.BlendLuv(c2, float64(i)/float64(blocks-1))}, image.ZP, draw.Src)
draw.Draw(img, image.Rect(i*blockw, 80,(i+1)*blockw,120), &image.Uniform{c1.BlendRgb(c2, float64(i)/float64(blocks-1))}, image.ZP, draw.Src)
draw.Draw(img, image.Rect(i*blockw,120,(i+1)*blockw,160), &image.Uniform{c1.BlendLab(c2, float64(i)/float64(blocks-1))}, image.ZP, draw.Src)
draw.Draw(img, image.Rect(i*blockw,160,(i+1)*blockw,200), &image.Uniform{c1.BlendHcl(c2, float64(i)/float64(blocks-1))}, image.ZP, draw.Src)
draw.Draw(img, image.Rect(i*blockw, 0,(i+1)*blockw, 40), &image.Uniform{c1.BlendHsv(c2, float64(i)/float64(blocks-1))}, image.Point{}, draw.Src)
draw.Draw(img, image.Rect(i*blockw, 40,(i+1)*blockw, 80), &image.Uniform{c1.BlendLuv(c2, float64(i)/float64(blocks-1))}, image.Point{}, draw.Src)
draw.Draw(img, image.Rect(i*blockw, 80,(i+1)*blockw,120), &image.Uniform{c1.BlendRgb(c2, float64(i)/float64(blocks-1))}, image.Point{}, draw.Src)
draw.Draw(img, image.Rect(i*blockw,120,(i+1)*blockw,160), &image.Uniform{c1.BlendLab(c2, float64(i)/float64(blocks-1))}, image.Point{}, draw.Src)
draw.Draw(img, image.Rect(i*blockw,160,(i+1)*blockw,200), &image.Uniform{c1.BlendHcl(c2, float64(i)/float64(blocks-1))}, image.Point{}, draw.Src)
// This can be used to "fix" invalid colors in the gradient.
//draw.Draw(img, image.Rect(i*blockw,160,(i+1)*blockw,200), &image.Uniform{c1.BlendHcl(c2, float64(i)/float64(blocks-1)).Clamped()}, image.ZP, draw.Src)
//draw.Draw(img, image.Rect(i*blockw,160,(i+1)*blockw,200), &image.Uniform{c1.BlendHcl(c2, float64(i)/float64(blocks-1)).Clamped()}, image.Point{}, draw.Src)
}
toimg, err := os.Create("colorblend.png")
@ -468,25 +471,12 @@ section above.
Who?
====
This library has been developed by Lucas Beyer with contributions from
This library was developed by Lucas Beyer with contributions from
Bastien Dejean (@baskerville), Phil Kulak (@pkulak) and Christian Muehlhaeuser (@muesli).
Release Notes
=============
It is now maintained by makeworld (@makeworld-the-better-one).
### Version 1.0
- API Breaking change in `MakeColor`: instead of `panic`ing when alpha is zero, it now returns a secondary, boolean return value indicating success. See [the color.Color interface](https://github.com/lucasb-eyer/go-colorful#the-colorcolor-interface) section and [this FAQ entry](https://github.com/lucasb-eyer/go-colorful#q-why-would-makecolor-ever-fail) for details.
### Version 0.9
- Initial version number after having ignored versioning for a long time :)
License: MIT
============
Copyright (c) 2013 Lucas Beyer
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
## License
This repo is under the MIT license, see [LICENSE](LICENSE) for details.

View File

@ -48,6 +48,11 @@ func (col Color) RGB255() (r, g, b uint8) {
return
}
// Used to simplify HSLuv testing.
func (col Color) values() (float64, float64, float64) {
return col.R, col.G, col.B
}
// This is the tolerance used when comparing colors using AlmostEqualRgb.
const Delta = 1.0 / 255.0
@ -64,6 +69,7 @@ func (c Color) IsValid() bool {
0.0 <= c.B && c.B <= 1.0
}
// clamp01 clamps from 0 to 1.
func clamp01(v float64) float64 {
return math.Max(0.0, math.Min(v, 1.0))
}
@ -88,6 +94,15 @@ func (c1 Color) DistanceRgb(c2 Color) float64 {
return math.Sqrt(sq(c1.R-c2.R) + sq(c1.G-c2.G) + sq(c1.B-c2.B))
}
// DistanceLinearRGB computes the distance between two colors in linear RGB
// space. This is not useful for measuring how humans perceive color, but
// might be useful for other things, like dithering.
func (c1 Color) DistanceLinearRGB(c2 Color) float64 {
r1, g1, b1 := c1.LinearRgb()
r2, g2, b2 := c2.LinearRgb()
return math.Sqrt(sq(r1-r2) + sq(g1-g2) + sq(b1-b2))
}
// Check for equality between colors within the tolerance Delta (1/255).
func (c1 Color) AlmostEqualRgb(c2 Color) bool {
return math.Abs(c1.R-c2.R)+
@ -422,16 +437,16 @@ func FastLinearRgb(r, g, b float64) Color {
// XyzToLinearRgb converts from CIE XYZ-space to Linear RGB space.
func XyzToLinearRgb(x, y, z float64) (r, g, b float64) {
r = 3.2404542*x - 1.5371385*y - 0.4985314*z
g = -0.9692660*x + 1.8760108*y + 0.0415560*z
b = 0.0556434*x - 0.2040259*y + 1.0572252*z
r = 3.2409699419045214*x - 1.5373831775700935*y - 0.49861076029300328*z
g = -0.96924363628087983*x + 1.8759675015077207*y + 0.041555057407175613*z
b = 0.055630079696993609*x - 0.20397695888897657*y + 1.0569715142428786*z
return
}
func LinearRgbToXyz(r, g, b float64) (x, y, z float64) {
x = 0.4124564*r + 0.3575761*g + 0.1804375*b
y = 0.2126729*r + 0.7151522*g + 0.0721750*b
z = 0.0193339*r + 0.1191920*g + 0.9503041*b
x = 0.41239079926595948*r + 0.35758433938387796*g + 0.18048078840183429*b
y = 0.21263900587151036*r + 0.71516867876775593*g + 0.072192315360733715*b
z = 0.019330818715591851*r + 0.11919477979462599*g + 0.95053215224966058*b
return
}
@ -569,7 +584,7 @@ func (col Color) LabWhiteRef(wref [3]float64) (l, a, b float64) {
// Generates a color by using data given in CIE L*a*b* space using D65 as reference white.
// WARNING: many combinations of `l`, `a`, and `b` values do not have corresponding
// valid RGB values, check the FAQ in the README if you're unsure.
// valid RGB values, check the FAQ in the README if you're unsure.
func Lab(l, a, b float64) Color {
return Xyz(LabToXyz(l, a, b))
}
@ -589,7 +604,7 @@ func (c1 Color) DistanceLab(c2 Color) float64 {
return math.Sqrt(sq(l1-l2) + sq(a1-a2) + sq(b1-b2))
}
// That's actually the same, but I don't want to break code.
// DistanceCIE76 is the same as DistanceLab.
func (c1 Color) DistanceCIE76(c2 Color) float64 {
return c1.DistanceLab(c2)
}
@ -739,7 +754,7 @@ func XyzToLuv(x, y, z float64) (l, a, b float64) {
func XyzToLuvWhiteRef(x, y, z float64, wref [3]float64) (l, u, v float64) {
if y/wref[1] <= 6.0/29.0*6.0/29.0*6.0/29.0 {
l = y / wref[1] * 29.0 / 3.0 * 29.0 / 3.0 * 29.0 / 3.0
l = y / wref[1] * (29.0 / 3.0 * 29.0 / 3.0 * 29.0 / 3.0) / 100.0
} else {
l = 1.16*math.Cbrt(y/wref[1]) - 0.16
}
@ -803,8 +818,8 @@ func (col Color) LuvWhiteRef(wref [3]float64) (l, u, v float64) {
// Generates a color by using data given in CIE L*u*v* space using D65 as reference white.
// L* is in [0..1] and both u* and v* are in about [-1..1]
// WARNING: many combinations of `l`, `a`, and `b` values do not have corresponding
// valid RGB values, check the FAQ in the README if you're unsure.
// WARNING: many combinations of `l`, `u`, and `v` values do not have corresponding
// valid RGB values, check the FAQ in the README if you're unsure.
func Luv(l, u, v float64) Color {
return Xyz(LuvToXyz(l, u, v))
}
@ -870,8 +885,8 @@ func (col Color) HclWhiteRef(wref [3]float64) (h, c, l float64) {
// Generates a color by using data given in HCL space using D65 as reference white.
// H values are in [0..360], C and L values are in [0..1]
// WARNING: many combinations of `l`, `a`, and `b` values do not have corresponding
// valid RGB values, check the FAQ in the README if you're unsure.
// WARNING: many combinations of `h`, `c`, and `l` values do not have corresponding
// valid RGB values, check the FAQ in the README if you're unsure.
func Hcl(h, c, l float64) Color {
return HclWhiteRef(h, c, l, D65)
}
@ -899,5 +914,66 @@ func (col1 Color) BlendHcl(col2 Color, t float64) Color {
h2, c2, l2 := col2.Hcl()
// We know that h are both in [0..360]
return Hcl(interp_angle(h1, h2, t), c1+t*(c2-c1), l1+t*(l2-l1))
return Hcl(interp_angle(h1, h2, t), c1+t*(c2-c1), l1+t*(l2-l1)).Clamped()
}
// LuvLch
// Converts the given color to LuvLCh space using D65 as reference white.
// h values are in [0..360], C and L values are in [0..1] although C can overshoot 1.0
func (col Color) LuvLCh() (l, c, h float64) {
return col.LuvLChWhiteRef(D65)
}
func LuvToLuvLCh(L, u, v float64) (l, c, h float64) {
// Oops, floating point workaround necessary if u ~= v and both are very small (i.e. almost zero).
if math.Abs(v-u) > 1e-4 && math.Abs(u) > 1e-4 {
h = math.Mod(57.29577951308232087721*math.Atan2(v, u)+360.0, 360.0) // Rad2Deg
} else {
h = 0.0
}
l = L
c = math.Sqrt(sq(u) + sq(v))
return
}
// Converts the given color to LuvLCh space, taking into account
// a given reference white. (i.e. the monitor's white)
// h values are in [0..360], c and l values are in [0..1]
func (col Color) LuvLChWhiteRef(wref [3]float64) (l, c, h float64) {
return LuvToLuvLCh(col.LuvWhiteRef(wref))
}
// Generates a color by using data given in LuvLCh space using D65 as reference white.
// h values are in [0..360], C and L values are in [0..1]
// WARNING: many combinations of `l`, `c`, and `h` values do not have corresponding
// valid RGB values, check the FAQ in the README if you're unsure.
func LuvLCh(l, c, h float64) Color {
return LuvLChWhiteRef(l, c, h, D65)
}
func LuvLChToLuv(l, c, h float64) (L, u, v float64) {
H := 0.01745329251994329576 * h // Deg2Rad
u = c * math.Cos(H)
v = c * math.Sin(H)
L = l
return
}
// Generates a color by using data given in LuvLCh space, taking
// into account a given reference white. (i.e. the monitor's white)
// h values are in [0..360], C and L values are in [0..1]
func LuvLChWhiteRef(l, c, h float64, wref [3]float64) Color {
L, u, v := LuvLChToLuv(l, c, h)
return LuvWhiteRef(L, u, v, wref)
}
// BlendLuvLCh blends two colors in the cylindrical CIELUV color space.
// t == 0 results in c1, t == 1 results in c2
func (col1 Color) BlendLuvLCh(col2 Color, t float64) Color {
l1, c1, h1 := col1.LuvLCh()
l2, c2, h2 := col2.LuvLCh()
// We know that h are both in [0..360]
return LuvLCh(l1+t*(l2-l1), c1+t*(c2-c1), interp_angle(h1, h2, t))
}
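To make the blend semantics concrete, a short sketch that interpolates between two colors with the `BlendLuvLCh` method defined above (the two hex values are arbitrary sample data):
```go
package main

import (
	"fmt"

	colorful "github.com/lucasb-eyer/go-colorful"
)

func main() {
	c1, _ := colorful.Hex("#fdffcc")
	c2, _ := colorful.Hex("#242a42")
	// t == 0 yields c1 and t == 1 yields c2, as documented for BlendLuvLCh above.
	for i := 0; i <= 5; i++ {
		t := float64(i) / 5.0
		fmt.Println(c1.BlendLuvLCh(c2, t).Hex())
	}
}
```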

View File

View File

@ -2,12 +2,14 @@ package colorful
import (
"database/sql/driver"
"encoding/json"
"fmt"
"reflect"
)
// A HexColor is a Color stored as a hex string "#rrggbb". It implements the
// database/sql.Scanner and database/sql/driver.Value interfaces.
// database/sql.Scanner, database/sql/driver.Value,
// encoding/json.Unmarshaler and encoding/json.Marshaler interfaces.
type HexColor Color
type errUnsupportedType struct {
@ -35,3 +37,31 @@ func (hc *HexColor) Value() (driver.Value, error) {
func (e errUnsupportedType) Error() string {
return fmt.Sprintf("unsupported type: got %v, want a %s", e.got, e.want)
}
func (hc *HexColor) UnmarshalJSON(data []byte) error {
var hexCode string
if err := json.Unmarshal(data, &hexCode); err != nil {
return err
}
var col, err = Hex(hexCode)
if err != nil {
return err
}
*hc = HexColor(col)
return nil
}
func (hc HexColor) MarshalJSON() ([]byte, error) {
return json.Marshal(Color(hc).Hex())
}
// Decode - deserialize function for https://github.com/kelseyhightower/envconfig
func (hc *HexColor) Decode(hexCode string) error {
var col, err = Hex(hexCode)
if err != nil {
return err
}
*hc = HexColor(col)
return nil
}
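A brief usage sketch for the JSON support added above; the `Theme` struct and its field are hypothetical, only `HexColor` comes from the package:
```go
package main

import (
	"encoding/json"
	"fmt"

	colorful "github.com/lucasb-eyer/go-colorful"
)

// Theme is a hypothetical config struct; only HexColor comes from the package.
type Theme struct {
	Accent colorful.HexColor `json:"accent"`
}

func main() {
	var th Theme
	// UnmarshalJSON parses the hex string through Hex(), as shown above.
	if err := json.Unmarshal([]byte(`{"accent":"#ff8040"}`), &th); err != nil {
		panic(err)
	}
	// MarshalJSON re-emits the color as a "#rrggbb" string.
	out, _ := json.Marshal(th)
	fmt.Println(string(out)) // {"accent":"#ff8040"}
}
```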

File diff suppressed because one or more lines are too long

207
vendor/github.com/lucasb-eyer/go-colorful/hsluv.go generated vendored Normal file
View File

@ -0,0 +1,207 @@
package colorful
import "math"
// Source: https://github.com/hsluv/hsluv-go
// Under MIT License
// Modified so that Saturation and Luminance are in [0..1] instead of [0..100].
// HSLuv uses a rounded version of the D65. This has no impact on the final RGB
// values, but to keep high levels of accuracy for internal operations and when
// comparing to the test values, this modified white reference is used internally.
//
// See this GitHub thread for details on these values:
// https://github.com/hsluv/hsluv/issues/79
var hSLuvD65 = [3]float64{0.95045592705167, 1.0, 1.089057750759878}
func LuvLChToHSLuv(l, c, h float64) (float64, float64, float64) {
// [-1..1] but the code expects it to be [-100..100]
c *= 100.0
l *= 100.0
var s, max float64
if l > 99.9999999 || l < 0.00000001 {
s = 0.0
} else {
max = maxChromaForLH(l, h)
s = c / max * 100.0
}
return h, clamp01(s / 100.0), clamp01(l / 100.0)
}
func HSLuvToLuvLCh(h, s, l float64) (float64, float64, float64) {
l *= 100.0
s *= 100.0
var c, max float64
if l > 99.9999999 || l < 0.00000001 {
c = 0.0
} else {
max = maxChromaForLH(l, h)
c = max / 100.0 * s
}
// c is [-100..100], but for LCh it's supposed to be almost [-1..1]
return clamp01(l / 100.0), c / 100.0, h
}
func LuvLChToHPLuv(l, c, h float64) (float64, float64, float64) {
// [-1..1] but the code expects it to be [-100..100]
c *= 100.0
l *= 100.0
var s, max float64
if l > 99.9999999 || l < 0.00000001 {
s = 0.0
} else {
max = maxSafeChromaForL(l)
s = c / max * 100.0
}
return h, s / 100.0, l / 100.0
}
func HPLuvToLuvLCh(h, s, l float64) (float64, float64, float64) {
// [-1..1] but the code expects it to be [-100..100]
l *= 100.0
s *= 100.0
var c, max float64
if l > 99.9999999 || l < 0.00000001 {
c = 0.0
} else {
max = maxSafeChromaForL(l)
c = max / 100.0 * s
}
return l / 100.0, c / 100.0, h
}
// HSLuv creates a new Color from values in the HSLuv color space.
// Hue in [0..360], a Saturation [0..1], and a Luminance (lightness) in [0..1].
//
// The returned color values are clamped (using .Clamped), so this will never output
// an invalid color.
func HSLuv(h, s, l float64) Color {
// HSLuv -> LuvLCh -> CIELUV -> CIEXYZ -> Linear RGB -> sRGB
l, u, v := LuvLChToLuv(HSLuvToLuvLCh(h, s, l))
return LinearRgb(XyzToLinearRgb(LuvToXyzWhiteRef(l, u, v, hSLuvD65))).Clamped()
}
// HPLuv creates a new Color from values in the HPLuv color space.
// Hue in [0..360], a Saturation [0..1], and a Luminance (lightness) in [0..1].
//
// The returned color values are clamped (using .Clamped), so this will never output
// an invalid color.
func HPLuv(h, s, l float64) Color {
// HPLuv -> LuvLCh -> CIELUV -> CIEXYZ -> Linear RGB -> sRGB
l, u, v := LuvLChToLuv(HPLuvToLuvLCh(h, s, l))
return LinearRgb(XyzToLinearRgb(LuvToXyzWhiteRef(l, u, v, hSLuvD65))).Clamped()
}
// HSLuv returns the Hue, Saturation and Luminance of the color in the HSLuv
// color space. Hue in [0..360], a Saturation [0..1], and a Luminance
// (lightness) in [0..1].
func (col Color) HSLuv() (h, s, l float64) {
// sRGB -> Linear RGB -> CIEXYZ -> CIELUV -> LuvLCh -> HSLuv
return LuvLChToHSLuv(col.LuvLChWhiteRef(hSLuvD65))
}
// HPLuv returns the Hue, Saturation and Luminance of the color in the HSLuv
// color space. Hue in [0..360], a Saturation [0..1], and a Luminance
// (lightness) in [0..1].
//
// Note that HPLuv can only represent pastel colors, and so the Saturation
// value could be much larger than 1 for colors it can't represent.
func (col Color) HPLuv() (h, s, l float64) {
return LuvLChToHPLuv(col.LuvLChWhiteRef(hSLuvD65))
}
// DistanceHSLuv calculates Euclidean distance in the HSLuv colorspace. No idea
// how useful this is.
//
// The Hue value is divided by 100 before the calculation, so that H, S, and L
// have the same relative ranges.
func (c1 Color) DistanceHSLuv(c2 Color) float64 {
h1, s1, l1 := c1.HSLuv()
h2, s2, l2 := c2.HSLuv()
return math.Sqrt(sq((h1-h2)/100.0) + sq(s1-s2) + sq(l1-l2))
}
// DistanceHPLuv calculates Euclidean distance in the HPLuv colorspace. No idea
// how useful this is.
//
// The Hue value is divided by 100 before the calculation, so that H, S, and L
// have the same relative ranges.
func (c1 Color) DistanceHPLuv(c2 Color) float64 {
h1, s1, l1 := c1.HPLuv()
h2, s2, l2 := c2.HPLuv()
return math.Sqrt(sq((h1-h2)/100.0) + sq(s1-s2) + sq(l1-l2))
}
var m = [3][3]float64{
{3.2409699419045214, -1.5373831775700935, -0.49861076029300328},
{-0.96924363628087983, 1.8759675015077207, 0.041555057407175613},
{0.055630079696993609, -0.20397695888897657, 1.0569715142428786},
}
const kappa = 903.2962962962963
const epsilon = 0.0088564516790356308
func maxChromaForLH(l, h float64) float64 {
hRad := h / 360.0 * math.Pi * 2.0
minLength := math.MaxFloat64
for _, line := range getBounds(l) {
length := lengthOfRayUntilIntersect(hRad, line[0], line[1])
if length > 0.0 && length < minLength {
minLength = length
}
}
return minLength
}
func getBounds(l float64) [6][2]float64 {
var sub2 float64
var ret [6][2]float64
sub1 := math.Pow(l+16.0, 3.0) / 1560896.0
if sub1 > epsilon {
sub2 = sub1
} else {
sub2 = l / kappa
}
for i := range m {
for k := 0; k < 2; k++ {
top1 := (284517.0*m[i][0] - 94839.0*m[i][2]) * sub2
top2 := (838422.0*m[i][2]+769860.0*m[i][1]+731718.0*m[i][0])*l*sub2 - 769860.0*float64(k)*l
bottom := (632260.0*m[i][2]-126452.0*m[i][1])*sub2 + 126452.0*float64(k)
ret[i*2+k][0] = top1 / bottom
ret[i*2+k][1] = top2 / bottom
}
}
return ret
}
func lengthOfRayUntilIntersect(theta, x, y float64) (length float64) {
length = y / (math.Sin(theta) - x*math.Cos(theta))
return
}
func maxSafeChromaForL(l float64) float64 {
minLength := math.MaxFloat64
for _, line := range getBounds(l) {
m1 := line[0]
b1 := line[1]
x := intersectLineLine(m1, b1, -1.0/m1, 0.0)
dist := distanceFromPole(x, b1+x*m1)
if dist < minLength {
minLength = dist
}
}
return minLength
}
func intersectLineLine(x1, y1, x2, y2 float64) float64 {
return (y1 - y2) / (x2 - x1)
}
func distanceFromPole(x, y float64) float64 {
return math.Sqrt(math.Pow(x, 2.0) + math.Pow(y, 2.0))
}
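A small sketch of the two constructors introduced in this file; per the doc comments above, both clamp their output to a valid sRGB color:
```go
package main

import (
	"fmt"

	colorful "github.com/lucasb-eyer/go-colorful"
)

func main() {
	// Hue 210°, 60% saturation, 50% lightness in HSLuv; the result is clamped to valid sRGB.
	c := colorful.HSLuv(210.0, 0.6, 0.5)
	fmt.Println(c.Hex(), c.IsValid()) // IsValid is always true per the docs above

	// HPLuv covers only pastel colors; even s close to 1 yields a pastel.
	p := colorful.HPLuv(210.0, 0.6, 0.8)
	fmt.Println(p.Hex())
}
```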

View File

@ -61,7 +61,7 @@ func SoftPaletteEx(colorsCount int, settings SoftPaletteSettings) ([]Color, erro
// That would cause some infinite loops down there...
if len(samples) < colorsCount {
return nil, fmt.Errorf("palettegen: more colors requested (%v) than samples available (%v). Your requested color count may be wrong, you might want to use many samples or your constraint function makes the valid color space too small.", colorsCount, len(samples))
return nil, fmt.Errorf("palettegen: more colors requested (%v) than samples available (%v). Your requested color count may be wrong, you might want to use many samples or your constraint function makes the valid color space too small", colorsCount, len(samples))
} else if len(samples) == colorsCount {
return labs2cols(samples), nil // Oops?
}

View File

@ -1,3 +1,5 @@
module github.com/mattn/go-runewidth
go 1.9
require github.com/rivo/uniseg v0.1.0

2
vendor/github.com/mattn/go-runewidth/go.sum generated vendored Normal file
View File

@ -0,0 +1,2 @@
github.com/rivo/uniseg v0.1.0 h1:+2KBaVoUmb9XzDsrx/Ct0W/EYOSFf/nWTauy++DprtY=
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=

View File

@ -2,6 +2,8 @@ package runewidth
import (
"os"
"github.com/rivo/uniseg"
)
//go:generate go run script/generate.go
@ -10,9 +12,6 @@ var (
// EastAsianWidth will be set true if the current locale is CJK
EastAsianWidth bool
// ZeroWidthJoiner is flag to set to use UTR#51 ZWJ
ZeroWidthJoiner bool
// DefaultCondition is a condition in current locale
DefaultCondition = &Condition{}
)
@ -30,7 +29,6 @@ func handleEnv() {
}
// update DefaultCondition
DefaultCondition.EastAsianWidth = EastAsianWidth
DefaultCondition.ZeroWidthJoiner = ZeroWidthJoiner
}
type interval struct {
@ -85,15 +83,13 @@ var nonprint = table{
// Condition have flag EastAsianWidth whether the current locale is CJK or not.
type Condition struct {
EastAsianWidth bool
ZeroWidthJoiner bool
EastAsianWidth bool
}
// NewCondition return new instance of Condition which is current locale.
func NewCondition() *Condition {
return &Condition{
EastAsianWidth: EastAsianWidth,
ZeroWidthJoiner: ZeroWidthJoiner,
EastAsianWidth: EastAsianWidth,
}
}
@ -110,38 +106,20 @@ func (c *Condition) RuneWidth(r rune) int {
}
}
func (c *Condition) stringWidth(s string) (width int) {
for _, r := range []rune(s) {
width += c.RuneWidth(r)
}
return width
}
func (c *Condition) stringWidthZeroJoiner(s string) (width int) {
r1, r2 := rune(0), rune(0)
for _, r := range []rune(s) {
if r == 0xFE0E || r == 0xFE0F {
continue
}
w := c.RuneWidth(r)
if r2 == 0x200D && inTables(r, emoji) && inTables(r1, emoji) {
if width < w {
width = w
}
} else {
width += w
}
r1, r2 = r2, r
}
return width
}
// StringWidth return width as you can see
func (c *Condition) StringWidth(s string) (width int) {
if c.ZeroWidthJoiner {
return c.stringWidthZeroJoiner(s)
g := uniseg.NewGraphemes(s)
for g.Next() {
var chWidth int
for _, r := range g.Runes() {
chWidth = c.RuneWidth(r)
if chWidth > 0 {
break // Our best guess at this point is to use the width of the first non-zero-width rune.
}
}
width += chWidth
}
return c.stringWidth(s)
return
}
// Truncate return string truncated with w cells
@ -149,19 +127,25 @@ func (c *Condition) Truncate(s string, w int, tail string) string {
if c.StringWidth(s) <= w {
return s
}
r := []rune(s)
tw := c.StringWidth(tail)
w -= tw
width := 0
i := 0
for ; i < len(r); i++ {
cw := c.RuneWidth(r[i])
if width+cw > w {
w -= c.StringWidth(tail)
var width int
pos := len(s)
g := uniseg.NewGraphemes(s)
for g.Next() {
var chWidth int
for _, r := range g.Runes() {
chWidth = c.RuneWidth(r)
if chWidth > 0 {
break // See StringWidth() for details.
}
}
if width+chWidth > w {
pos, _ = g.Positions()
break
}
width += cw
width += chWidth
}
return string(r[0:i]) + tail
return s[:pos] + tail
}
// Wrap return string wrapped with w cells
@ -169,7 +153,7 @@ func (c *Condition) Wrap(s string, w int) string {
width := 0
out := ""
for _, r := range []rune(s) {
cw := RuneWidth(r)
cw := c.RuneWidth(r)
if r == '\n' {
out += string(r)
width = 0
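For context on the grapheme-based `StringWidth` and `Truncate` logic above, a short sketch using only the `Condition` API visible in this diff:
```go
package main

import (
	"fmt"

	runewidth "github.com/mattn/go-runewidth"
)

func main() {
	c := runewidth.NewCondition()
	// Wide CJK runes count as 2 cells: 2+2+1+5 = 10 display cells in total.
	s := "世界 hello"
	fmt.Println(c.StringWidth(s)) // 10
	// Truncate to 7 cells, reserving 3 cells for the "..." tail: prints "世界..."
	fmt.Println(c.Truncate(s, 7, "..."))
}
```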

View File

@ -1,15 +0,0 @@
# Binaries for programs and plugins
*.exe
*.exe~
*.dll
*.so
*.dylib
# Test binary, built with `go test -c`
*.test
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
# goland idea folder
*.idea

View File

@ -1,5 +1,6 @@
language: go
go:
- 1.1.x
- 1.2.x
- 1.3.x
- 1.4.x
@ -10,7 +11,6 @@ go:
- 1.9.x
- 1.10.x
- 1.11.x
- 1.12.x
- tip
matrix:
allow_failures:

View File

@ -1,7 +1,6 @@
1. John Graham-Cumming http://jgc.org/
1. Mohammad Gufran https://github.com/Gufran
1. Steven Gutzwiller https://github.com/StevenGutzwiller
1. Andrew Krasichkov @buglloc https://github.com/buglloc
1. John Graham-Cumming http://jgc.org/
1. Mike Samuel mikesamuel@gmail.com
1. Dmitri Shuralyov shurcooL@gmail.com
1. https://github.com/opennota
1. https://github.com/opennota
1. https://github.com/Gufran

View File

@ -58,12 +58,10 @@ We expect to be supplied with well-formatted HTML (closing elements for every ap
### Supported Go Versions
bluemonday is tested against Go 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9, 1.10, 1.11, 1.12, and tip.
bluemonday is tested against Go 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9, and tip.
We do not support Go 1.0 as we depend on `golang.org/x/net/html` which includes a reference to `io.ErrNoProgress` which did not exist in Go 1.0.
We support Go 1.1 but Travis no longer tests against it.
## Is it production ready?
*Yes*
@ -92,7 +90,7 @@ func main() {
// Do this once for each unique policy, and use the policy for the life of the program
// Policy creation/editing is not safe to use in multiple goroutines
p := bluemonday.UGCPolicy()
// The policy can then be used to sanitize lots of input and it is safe to use the policy in multiple goroutines
html := p.Sanitize(
`<a onblur="alert(secret)" href="http://www.google.com">Google</a>`,
@ -169,26 +167,12 @@ To add elements to a policy either add just the elements:
p.AllowElements("b", "strong")
```
Or using a regex:
_Note: if an element is added by name as shown above, any matching regex will be ignored_
It is also recommended to ensure multiple patterns don't overlap as order of execution is not guaranteed and can result in some rules being missed.
```go
p.AllowElementsMatching(regexp.MustCompile(`^my-element-`))
```
Or add elements as a virtue of adding an attribute:
```go
// Not the recommended pattern, see the recommendation on using .Matching() below
p.AllowAttrs("nowrap").OnElements("td", "th")
```
Again, this also supports a regex pattern match alternative:
```go
p.AllowAttrs("nowrap").OnElementsMatching(regexp.MustCompile(`^my-element-`))
```
Attributes can either be added to all elements:
```go
p.AllowAttrs("dir").Matching(regexp.MustCompile("(?i)rtl|ltr")).Globally()
@ -218,49 +202,6 @@ p := bluemonday.UGCPolicy()
p.AllowElements("fieldset", "select", "option")
```
### Inline CSS
Although it's possible to handle inline CSS using `AllowAttrs` with a `Matching` rule, writing a single monolithic regular expression to safely process all inline CSS which you wish to allow is not a trivial task. Instead of attempting to do so, you can whitelist the `style` attribute on whichever element(s) you desire and use style policies to control and sanitize inline styles.
It is suggested that you use `Matching` (with a suitable regular expression),
`MatchingEnum`, or `MatchingHandler` to ensure each style matches your needs,
but default handlers are supplied for most widely used styles.
Similar to attributes, you can allow specific CSS properties to be set inline:
```go
p.AllowAttrs("style").OnElements("span", "p")
// Allow the 'color' property with valid RGB(A) hex values only (on any element allowed a 'style' attribute)
p.AllowStyles("color").Matching(regexp.MustCompile("(?i)^#([0-9a-f]{3,4}|[0-9a-f]{6}|[0-9a-f]{8})$")).Globally()
```
Additionally, you can allow a CSS property to be set only to an allowed value:
```go
p.AllowAttrs("style").OnElements("span", "p")
// Allow the 'text-decoration' property to be set to 'underline', 'line-through' or 'none'
// on 'span' elements only
p.AllowStyles("text-decoration").MatchingEnum("underline", "line-through", "none").OnElements("span")
```
Or you can specify elements based on a regex pattern match:
```go
p.AllowAttrs("style").OnElementsMatching(regexp.MustCompile(`^my-element-`))
// Allow the 'text-decoration' property to be set to 'underline', 'line-through' or 'none'
// on 'span' elements only
p.AllowStyles("text-decoration").MatchingEnum("underline", "line-through", "none").OnElementsMatching(regexp.MustCompile(`^my-element-`))
```
If you need more specific checking, you can create a handler that takes in a string and returns a bool to
validate the values for a given property. The string parameter has been
converted to lowercase and unicode code points have been converted.
```go
myHandler := func(value string) bool{
return true
}
p.AllowAttrs("style").OnElements("span", "p")
// Allow the 'color' property with values validated by the handler (on any element allowed a 'style' attribute)
p.AllowStyles("color").MatchingHandler(myHandler).Globally()
```
### Links
Links are difficult beasts to sanitise safely and also one of the biggest attack vectors for malicious content.
@ -295,13 +236,6 @@ Regardless of whether you have enabled parseable URLs, you can force all URLs to
p.RequireNoFollowOnLinks(true)
```
Similarly, you can force all URLs to have "noreferrer" in their rel attribute.
```go
// This applies to "a" "area" "link" elements that have a "href" attribute
p.RequireNoReferrerOnLinks(true)
```
We provide a convenience method that applies all of the above, but you will still need to whitelist the linkable elements for the URL rules to be applied to:
```go
p.AllowStandardURLs()
@ -382,6 +316,7 @@ It is not the job of bluemonday to fix your bad HTML, it is merely the job of bl
## TODO
* Add support for CSS sanitisation to allow some CSS properties based on a whitelist, possibly using the [Gorilla CSS3 scanner](http://www.gorillatoolkit.org/pkg/css/scanner) - PRs welcome so long as testing covers XSS and demonstrates safety first
* Investigate whether devs want to blacklist elements and attributes. This would allow devs to take an existing policy (such as the `bluemonday.UGCPolicy()` ) that encapsulates 90% of what they're looking for but does more than they need, and to remove the extra things they do not want to make it 100% what they want
* Investigate whether devs want a validating HTML mode, in which the HTML elements are not just transformed into a balanced tree (every start tag has a closing tag at the correct depth) but also that elements and character data appear only in their allowed context (i.e. that a `table` element isn't a descendent of a `caption`, that `colgroup`, `thead`, `tbody`, `tfoot` and `tr` are permitted, and that character data is not permitted)

View File

@ -2,9 +2,4 @@ module github.com/microcosm-cc/bluemonday
go 1.9
require (
github.com/aymerick/douceur v0.2.0 // indirect
github.com/chris-ramon/douceur v0.2.0
github.com/gorilla/css v1.0.0 // indirect
golang.org/x/net v0.0.0-20181220203305-927f97764cc3
)
require golang.org/x/net v0.0.0-20181220203305-927f97764cc3

View File

@ -1,8 +1,2 @@
github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
github.com/chris-ramon/douceur v0.2.0 h1:IDMEdxlEUUBYBKE4z/mJnFyVXox+MjuEVDJNN27glkU=
github.com/chris-ramon/douceur v0.2.0/go.mod h1:wDW5xjJdeoMm1mRt4sD4c/LbF/mWdEpRXQKjTR8nIBE=
github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY=
github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c=
golang.org/x/net v0.0.0-20181220203305-927f97764cc3 h1:eH6Eip3UpmR+yM/qI9Ijluzb1bNv/cAU/n+6l8tRSis=
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=

File diff suppressed because it is too large

View File

@ -135,7 +135,7 @@ func (p *Policy) AllowStandardURLs() {
// Most common URL schemes only
p.AllowURLSchemes("mailto", "http", "https")
// For linking elements we will add rel="nofollow" if it does not already exist
// For all anchors we will add rel="nofollow" if it does not already exist
// This applies to "a" "area" "link"
p.RequireNoFollowOnLinks(true)
}

View File

@ -29,8 +29,6 @@
package bluemonday
//TODO sgutzwiller create map of styles to default handlers
//TODO sgutzwiller create handlers for various attributes
import (
"net/url"
"regexp"
@ -53,22 +51,14 @@ type Policy struct {
// tag is replaced by a space character.
addSpaces bool
// When true, add rel="nofollow" to HTML a, area, and link tags
// When true, add rel="nofollow" to HTML anchors
requireNoFollow bool
// When true, add rel="nofollow" to HTML a, area, and link tags
// When true, add rel="nofollow" to HTML anchors
// Will add for href="http://foo"
// Will skip for href="/foo" or href="foo"
requireNoFollowFullyQualifiedLinks bool
// When true, add rel="noreferrer" to HTML a, area, and link tags
requireNoReferrer bool
// When true, add rel="noreferrer" to HTML a, area, and link tags
// Will add for href="http://foo"
// Will skip for href="/foo" or href="foo"
requireNoReferrerFullyQualifiedLinks bool
// When true add target="_blank" to fully qualified links
// Will add for href="http://foo"
// Will skip for href="/foo" or href="foo"
@ -86,21 +76,9 @@ type Policy struct {
// map[htmlElementName]map[htmlAttributeName]attrPolicy
elsAndAttrs map[string]map[string]attrPolicy
// elsMatchingAndAttrs stores regex based element matches along with attributes
elsMatchingAndAttrs map[*regexp.Regexp]map[string]attrPolicy
// map[htmlAttributeName]attrPolicy
globalAttrs map[string]attrPolicy
// map[htmlElementName]map[cssPropertyName]stylePolicy
elsAndStyles map[string]map[string]stylePolicy
// map[regex]map[cssPropertyName]stylePolicy
elsMatchingAndStyles map[*regexp.Regexp]map[string]stylePolicy
// map[cssPropertyName]stylePolicy
globalStyles map[string]stylePolicy
// If urlPolicy is nil, all URLs with matching schema are allowed.
// Otherwise, only the URLs with matching schema and urlPolicy(url)
// returning true are allowed.
@ -115,16 +93,6 @@ type Policy struct {
// be maintained in the output HTML.
setOfElementsAllowedWithoutAttrs map[string]struct{}
// If an element has had all attributes removed as a result of a policy
// being applied, then the element would be removed from the output.
//
// However some elements are valid and have strong layout meaning without
// any attributes, i.e. <table>.
//
// In this case, any element matching a regular expression will be accepted without
// attributes added.
setOfElementsMatchingAllowedWithoutAttrs []*regexp.Regexp
setOfElementsToSkipContent map[string]struct{}
}
@ -135,20 +103,6 @@ type attrPolicy struct {
regexp *regexp.Regexp
}
type stylePolicy struct {
// handler to validate
handler func(string) bool
// optional pattern to match, when not nil the regexp needs to match
// otherwise the property is removed
regexp *regexp.Regexp
// optional list of allowed property values, for properties which
// have a defined list of allowed values; property will be removed
// if the value is not allowed
enum []string
}
type attrPolicyBuilder struct {
p *Policy
@ -157,26 +111,13 @@ type attrPolicyBuilder struct {
allowEmpty bool
}
type stylePolicyBuilder struct {
p *Policy
propertyNames []string
regexp *regexp.Regexp
enum []string
handler func(string) bool
}
type urlPolicy func(url *url.URL) (allowUrl bool)
// init initializes the maps if this has not been done already
func (p *Policy) init() {
if !p.initialized {
p.elsAndAttrs = make(map[string]map[string]attrPolicy)
p.elsMatchingAndAttrs = make(map[*regexp.Regexp]map[string]attrPolicy)
p.globalAttrs = make(map[string]attrPolicy)
p.elsAndStyles = make(map[string]map[string]stylePolicy)
p.elsMatchingAndStyles = make(map[*regexp.Regexp]map[string]stylePolicy)
p.globalStyles = make(map[string]stylePolicy)
p.allowURLSchemes = make(map[string]urlPolicy)
p.setOfElementsAllowedWithoutAttrs = make(map[string]struct{})
p.setOfElementsToSkipContent = make(map[string]struct{})
@ -304,30 +245,6 @@ func (abp *attrPolicyBuilder) OnElements(elements ...string) *Policy {
return abp.p
}
// OnElementsMatching will bind an attribute policy to all elements matching a given regex
// and return the updated policy
func (abp *attrPolicyBuilder) OnElementsMatching(regex *regexp.Regexp) *Policy {
for _, attr := range abp.attrNames {
if _, ok := abp.p.elsMatchingAndAttrs[regex]; !ok {
abp.p.elsMatchingAndAttrs[regex] = make(map[string]attrPolicy)
}
ap := attrPolicy{}
if abp.regexp != nil {
ap.regexp = abp.regexp
}
abp.p.elsMatchingAndAttrs[regex][attr] = ap
}
if abp.allowEmpty {
abp.p.setOfElementsMatchingAllowedWithoutAttrs = append(abp.p.setOfElementsMatchingAllowedWithoutAttrs, regex)
if _, ok := abp.p.elsMatchingAndAttrs[regex]; !ok {
abp.p.elsMatchingAndAttrs[regex] = make(map[string]attrPolicy)
}
}
return abp.p
}
// Globally will bind an attribute policy to all HTML elements and return the
// updated policy
func (abp *attrPolicyBuilder) Globally() *Policy {
@ -348,139 +265,6 @@ func (abp *attrPolicyBuilder) Globally() *Policy {
return abp.p
}
// AllowStyles takes a range of CSS property names and returns a
// style policy builder that allows you to specify the pattern and scope of
// the whitelisted property.
//
// The style policy is only added to the core policy when either Globally()
// or OnElements(...) are called.
func (p *Policy) AllowStyles(propertyNames ...string) *stylePolicyBuilder {
p.init()
abp := stylePolicyBuilder{
p: p,
}
for _, propertyName := range propertyNames {
abp.propertyNames = append(abp.propertyNames, strings.ToLower(propertyName))
}
return &abp
}
// Matching allows a regular expression to be applied to a nascent style
// policy, and returns the style policy. Calling this more than once will
// replace the existing regexp.
func (spb *stylePolicyBuilder) Matching(regex *regexp.Regexp) *stylePolicyBuilder {
spb.regexp = regex
return spb
}
// MatchingEnum allows a list of allowed values to be applied to a nascent style
// policy, and returns the style policy. Calling this more than once will
// replace the existing list of allowed values.
func (spb *stylePolicyBuilder) MatchingEnum(enum ...string) *stylePolicyBuilder {
spb.enum = enum
return spb
}
// MatchingHandler allows a handler to be applied to a nascent style
// policy, and returns the style policy. Calling this more than once will
// replace the existing handler.
func (spb *stylePolicyBuilder) MatchingHandler(handler func(string) bool) *stylePolicyBuilder {
spb.handler = handler
return spb
}
// OnElements will bind a style policy to a given range of HTML elements
// and return the updated policy
func (spb *stylePolicyBuilder) OnElements(elements ...string) *Policy {
for _, element := range elements {
element = strings.ToLower(element)
for _, attr := range spb.propertyNames {
if _, ok := spb.p.elsAndStyles[element]; !ok {
spb.p.elsAndStyles[element] = make(map[string]stylePolicy)
}
sp := stylePolicy{}
if spb.handler != nil {
sp.handler = spb.handler
} else if len(spb.enum) > 0 {
sp.enum = spb.enum
} else if spb.regexp != nil {
sp.regexp = spb.regexp
} else {
sp.handler = getDefaultHandler(attr)
}
spb.p.elsAndStyles[element][attr] = sp
}
}
return spb.p
}
// OnElementsMatching will bind a style policy to any HTML elements matching the pattern
// and return the updated policy
func (spb *stylePolicyBuilder) OnElementsMatching(regex *regexp.Regexp) *Policy {
for _, attr := range spb.propertyNames {
if _, ok := spb.p.elsMatchingAndStyles[regex]; !ok {
spb.p.elsMatchingAndStyles[regex] = make(map[string]stylePolicy)
}
sp := stylePolicy{}
if spb.handler != nil {
sp.handler = spb.handler
} else if len(spb.enum) > 0 {
sp.enum = spb.enum
} else if spb.regexp != nil {
sp.regexp = spb.regexp
} else {
sp.handler = getDefaultHandler(attr)
}
spb.p.elsMatchingAndStyles[regex][attr] = sp
}
return spb.p
}
// Globally will bind a style policy to all HTML elements and return the
// updated policy
func (spb *stylePolicyBuilder) Globally() *Policy {
for _, attr := range spb.propertyNames {
if _, ok := spb.p.globalStyles[attr]; !ok {
spb.p.globalStyles[attr] = stylePolicy{}
}
// Use only one strategy for validating styles, fallback to default
sp := stylePolicy{}
if spb.handler != nil {
sp.handler = spb.handler
} else if len(spb.enum) > 0 {
sp.enum = spb.enum
} else if spb.regexp != nil {
sp.regexp = spb.regexp
} else {
sp.handler = getDefaultHandler(attr)
}
spb.p.globalStyles[attr] = sp
}
return spb.p
}
// AllowElements will append HTML elements to the whitelist without applying an
// attribute policy to those elements (the elements are permitted
// sans-attributes)
@ -498,16 +282,8 @@ func (p *Policy) AllowElements(names ...string) *Policy {
return p
}
func (p *Policy) AllowElementsMatching(regex *regexp.Regexp) *Policy {
p.init()
if _, ok := p.elsMatchingAndAttrs[regex]; !ok {
p.elsMatchingAndAttrs[regex] = make(map[string]attrPolicy)
}
return p
}
// RequireNoFollowOnLinks will result in all a, area, link tags having a
// rel="nofollow"added to them if one does not already exist
// RequireNoFollowOnLinks will result in all <a> tags having a rel="nofollow"
// added to them if one does not already exist
//
// Note: This requires p.RequireParseableURLs(true) and will enable it.
func (p *Policy) RequireNoFollowOnLinks(require bool) *Policy {
@ -518,10 +294,9 @@ func (p *Policy) RequireNoFollowOnLinks(require bool) *Policy {
return p
}
// RequireNoFollowOnFullyQualifiedLinks will result in all a, area, and link
// tags that point to a non-local destination (i.e. starts with a protocol and
// has a host) having a rel="nofollow" added to them if one does not already
// exist
// RequireNoFollowOnFullyQualifiedLinks will result in all <a> tags that point
// to a non-local destination (i.e. starts with a protocol and has a host)
// having a rel="nofollow" added to them if one does not already exist
//
// Note: This requires p.RequireParseableURLs(true) and will enable it.
func (p *Policy) RequireNoFollowOnFullyQualifiedLinks(require bool) *Policy {
@ -532,35 +307,9 @@ func (p *Policy) RequireNoFollowOnFullyQualifiedLinks(require bool) *Policy {
return p
}
// RequireNoReferrerOnLinks will result in all a, area, and link tags having a
// rel="noreferrrer" added to them if one does not already exist
//
// Note: This requires p.RequireParseableURLs(true) and will enable it.
func (p *Policy) RequireNoReferrerOnLinks(require bool) *Policy {
p.requireNoReferrer = require
p.requireParseableURLs = true
return p
}
// RequireNoReferrerOnFullyQualifiedLinks will result in all a, area, and link
// tags that point to a non-local destination (i.e. starts with a protocol and
// has a host) having a rel="noreferrer" added to them if one does not already
// exist
//
// Note: This requires p.RequireParseableURLs(true) and will enable it.
func (p *Policy) RequireNoReferrerOnFullyQualifiedLinks(require bool) *Policy {
p.requireNoReferrerFullyQualifiedLinks = require
p.requireParseableURLs = true
return p
}
// AddTargetBlankToFullyQualifiedLinks will result in all a, area and link tags
// that point to a non-local destination (i.e. starts with a protocol and has a
// host) having a target="_blank" added to them if one does not already exist
// AddTargetBlankToFullyQualifiedLinks will result in all <a> tags that point
// to a non-local destination (i.e. starts with a protocol and has a host)
// having a target="_blank" added to them if one does not already exist
//
// Note: This requires p.RequireParseableURLs(true) and will enable it.
func (p *Policy) AddTargetBlankToFullyQualifiedLinks(require bool) *Policy {

View File

@ -34,19 +34,15 @@ import (
"io"
"net/url"
"regexp"
"strconv"
"strings"
"golang.org/x/net/html"
cssparser "github.com/chris-ramon/douceur/parser"
)
var (
dataAttribute = regexp.MustCompile("^data-.+")
dataAttributeXMLPrefix = regexp.MustCompile("^xml.+")
dataAttributeInvalidChars = regexp.MustCompile("[A-Z;]+")
cssUnicodeChar = regexp.MustCompile(`\\[0-9a-f]{1,6} ?`)
)
// Sanitize takes a string that contains a HTML fragment or document and applies
@ -86,98 +82,6 @@ func (p *Policy) SanitizeReader(r io.Reader) *bytes.Buffer {
return p.sanitize(r)
}
const escapedURLChars = "'<>\"\r"
func escapeUrlComponent(val string) string {
w := bytes.NewBufferString("")
i := strings.IndexAny(val, escapedURLChars)
for i != -1 {
if _, err := w.WriteString(val[:i]); err != nil {
return w.String()
}
var esc string
switch val[i] {
case '\'':
// "&#39;" is shorter than "&apos;" and apos was not in HTML until HTML5.
esc = "&#39;"
case '<':
esc = "&lt;"
case '>':
esc = "&gt;"
case '"':
// "&#34;" is shorter than "&quot;".
esc = "&#34;"
case '\r':
esc = "&#13;"
default:
panic("unrecognized escape character")
}
val = val[i+1:]
if _, err := w.WriteString(esc); err != nil {
return w.String()
}
i = strings.IndexAny(val, escapedURLChars)
}
w.WriteString(val)
return w.String()
}
func sanitizedUrl(val string) (string, error) {
u, err := url.Parse(val)
if err != nil {
return "", err
}
// sanitize the url query params
sanitizedQueryValues := make(url.Values, 0)
queryValues := u.Query()
for k, vals := range queryValues {
sk := html.EscapeString(k)
for _, v := range vals {
sv := v
sanitizedQueryValues.Add(sk, sv)
}
}
u.RawQuery = sanitizedQueryValues.Encode()
// u.String() will also sanitize host/scheme/user/pass
return u.String(), nil
}
func (p *Policy) writeLinkableBuf(buff *bytes.Buffer, token *html.Token) {
// do not escape multiple query parameters
tokenBuff := bytes.NewBufferString("")
tokenBuff.WriteString("<")
tokenBuff.WriteString(token.Data)
for _, attr := range token.Attr {
tokenBuff.WriteByte(' ')
tokenBuff.WriteString(attr.Key)
tokenBuff.WriteString(`="`)
switch attr.Key {
case "href", "src":
u, ok := p.validURL(attr.Val)
if !ok {
tokenBuff.WriteString(html.EscapeString(attr.Val))
continue
}
u, err := sanitizedUrl(u)
if err == nil {
tokenBuff.WriteString(u)
} else {
// fallthrough
tokenBuff.WriteString(html.EscapeString(attr.Val))
}
default:
// re-apply
tokenBuff.WriteString(html.EscapeString(attr.Val))
}
tokenBuff.WriteByte('"')
}
if token.Type == html.SelfClosingTagToken {
tokenBuff.WriteString("/")
}
tokenBuff.WriteString(">")
buff.WriteString(tokenBuff.String())
}
// Performs the actual sanitization process.
func (p *Policy) sanitize(r io.Reader) *bytes.Buffer {
@ -229,23 +133,20 @@ func (p *Policy) sanitize(r io.Reader) *bytes.Buffer {
case html.StartTagToken:
mostRecentlyStartedToken = strings.ToLower(token.Data)
mostRecentlyStartedToken = token.Data
aps, ok := p.elsAndAttrs[token.Data]
if !ok {
aa, matched := p.matchRegex(token.Data)
if !matched {
if _, ok := p.setOfElementsToSkipContent[token.Data]; ok {
skipElementContent = true
skippingElementsCount++
}
if p.addSpaces {
buff.WriteString(" ")
}
break
if _, ok := p.setOfElementsToSkipContent[token.Data]; ok {
skipElementContent = true
skippingElementsCount++
}
aps = aa
if p.addSpaces {
buff.WriteString(" ")
}
break
}
if len(token.Attr) != 0 {
token.Attr = p.sanitizeAttrs(token.Data, token.Attr, aps)
}
@ -262,17 +163,12 @@ func (p *Policy) sanitize(r io.Reader) *bytes.Buffer {
}
if !skipElementContent {
// do not escape multiple query parameters
if linkable(token.Data) {
p.writeLinkableBuf(&buff, &token)
} else {
buff.WriteString(token.String())
}
buff.WriteString(token.String())
}
case html.EndTagToken:
if mostRecentlyStartedToken == strings.ToLower(token.Data) {
if mostRecentlyStartedToken == token.Data {
mostRecentlyStartedToken = ""
}
@ -286,27 +182,18 @@ func (p *Policy) sanitize(r io.Reader) *bytes.Buffer {
}
break
}
if _, ok := p.elsAndAttrs[token.Data]; !ok {
match := false
for regex := range p.elsMatchingAndAttrs {
if regex.MatchString(token.Data) {
skipElementContent = false
match = true
break
}
}
if _, ok := p.setOfElementsToSkipContent[token.Data]; ok && !match {
if _, ok := p.setOfElementsToSkipContent[token.Data]; ok {
skippingElementsCount--
if skippingElementsCount == 0 {
skipElementContent = false
}
}
if !match {
if p.addSpaces {
buff.WriteString(" ")
}
break
if p.addSpaces {
buff.WriteString(" ")
}
break
}
if !skipElementContent {
@ -317,14 +204,10 @@ func (p *Policy) sanitize(r io.Reader) *bytes.Buffer {
aps, ok := p.elsAndAttrs[token.Data]
if !ok {
aa, matched := p.matchRegex(token.Data)
if !matched {
if p.addSpaces && !matched {
buff.WriteString(" ")
}
break
if p.addSpaces {
buff.WriteString(" ")
}
aps = aa
break
}
if len(token.Attr) != 0 {
@ -334,16 +217,12 @@ func (p *Policy) sanitize(r io.Reader) *bytes.Buffer {
if len(token.Attr) == 0 && !p.allowNoAttrs(token.Data) {
if p.addSpaces {
buff.WriteString(" ")
break
}
break
}
if !skipElementContent {
// do not escape multiple query parameters
if linkable(token.Data) {
p.writeLinkableBuf(&buff, &token)
} else {
buff.WriteString(token.String())
}
buff.WriteString(token.String())
}
case html.TextToken:
@ -363,7 +242,6 @@ func (p *Policy) sanitize(r io.Reader) *bytes.Buffer {
buff.WriteString(token.String())
}
}
default:
// A token that didn't exist in the html package when we wrote this
return &bytes.Buffer{}
@ -384,23 +262,6 @@ func (p *Policy) sanitizeAttrs(
return attrs
}
hasStylePolicies := false
sps, elementHasStylePolicies := p.elsAndStyles[elementName]
if len(p.globalStyles) > 0 || (elementHasStylePolicies && len(sps) > 0) {
hasStylePolicies = true
}
// no specific element policy found, look for a pattern match
if !hasStylePolicies {
for k, v := range p.elsMatchingAndStyles {
if k.MatchString(elementName) {
if len(v) > 0 {
hasStylePolicies = true
break
}
}
}
}
// Builds a new attribute slice based on the whether the attribute has been
// whitelisted explicitly or globally.
cleanAttrs := []html.Attribute{}
@ -412,19 +273,6 @@ func (p *Policy) sanitizeAttrs(
continue
}
}
// Is this a "style" attribute, and if so, do we need to sanitize it?
if htmlAttr.Key == "style" && hasStylePolicies {
htmlAttr = p.sanitizeStyles(htmlAttr, elementName)
if htmlAttr.Val == "" {
// We've sanitized away any and all styles; don't bother to
// output the style attribute (even if it's allowed)
continue
} else {
cleanAttrs = append(cleanAttrs, htmlAttr)
continue
}
}
// Is there an element specific attribute policy that applies?
if ap, ok := aps[htmlAttr.Key]; ok {
if ap.regexp != nil {
@ -506,8 +354,6 @@ func (p *Policy) sanitizeAttrs(
if (p.requireNoFollow ||
p.requireNoFollowFullyQualifiedLinks ||
p.requireNoReferrer ||
p.requireNoReferrerFullyQualifiedLinks ||
p.addTargetBlankToFullyQualifiedLinks) &&
len(cleanAttrs) > 0 {
@ -535,16 +381,12 @@ func (p *Policy) sanitizeAttrs(
if hrefFound {
var (
noFollowFound bool
noReferrerFound bool
targetBlankFound bool
)
addNoFollow := (p.requireNoFollow ||
externalLink && p.requireNoFollowFullyQualifiedLinks)
addNoReferrer := (p.requireNoReferrer ||
externalLink && p.requireNoReferrerFullyQualifiedLinks)
addTargetBlank := (externalLink &&
p.addTargetBlankToFullyQualifiedLinks)
@ -552,18 +394,18 @@ func (p *Policy) sanitizeAttrs(
for _, htmlAttr := range cleanAttrs {
var appended bool
if htmlAttr.Key == "rel" && (addNoFollow || addNoReferrer) {
if htmlAttr.Key == "rel" && addNoFollow {
if addNoFollow && !strings.Contains(htmlAttr.Val, "nofollow") {
if strings.Contains(htmlAttr.Val, "nofollow") {
noFollowFound = true
tmpAttrs = append(tmpAttrs, htmlAttr)
appended = true
} else {
htmlAttr.Val += " nofollow"
noFollowFound = true
tmpAttrs = append(tmpAttrs, htmlAttr)
appended = true
}
if addNoReferrer && !strings.Contains(htmlAttr.Val, "noreferrer") {
htmlAttr.Val += " noreferrer"
}
noFollowFound = addNoFollow
noReferrerFound = addNoReferrer
tmpAttrs = append(tmpAttrs, htmlAttr)
appended = true
}
if elementName == "a" && htmlAttr.Key == "target" {
@ -582,22 +424,14 @@ func (p *Policy) sanitizeAttrs(
tmpAttrs = append(tmpAttrs, htmlAttr)
}
}
if noFollowFound || noReferrerFound || targetBlankFound {
if noFollowFound || targetBlankFound {
cleanAttrs = tmpAttrs
}
if (addNoFollow && !noFollowFound) || (addNoReferrer && !noReferrerFound) {
if addNoFollow && !noFollowFound {
rel := html.Attribute{}
rel.Key = "rel"
if addNoFollow {
rel.Val = "nofollow"
}
if addNoReferrer {
if rel.Val != "" {
rel.Val += " "
}
rel.Val += "noreferrer"
}
rel.Val = "nofollow"
cleanAttrs = append(cleanAttrs, rel)
}
@ -667,95 +501,8 @@ func (p *Policy) sanitizeAttrs(
return cleanAttrs
}
func (p *Policy) sanitizeStyles(attr html.Attribute, elementName string) html.Attribute {
sps := p.elsAndStyles[elementName]
if len(sps) == 0 {
sps = map[string]stylePolicy{}
// check for any matching elements, if we don't already have a policy found
// if multiple matches are found they will be overwritten, it's best
// to not have overlapping matchers
for regex, policies := range p.elsMatchingAndStyles {
if regex.MatchString(elementName) {
for k, v := range policies {
sps[k] = v
}
}
}
}
//Add semi-colon to end to fix parsing issue
if len(attr.Val) > 0 && attr.Val[len(attr.Val)-1] != ';' {
attr.Val = attr.Val + ";"
}
decs, err := cssparser.ParseDeclarations(attr.Val)
if err != nil {
attr.Val = ""
return attr
}
clean := []string{}
prefixes := []string{"-webkit-", "-moz-", "-ms-", "-o-", "mso-", "-xv-", "-atsc-", "-wap-", "-khtml-", "prince-", "-ah-", "-hp-", "-ro-", "-rim-", "-tc-"}
for _, dec := range decs {
addedProperty := false
tempProperty := strings.ToLower(dec.Property)
tempValue := removeUnicode(strings.ToLower(dec.Value))
for _, i := range prefixes {
tempProperty = strings.TrimPrefix(tempProperty, i)
}
if sp, ok := sps[tempProperty]; ok {
if sp.handler != nil {
if sp.handler(tempValue) {
clean = append(clean, dec.Property+": "+dec.Value)
addedProperty = true
}
} else if len(sp.enum) > 0 {
if stringInSlice(tempValue, sp.enum) {
clean = append(clean, dec.Property+": "+dec.Value)
addedProperty = true
}
} else if sp.regexp != nil {
if sp.regexp.MatchString(tempValue) {
clean = append(clean, dec.Property+": "+dec.Value)
addedProperty = true
}
continue
}
}
if sp, ok := p.globalStyles[tempProperty]; ok && !addedProperty {
if sp.handler != nil {
if sp.handler(tempValue) {
clean = append(clean, dec.Property+": "+dec.Value)
}
} else if len(sp.enum) > 0 {
if stringInSlice(tempValue, sp.enum) {
clean = append(clean, dec.Property+": "+dec.Value)
}
} else if sp.regexp != nil {
if sp.regexp.MatchString(tempValue) {
clean = append(clean, dec.Property+": "+dec.Value)
}
continue
}
}
}
if len(clean) > 0 {
attr.Val = strings.Join(clean, "; ")
} else {
attr.Val = ""
}
return attr
}
func (p *Policy) allowNoAttrs(elementName string) bool {
_, ok := p.setOfElementsAllowedWithoutAttrs[elementName]
if !ok {
for _, r := range p.setOfElementsMatchingAllowedWithoutAttrs {
if r.MatchString(elementName) {
ok = true
break
}
}
}
return ok
}
@ -814,16 +561,6 @@ func linkable(elementName string) bool {
}
}
// stringInSlice returns true if needle exists in haystack
func stringInSlice(needle string, haystack []string) bool {
for _, straw := range haystack {
if strings.ToLower(straw) == strings.ToLower(needle) {
return true
}
}
return false
}
func isDataAttribute(val string) bool {
if !dataAttribute.MatchString(val) {
return false
@ -842,48 +579,3 @@ func isDataAttribute(val string) bool {
}
return true
}
func removeUnicode(value string) string {
substitutedValue := value
currentLoc := cssUnicodeChar.FindStringIndex(substitutedValue)
for currentLoc != nil {
character := substitutedValue[currentLoc[0]+1 : currentLoc[1]]
character = strings.TrimSpace(character)
if len(character) < 4 {
character = strings.Repeat("0", 4-len(character)) + character
} else {
for len(character) > 4 {
if character[0] != '0' {
character = ""
break
} else {
character = character[1:]
}
}
}
character = "\\u" + character
translatedChar, err := strconv.Unquote(`"` + character + `"`)
translatedChar = strings.TrimSpace(translatedChar)
if err != nil {
return ""
}
substitutedValue = substitutedValue[0:currentLoc[0]] + translatedChar + substitutedValue[currentLoc[1]:]
currentLoc = cssUnicodeChar.FindStringIndex(substitutedValue)
}
return substitutedValue
}
func (p *Policy) matchRegex(elementName string) (map[string]attrPolicy, bool) {
aps := make(map[string]attrPolicy, 0)
matched := false
for regex, attrs := range p.elsMatchingAndAttrs {
if regex.MatchString(elementName) {
matched = true
for k, v := range attrs {
aps[k] = v
}
}
}
return aps, matched
}

View File

@ -11,16 +11,11 @@ type Buffer struct {
bytes.Buffer
}
// PrintableRuneWidth returns the width of all printable runes in the buffer.
func (w Buffer) PrintableRuneWidth() int {
return PrintableRuneWidth(w.String())
}
func PrintableRuneWidth(s string) int {
// PrintableRuneCount returns the amount of printable runes in the buffer.
func (w Buffer) PrintableRuneCount() int {
var n int
var ansi bool
for _, c := range s {
for _, c := range w.String() {
if c == '\x1B' {
// ANSI escape sequence
ansi = true

View File

@ -66,7 +66,7 @@ func (w *WordWrap) addSpace() {
func (w *WordWrap) addWord() {
if w.word.Len() > 0 {
w.addSpace()
w.lineLen += w.word.PrintableRuneWidth()
w.lineLen += w.word.PrintableRuneCount()
w.buf.Write(w.word.Bytes())
w.word.Reset()
}
@ -139,8 +139,8 @@ func (w *WordWrap) Write(b []byte) (int, error) {
// add a line break if the current word would exceed the line's
// character limit
if w.lineLen+w.space.Len()+w.word.PrintableRuneWidth() > w.Limit &&
w.word.PrintableRuneWidth() < w.Limit {
if w.lineLen+w.space.Len()+w.word.PrintableRuneCount() > w.Limit &&
w.word.PrintableRuneCount() < w.Limit {
w.addNewLine()
}
}

View File

@ -1,8 +1,8 @@
language: go
arch:
- ppc64le
- amd64
go:
- 1.1
- 1.2
- 1.3
- 1.4
- 1.5
@ -12,3 +12,11 @@ go:
- 1.9
- "1.10"
- tip
jobs:
exclude :
- arch : ppc64le
go :
- 1.3
- arch : ppc64le
go :
- 1.4

View File

@ -25,7 +25,7 @@ Generate ASCII table on the fly ... Installation is simple as
- Set custom footer support
- Optional identical cells merging
- Set custom caption
- Optional reflowing of paragrpahs in multi-line cells.
- Optional reflowing of paragraphs in multi-line cells.
#### Example 1 - Basic
```go
@ -197,6 +197,41 @@ table.Render()
+----------+--------------------------+-------+---------+
```
#### Example 7 - Identical cells merging (specify the column index to merge)
```go
data := [][]string{
[]string{"1/1/2014", "Domain name", "1234", "$10.98"},
[]string{"1/1/2014", "January Hosting", "1234", "$10.98"},
[]string{"1/4/2014", "February Hosting", "3456", "$51.00"},
[]string{"1/4/2014", "February Extra Bandwidth", "4567", "$30.00"},
}
table := tablewriter.NewWriter(os.Stdout)
table.SetHeader([]string{"Date", "Description", "CV2", "Amount"})
table.SetFooter([]string{"", "", "Total", "$146.93"})
table.SetAutoMergeCellsByColumnIndex([]int{2, 3})
table.SetRowLine(true)
table.AppendBulk(data)
table.Render()
```
##### Output 7
```
+----------+--------------------------+-------+---------+
| DATE | DESCRIPTION | CV2 | AMOUNT |
+----------+--------------------------+-------+---------+
| 1/1/2014 | Domain name | 1234 | $10.98 |
+----------+--------------------------+ + +
| 1/1/2014 | January Hosting | | |
+----------+--------------------------+-------+---------+
| 1/4/2014 | February Hosting | 3456 | $51.00 |
+----------+--------------------------+-------+---------+
| 1/4/2014 | February Extra Bandwidth | 4567 | $30.00 |
+----------+--------------------------+-------+---------+
| TOTAL | $146.93 |
+----------+--------------------------+-------+---------+
```
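As a usage note grounded in the method's own documentation, passing an empty (or nil) column list merges on every column, i.e. it behaves like `SetAutoMergeCells(true)`:
```go
table.SetAutoMergeCellsByColumnIndex(nil) // equivalent to table.SetAutoMergeCells(true)
```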
#### Table with color
```go
@ -233,7 +268,7 @@ table.Render()
#### Table with color Output
![Table with Color](https://cloud.githubusercontent.com/assets/6460392/21101956/bbc7b356-c0a1-11e6-9f36-dba694746efc.png)
#### Example - 7 Table Cells with Color
#### Example - 8 Table Cells with Color
Individual Cell Colors from `func Rich` take precedence over Column Colors
@ -289,7 +324,7 @@ table.Render()
##### Table cells with color Output
![Table cells with Color](https://user-images.githubusercontent.com/9064687/63969376-bcd88d80-ca6f-11e9-9466-c3d954700b25.png)
#### Example 8 - Set table caption
#### Example 9 - Set table caption
```go
data := [][]string{
[]string{"A", "The Good", "500"},
@ -310,7 +345,7 @@ table.Render() // Send output
Note: Caption text will wrap with total width of rendered table.
##### Output 7
##### Output 9
```
+------+-----------------------+--------+
| NAME | SIGN | RATING |
@ -323,7 +358,7 @@ Note: Caption text will wrap with total width of rendered table.
Movie ratings.
```
#### Example 8 - Set NoWhiteSpace and TablePadding option
#### Example 10 - Set NoWhiteSpace and TablePadding option
```go
data := [][]string{
{"node1.example.com", "Ready", "compute", "1.11"},
@ -349,7 +384,7 @@ table.AppendBulk(data) // Add Bulk Data
table.Render()
```
##### Output 8
##### Output 10
```
NAME STATUS ROLE VERSION
node1.example.com Ready compute 1.11

View File

@ -2,4 +2,4 @@ module github.com/olekukonko/tablewriter
go 1.12
require github.com/mattn/go-runewidth v0.0.7
require github.com/mattn/go-runewidth v0.0.9

View File

@ -1,2 +1,2 @@
github.com/mattn/go-runewidth v0.0.7 h1:Ei8KR0497xHyKJPAv59M1dkC+rOZCMBJ+t3fZ+twI54=
github.com/mattn/go-runewidth v0.0.7/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=

View File

@ -48,39 +48,40 @@ type Border struct {
}
type Table struct {
out io.Writer
rows [][]string
lines [][][]string
cs map[int]int
rs map[int]int
headers [][]string
footers [][]string
caption bool
captionText string
autoFmt bool
autoWrap bool
reflowText bool
mW int
pCenter string
pRow string
pColumn string
tColumn int
tRow int
hAlign int
fAlign int
align int
newLine string
rowLine bool
autoMergeCells bool
noWhiteSpace bool
tablePadding string
hdrLine bool
borders Border
colSize int
headerParams []string
columnsParams []string
footerParams []string
columnsAlign []int
out io.Writer
rows [][]string
lines [][][]string
cs map[int]int
rs map[int]int
headers [][]string
footers [][]string
caption bool
captionText string
autoFmt bool
autoWrap bool
reflowText bool
mW int
pCenter string
pRow string
pColumn string
tColumn int
tRow int
hAlign int
fAlign int
align int
newLine string
rowLine bool
autoMergeCells bool
columnsToAutoMergeCells map[int]bool
noWhiteSpace bool
tablePadding string
hdrLine bool
borders Border
colSize int
headerParams []string
columnsParams []string
footerParams []string
columnsAlign []int
}
// Start New Table
@ -276,6 +277,21 @@ func (t *Table) SetAutoMergeCells(auto bool) {
t.autoMergeCells = auto
}
// Set Auto Merge Cells By Column Index
// This would enable / disable the merge of cells with identical values for specific columns
// If cols is empty, it is the same as `SetAutoMergeCells(true)`.
func (t *Table) SetAutoMergeCellsByColumnIndex(cols []int) {
t.autoMergeCells = true
if len(cols) > 0 {
m := make(map[int]bool)
for _, col := range cols {
m[col] = true
}
t.columnsToAutoMergeCells = m
}
}
// Set Table Border
// This would enable / disable line around the table
func (t *Table) SetBorder(border bool) {
@ -830,9 +846,19 @@ func (t *Table) printRowMergeCells(writer io.Writer, columns [][]string, rowIdx
}
if t.autoMergeCells {
var mergeCell bool
if t.columnsToAutoMergeCells != nil {
// Check to see if the column index is in columnsToAutoMergeCells.
if t.columnsToAutoMergeCells[y] {
mergeCell = true
}
} else {
// columnsToAutoMergeCells was not set.
mergeCell = true
}
//Store the full line to merge mutli-lines cells
fullLine := strings.TrimRight(strings.Join(columns[y], " "), " ")
if len(previousLine) > y && fullLine == previousLine[y] && fullLine != "" {
if len(previousLine) > y && fullLine == previousLine[y] && fullLine != "" && mergeCell {
// If this cell is identical to the one above but not empty, we don't display the border and keep the cell empty.
displayCellBorder = append(displayCellBorder, false)
str = ""

View File

@ -1,6 +1,6 @@
The MIT License (MIT)
MIT License
Copyright (c) 2015 Aymerick JEHANNE
Copyright (c) 2019 Oliver Kuederle
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@ -19,4 +19,3 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

62
vendor/github.com/rivo/uniseg/README.md generated vendored Normal file
View File

@ -0,0 +1,62 @@
# Unicode Text Segmentation for Go
[![Godoc Reference](https://img.shields.io/badge/godoc-reference-blue.svg)](https://godoc.org/github.com/rivo/uniseg)
[![Go Report](https://img.shields.io/badge/go%20report-A%2B-brightgreen.svg)](https://goreportcard.com/report/github.com/rivo/uniseg)
This Go package implements Unicode Text Segmentation according to [Unicode Standard Annex #29](http://unicode.org/reports/tr29/) (Unicode version 12.0.0).
At this point, only the determination of grapheme cluster boundaries is implemented.
## Background
In Go, [strings are read-only slices of bytes](https://blog.golang.org/strings). They can be turned into Unicode code points using the `for` loop or by casting: `[]rune(str)`. However, multiple code points may be combined into one user-perceived character or what the Unicode specification calls "grapheme cluster". Here are some examples:
|String|Bytes (UTF-8)|Code points (runes)|Grapheme clusters|
|-|-|-|-|
|Käse|6 bytes: `4b 61 cc 88 73 65`|5 code points: `4b 61 308 73 65`|4 clusters: `[4b],[61 308],[73],[65]`|
|🏳️‍🌈|14 bytes: `f0 9f 8f b3 ef b8 8f e2 80 8d f0 9f 8c 88`|4 code points: `1f3f3 fe0f 200d 1f308`|1 cluster: `[1f3f3 fe0f 200d 1f308]`|
|🇩🇪|8 bytes: `f0 9f 87 a9 f0 9f 87 aa`|2 code points: `1f1e9 1f1ea`|1 cluster: `[1f1e9 1f1ea]`|
This package provides a tool to iterate over these grapheme clusters. This may be used to determine the number of user-perceived characters, to split strings in their intended places, or to extract individual characters which form a unit.
## Installation
```bash
go get github.com/rivo/uniseg
```
## Basic Example
```go
package main
import (
"fmt"
"github.com/rivo/uniseg"
)
func main() {
gr := uniseg.NewGraphemes("👍🏼!")
for gr.Next() {
fmt.Printf("%x ", gr.Runes())
}
// Output: [1f44d 1f3fc] [21]
}
```
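Building on the example above, a second short sketch (the input string is the decomposed form of the "Käse" row from the table) showing how `GraphemeClusterCount` and `Positions` relate clusters to byte offsets:
```go
package main

import (
	"fmt"

	"github.com/rivo/uniseg"
)

func main() {
	s := "Ka\u0308se" // "a" + combining diaeresis: 6 bytes, 4 grapheme clusters

	// Prints "4 6": four user-perceived characters in a six-byte string.
	fmt.Println(uniseg.GraphemeClusterCount(s), len(s))

	gr := uniseg.NewGraphemes(s)
	for gr.Next() {
		from, to := gr.Positions()
		fmt.Printf("%q spans bytes [%d,%d)\n", gr.Str(), from, to)
	}
}
```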
## Documentation
Refer to https://godoc.org/github.com/rivo/uniseg for the package's documentation.
## Dependencies
This package does not depend on any packages outside the standard library.
## Your Feedback
Add your issue here on GitHub. Feel free to get in touch if you have any questions.
## Version
Version tags will be introduced once Golang modules are official. Consider this version 0.1.

8
vendor/github.com/rivo/uniseg/doc.go generated vendored Normal file
View File

@ -0,0 +1,8 @@
/*
Package uniseg implements Unicode Text Segmentation according to Unicode
Standard Annex #29 (http://unicode.org/reports/tr29/).
At this point, only the determination of grapheme cluster boundaries is
implemented.
*/
package uniseg

3
vendor/github.com/rivo/uniseg/go.mod generated vendored Normal file
View File

@ -0,0 +1,3 @@
module github.com/rivo/uniseg
go 1.12

268
vendor/github.com/rivo/uniseg/grapheme.go generated vendored Normal file
View File

@ -0,0 +1,268 @@
package uniseg
import "unicode/utf8"
// The states of the grapheme cluster parser.
const (
grAny = iota
grCR
grControlLF
grL
grLVV
grLVTT
grPrepend
grExtendedPictographic
grExtendedPictographicZWJ
grRIOdd
grRIEven
)
// The grapheme cluster parser's breaking instructions.
const (
grNoBoundary = iota
grBoundary
)
// The grapheme cluster parser's state transitions. Maps (state, property) to
// (new state, breaking instruction, rule number). The breaking instruction
// always refers to the boundary between the last and next code point.
//
// This map is queried as follows:
//
// 1. Find specific state + specific property. Stop if found.
// 2. Find specific state + any property.
// 3. Find any state + specific property.
// 4. If only (2) or (3) (but not both) was found, stop.
// 5. If both (2) and (3) were found, use state and breaking instruction from
// the transition with the lower rule number, prefer (3) if rule numbers
// are equal. Stop.
// 6. Assume grAny and grBoundary.
var grTransitions = map[[2]int][3]int{
// GB5
{grAny, prCR}: {grCR, grBoundary, 50},
{grAny, prLF}: {grControlLF, grBoundary, 50},
{grAny, prControl}: {grControlLF, grBoundary, 50},
// GB4
{grCR, prAny}: {grAny, grBoundary, 40},
{grControlLF, prAny}: {grAny, grBoundary, 40},
// GB3.
{grCR, prLF}: {grAny, grNoBoundary, 30},
// GB6.
{grAny, prL}: {grL, grBoundary, 9990},
{grL, prL}: {grL, grNoBoundary, 60},
{grL, prV}: {grLVV, grNoBoundary, 60},
{grL, prLV}: {grLVV, grNoBoundary, 60},
{grL, prLVT}: {grLVTT, grNoBoundary, 60},
// GB7.
{grAny, prLV}: {grLVV, grBoundary, 9990},
{grAny, prV}: {grLVV, grBoundary, 9990},
{grLVV, prV}: {grLVV, grNoBoundary, 70},
{grLVV, prT}: {grLVTT, grNoBoundary, 70},
// GB8.
{grAny, prLVT}: {grLVTT, grBoundary, 9990},
{grAny, prT}: {grLVTT, grBoundary, 9990},
{grLVTT, prT}: {grLVTT, grNoBoundary, 80},
// GB9.
{grAny, prExtend}: {grAny, grNoBoundary, 90},
{grAny, prZWJ}: {grAny, grNoBoundary, 90},
// GB9a.
{grAny, prSpacingMark}: {grAny, grNoBoundary, 91},
// GB9b.
{grAny, prPreprend}: {grPrepend, grBoundary, 9990},
{grPrepend, prAny}: {grAny, grNoBoundary, 92},
// GB11.
{grAny, prExtendedPictographic}: {grExtendedPictographic, grBoundary, 9990},
{grExtendedPictographic, prExtend}: {grExtendedPictographic, grNoBoundary, 110},
{grExtendedPictographic, prZWJ}: {grExtendedPictographicZWJ, grNoBoundary, 110},
{grExtendedPictographicZWJ, prExtendedPictographic}: {grExtendedPictographic, grNoBoundary, 110},
// GB12 / GB13.
{grAny, prRegionalIndicator}: {grRIOdd, grBoundary, 9990},
{grRIOdd, prRegionalIndicator}: {grRIEven, grNoBoundary, 120},
{grRIEven, prRegionalIndicator}: {grRIOdd, grBoundary, 120},
}
// Graphemes implements an iterator over Unicode extended grapheme clusters,
// specified in the Unicode Standard Annex #29. Grapheme clusters correspond to
// "user-perceived characters". These characters often consist of multiple
// code points (e.g. the "woman kissing woman" emoji consists of 8 code points:
// woman + ZWJ + heavy black heart (2 code points) + ZWJ + kiss mark + ZWJ +
// woman) and the rules described in Annex #29 must be applied to group those
// code points into clusters perceived by the user as one character.
type Graphemes struct {
// The code points over which this class iterates.
codePoints []rune
// The (byte-based) indices of the code points into the original string plus
// len(original string). Thus, len(indices) = len(codePoints) + 1.
indices []int
// The current grapheme cluster to be returned. These are indices into
// codePoints/indices. If start == end, we either haven't started iterating
// yet (0) or the iteration has already completed (1).
start, end int
// The index of the next code point to be parsed.
pos int
// The current state of the code point parser.
state int
}
// NewGraphemes returns a new grapheme cluster iterator.
func NewGraphemes(s string) *Graphemes {
l := utf8.RuneCountInString(s)
codePoints := make([]rune, l)
indices := make([]int, l+1)
i := 0
for pos, r := range s {
codePoints[i] = r
indices[i] = pos
i++
}
indices[l] = len(s)
g := &Graphemes{
codePoints: codePoints,
indices: indices,
}
g.Next() // Parse ahead.
return g
}
// Next advances the iterator by one grapheme cluster and returns false if no
// clusters are left. This function must be called before the first cluster is
// accessed.
func (g *Graphemes) Next() bool {
g.start = g.end
// The state transition gives us a boundary instruction BEFORE the next code
// point so we always need to stay ahead by one code point.
// Parse the next code point.
for g.pos <= len(g.codePoints) {
// GB2.
if g.pos == len(g.codePoints) {
g.end = g.pos
g.pos++
break
}
// Determine the property of the next character.
nextProperty := property(g.codePoints[g.pos])
g.pos++
// Find the applicable transition.
var boundary bool
transition, ok := grTransitions[[2]int{g.state, nextProperty}]
if ok {
// We have a specific transition. We'll use it.
g.state = transition[0]
boundary = transition[1] == grBoundary
} else {
// No specific transition found. Try the less specific ones.
transAnyProp, okAnyProp := grTransitions[[2]int{g.state, prAny}]
transAnyState, okAnyState := grTransitions[[2]int{grAny, nextProperty}]
if okAnyProp && okAnyState {
// Both apply. We'll use a mix (see comments for grTransitions).
g.state = transAnyState[0]
boundary = transAnyState[1] == grBoundary
if transAnyProp[2] < transAnyState[2] {
g.state = transAnyProp[0]
boundary = transAnyProp[1] == grBoundary
}
} else if okAnyProp {
// We only have a specific state.
g.state = transAnyProp[0]
boundary = transAnyProp[1] == grBoundary
// This branch will probably never be reached because okAnyState will
// always be true given the current transition map. But we keep it here
// for future modifications to the transition map where this may not be
// true anymore.
} else if okAnyState {
// We only have a specific property.
g.state = transAnyState[0]
boundary = transAnyState[1] == grBoundary
} else {
// No known transition. GB999: Any x Any.
g.state = grAny
boundary = true
}
}
// If we found a cluster boundary, let's stop here. The current cluster will
// be the one that just ended.
if g.pos-1 == 0 /* GB1 */ || boundary {
g.end = g.pos - 1
break
}
}
return g.start != g.end
}
// Runes returns a slice of runes (code points) which corresponds to the current
// grapheme cluster. If the iterator is already past the end or Next() has not
// yet been called, nil is returned.
func (g *Graphemes) Runes() []rune {
if g.start == g.end {
return nil
}
return g.codePoints[g.start:g.end]
}
// Str returns a substring of the original string which corresponds to the
// current grapheme cluster. If the iterator is already past the end or Next()
// has not yet been called, an empty string is returned.
func (g *Graphemes) Str() string {
if g.start == g.end {
return ""
}
return string(g.codePoints[g.start:g.end])
}
// Bytes returns a byte slice which corresponds to the current grapheme cluster.
// If the iterator is already past the end or Next() has not yet been called,
// nil is returned.
func (g *Graphemes) Bytes() []byte {
if g.start == g.end {
return nil
}
return []byte(string(g.codePoints[g.start:g.end]))
}
// Positions returns the interval of the current grapheme cluster as byte
// positions into the original string. The first returned value "from" indexes
// the first byte and the second returned value "to" indexes the first byte that
// is not included anymore, i.e. str[from:to] is the current grapheme cluster of
// the original string "str". If Next() has not yet been called, both values are
// 0. If the iterator is already past the end, both values are 1.
func (g *Graphemes) Positions() (int, int) {
return g.indices[g.start], g.indices[g.end]
}
// Reset puts the iterator into its initial state such that the next call to
// Next() sets it to the first grapheme cluster again.
func (g *Graphemes) Reset() {
g.start, g.end, g.pos, g.state = 0, 0, 0, grAny
g.Next() // Parse ahead again.
}
// GraphemeClusterCount returns the number of user-perceived characters
// (grapheme clusters) for the given string. To calculate this number, it
// iterates through the string using the Graphemes iterator.
func GraphemeClusterCount(s string) (n int) {
g := NewGraphemes(s)
for g.Next() {
n++
}
return
}

1658
vendor/github.com/rivo/uniseg/properties.go generated vendored Normal file

File diff suppressed because it is too large

View File

@ -13,12 +13,42 @@ const (
compareGreater
)
var (
intType = reflect.TypeOf(int(1))
int8Type = reflect.TypeOf(int8(1))
int16Type = reflect.TypeOf(int16(1))
int32Type = reflect.TypeOf(int32(1))
int64Type = reflect.TypeOf(int64(1))
uintType = reflect.TypeOf(uint(1))
uint8Type = reflect.TypeOf(uint8(1))
uint16Type = reflect.TypeOf(uint16(1))
uint32Type = reflect.TypeOf(uint32(1))
uint64Type = reflect.TypeOf(uint64(1))
float32Type = reflect.TypeOf(float32(1))
float64Type = reflect.TypeOf(float64(1))
stringType = reflect.TypeOf("")
)
func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) {
obj1Value := reflect.ValueOf(obj1)
obj2Value := reflect.ValueOf(obj2)
// throughout this switch we try and avoid calling .Convert() if possible,
// as this has a pretty big performance impact
switch kind {
case reflect.Int:
{
intobj1 := obj1.(int)
intobj2 := obj2.(int)
intobj1, ok := obj1.(int)
if !ok {
intobj1 = obj1Value.Convert(intType).Interface().(int)
}
intobj2, ok := obj2.(int)
if !ok {
intobj2 = obj2Value.Convert(intType).Interface().(int)
}
if intobj1 > intobj2 {
return compareGreater, true
}
@ -31,8 +61,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) {
}
case reflect.Int8:
{
int8obj1 := obj1.(int8)
int8obj2 := obj2.(int8)
int8obj1, ok := obj1.(int8)
if !ok {
int8obj1 = obj1Value.Convert(int8Type).Interface().(int8)
}
int8obj2, ok := obj2.(int8)
if !ok {
int8obj2 = obj2Value.Convert(int8Type).Interface().(int8)
}
if int8obj1 > int8obj2 {
return compareGreater, true
}
@ -45,8 +81,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) {
}
case reflect.Int16:
{
int16obj1 := obj1.(int16)
int16obj2 := obj2.(int16)
int16obj1, ok := obj1.(int16)
if !ok {
int16obj1 = obj1Value.Convert(int16Type).Interface().(int16)
}
int16obj2, ok := obj2.(int16)
if !ok {
int16obj2 = obj2Value.Convert(int16Type).Interface().(int16)
}
if int16obj1 > int16obj2 {
return compareGreater, true
}
@ -59,8 +101,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) {
}
case reflect.Int32:
{
int32obj1 := obj1.(int32)
int32obj2 := obj2.(int32)
int32obj1, ok := obj1.(int32)
if !ok {
int32obj1 = obj1Value.Convert(int32Type).Interface().(int32)
}
int32obj2, ok := obj2.(int32)
if !ok {
int32obj2 = obj2Value.Convert(int32Type).Interface().(int32)
}
if int32obj1 > int32obj2 {
return compareGreater, true
}
@ -73,8 +121,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) {
}
case reflect.Int64:
{
int64obj1 := obj1.(int64)
int64obj2 := obj2.(int64)
int64obj1, ok := obj1.(int64)
if !ok {
int64obj1 = obj1Value.Convert(int64Type).Interface().(int64)
}
int64obj2, ok := obj2.(int64)
if !ok {
int64obj2 = obj2Value.Convert(int64Type).Interface().(int64)
}
if int64obj1 > int64obj2 {
return compareGreater, true
}
@ -87,8 +141,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) {
}
case reflect.Uint:
{
uintobj1 := obj1.(uint)
uintobj2 := obj2.(uint)
uintobj1, ok := obj1.(uint)
if !ok {
uintobj1 = obj1Value.Convert(uintType).Interface().(uint)
}
uintobj2, ok := obj2.(uint)
if !ok {
uintobj2 = obj2Value.Convert(uintType).Interface().(uint)
}
if uintobj1 > uintobj2 {
return compareGreater, true
}
@ -101,8 +161,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) {
}
case reflect.Uint8:
{
uint8obj1 := obj1.(uint8)
uint8obj2 := obj2.(uint8)
uint8obj1, ok := obj1.(uint8)
if !ok {
uint8obj1 = obj1Value.Convert(uint8Type).Interface().(uint8)
}
uint8obj2, ok := obj2.(uint8)
if !ok {
uint8obj2 = obj2Value.Convert(uint8Type).Interface().(uint8)
}
if uint8obj1 > uint8obj2 {
return compareGreater, true
}
@ -115,8 +181,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) {
}
case reflect.Uint16:
{
uint16obj1 := obj1.(uint16)
uint16obj2 := obj2.(uint16)
uint16obj1, ok := obj1.(uint16)
if !ok {
uint16obj1 = obj1Value.Convert(uint16Type).Interface().(uint16)
}
uint16obj2, ok := obj2.(uint16)
if !ok {
uint16obj2 = obj2Value.Convert(uint16Type).Interface().(uint16)
}
if uint16obj1 > uint16obj2 {
return compareGreater, true
}
@ -129,8 +201,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) {
}
case reflect.Uint32:
{
uint32obj1 := obj1.(uint32)
uint32obj2 := obj2.(uint32)
uint32obj1, ok := obj1.(uint32)
if !ok {
uint32obj1 = obj1Value.Convert(uint32Type).Interface().(uint32)
}
uint32obj2, ok := obj2.(uint32)
if !ok {
uint32obj2 = obj2Value.Convert(uint32Type).Interface().(uint32)
}
if uint32obj1 > uint32obj2 {
return compareGreater, true
}
@ -143,8 +221,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) {
}
case reflect.Uint64:
{
uint64obj1 := obj1.(uint64)
uint64obj2 := obj2.(uint64)
uint64obj1, ok := obj1.(uint64)
if !ok {
uint64obj1 = obj1Value.Convert(uint64Type).Interface().(uint64)
}
uint64obj2, ok := obj2.(uint64)
if !ok {
uint64obj2 = obj2Value.Convert(uint64Type).Interface().(uint64)
}
if uint64obj1 > uint64obj2 {
return compareGreater, true
}
@ -157,8 +241,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) {
}
case reflect.Float32:
{
float32obj1 := obj1.(float32)
float32obj2 := obj2.(float32)
float32obj1, ok := obj1.(float32)
if !ok {
float32obj1 = obj1Value.Convert(float32Type).Interface().(float32)
}
float32obj2, ok := obj2.(float32)
if !ok {
float32obj2 = obj2Value.Convert(float32Type).Interface().(float32)
}
if float32obj1 > float32obj2 {
return compareGreater, true
}
@ -171,8 +261,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) {
}
case reflect.Float64:
{
float64obj1 := obj1.(float64)
float64obj2 := obj2.(float64)
float64obj1, ok := obj1.(float64)
if !ok {
float64obj1 = obj1Value.Convert(float64Type).Interface().(float64)
}
float64obj2, ok := obj2.(float64)
if !ok {
float64obj2 = obj2Value.Convert(float64Type).Interface().(float64)
}
if float64obj1 > float64obj2 {
return compareGreater, true
}
@ -185,8 +281,14 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) {
}
case reflect.String:
{
stringobj1 := obj1.(string)
stringobj2 := obj2.(string)
stringobj1, ok := obj1.(string)
if !ok {
stringobj1 = obj1Value.Convert(stringType).Interface().(string)
}
stringobj2, ok := obj2.(string)
if !ok {
stringobj2 = obj2Value.Convert(stringType).Interface().(string)
}
if stringobj1 > stringobj2 {
return compareGreater, true
}
@ -240,6 +342,24 @@ func LessOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...inter
return compareTwoValues(t, e1, e2, []CompareType{compareLess, compareEqual}, "\"%v\" is not less than or equal to \"%v\"", msgAndArgs)
}
// Positive asserts that the specified element is positive
//
// assert.Positive(t, 1)
// assert.Positive(t, 1.23)
func Positive(t TestingT, e interface{}, msgAndArgs ...interface{}) bool {
zero := reflect.Zero(reflect.TypeOf(e))
return compareTwoValues(t, e, zero.Interface(), []CompareType{compareGreater}, "\"%v\" is not positive", msgAndArgs)
}
// Negative asserts that the specified element is negative
//
// assert.Negative(t, -1)
// assert.Negative(t, -1.23)
func Negative(t TestingT, e interface{}, msgAndArgs ...interface{}) bool {
zero := reflect.Zero(reflect.TypeOf(e))
return compareTwoValues(t, e, zero.Interface(), []CompareType{compareLess}, "\"%v\" is not negative", msgAndArgs)
}
func compareTwoValues(t TestingT, e1 interface{}, e2 interface{}, allowedComparesResults []CompareType, failMessage string, msgAndArgs ...interface{}) bool {
if h, ok := t.(tHelper); ok {
h.Helper()

View File

@ -114,6 +114,24 @@ func Errorf(t TestingT, err error, msg string, args ...interface{}) bool {
return Error(t, err, append([]interface{}{msg}, args...)...)
}
// ErrorAsf asserts that at least one of the errors in err's chain matches target, and if so, sets target to that error value.
// This is a wrapper for errors.As.
func ErrorAsf(t TestingT, err error, target interface{}, msg string, args ...interface{}) bool {
if h, ok := t.(tHelper); ok {
h.Helper()
}
return ErrorAs(t, err, target, append([]interface{}{msg}, args...)...)
}
// ErrorIsf asserts that at least one of the errors in err's chain matches target.
// This is a wrapper for errors.Is.
func ErrorIsf(t TestingT, err error, target error, msg string, args ...interface{}) bool {
if h, ok := t.(tHelper); ok {
h.Helper()
}
return ErrorIs(t, err, target, append([]interface{}{msg}, args...)...)
}
// Eventuallyf asserts that given condition will be met in waitFor time,
// periodically checking target function each tick.
//
@ -321,6 +339,54 @@ func InEpsilonSlicef(t TestingT, expected interface{}, actual interface{}, epsil
return InEpsilonSlice(t, expected, actual, epsilon, append([]interface{}{msg}, args...)...)
}
// IsDecreasingf asserts that the collection is decreasing
//
// assert.IsDecreasingf(t, []int{2, 1, 0}, "error message %s", "formatted")
// assert.IsDecreasingf(t, []float{2, 1}, "error message %s", "formatted")
// assert.IsDecreasingf(t, []string{"b", "a"}, "error message %s", "formatted")
func IsDecreasingf(t TestingT, object interface{}, msg string, args ...interface{}) bool {
if h, ok := t.(tHelper); ok {
h.Helper()
}
return IsDecreasing(t, object, append([]interface{}{msg}, args...)...)
}
// IsIncreasingf asserts that the collection is increasing
//
// assert.IsIncreasingf(t, []int{1, 2, 3}, "error message %s", "formatted")
// assert.IsIncreasingf(t, []float{1, 2}, "error message %s", "formatted")
// assert.IsIncreasingf(t, []string{"a", "b"}, "error message %s", "formatted")
func IsIncreasingf(t TestingT, object interface{}, msg string, args ...interface{}) bool {
if h, ok := t.(tHelper); ok {
h.Helper()
}
return IsIncreasing(t, object, append([]interface{}{msg}, args...)...)
}
// IsNonDecreasingf asserts that the collection is not decreasing
//
// assert.IsNonDecreasingf(t, []int{1, 1, 2}, "error message %s", "formatted")
// assert.IsNonDecreasingf(t, []float{1, 2}, "error message %s", "formatted")
// assert.IsNonDecreasingf(t, []string{"a", "b"}, "error message %s", "formatted")
func IsNonDecreasingf(t TestingT, object interface{}, msg string, args ...interface{}) bool {
if h, ok := t.(tHelper); ok {
h.Helper()
}
return IsNonDecreasing(t, object, append([]interface{}{msg}, args...)...)
}
// IsNonIncreasingf asserts that the collection is not increasing
//
// assert.IsNonIncreasingf(t, []int{2, 1, 1}, "error message %s", "formatted")
// assert.IsNonIncreasingf(t, []float{2, 1}, "error message %s", "formatted")
// assert.IsNonIncreasingf(t, []string{"b", "a"}, "error message %s", "formatted")
func IsNonIncreasingf(t TestingT, object interface{}, msg string, args ...interface{}) bool {
if h, ok := t.(tHelper); ok {
h.Helper()
}
return IsNonIncreasing(t, object, append([]interface{}{msg}, args...)...)
}
// IsTypef asserts that the specified objects are of the same type.
func IsTypef(t TestingT, expectedType interface{}, object interface{}, msg string, args ...interface{}) bool {
if h, ok := t.(tHelper); ok {
@ -375,6 +441,17 @@ func LessOrEqualf(t TestingT, e1 interface{}, e2 interface{}, msg string, args .
return LessOrEqual(t, e1, e2, append([]interface{}{msg}, args...)...)
}
// Negativef asserts that the specified element is negative
//
// assert.Negativef(t, -1, "error message %s", "formatted")
// assert.Negativef(t, -1.23, "error message %s", "formatted")
func Negativef(t TestingT, e interface{}, msg string, args ...interface{}) bool {
if h, ok := t.(tHelper); ok {
h.Helper()
}
return Negative(t, e, append([]interface{}{msg}, args...)...)
}
// Neverf asserts that the given condition doesn't satisfy in waitFor time,
// periodically checking the target function each tick.
//
@ -476,6 +553,15 @@ func NotEqualValuesf(t TestingT, expected interface{}, actual interface{}, msg s
return NotEqualValues(t, expected, actual, append([]interface{}{msg}, args...)...)
}
// NotErrorIsf asserts that at none of the errors in err's chain matches target.
// This is a wrapper for errors.Is.
func NotErrorIsf(t TestingT, err error, target error, msg string, args ...interface{}) bool {
if h, ok := t.(tHelper); ok {
h.Helper()
}
return NotErrorIs(t, err, target, append([]interface{}{msg}, args...)...)
}
// NotNilf asserts that the specified object is not nil.
//
// assert.NotNilf(t, err, "error message %s", "formatted")
@ -572,6 +658,17 @@ func PanicsWithValuef(t TestingT, expected interface{}, f PanicTestFunc, msg str
return PanicsWithValue(t, expected, f, append([]interface{}{msg}, args...)...)
}
// Positivef asserts that the specified element is positive
//
// assert.Positivef(t, 1, "error message %s", "formatted")
// assert.Positivef(t, 1.23, "error message %s", "formatted")
func Positivef(t TestingT, e interface{}, msg string, args ...interface{}) bool {
if h, ok := t.(tHelper); ok {
h.Helper()
}
return Positive(t, e, append([]interface{}{msg}, args...)...)
}
// Regexpf asserts that a specified regexp matches a string.
//
// assert.Regexpf(t, regexp.MustCompile("start"), "it's starting", "error message %s", "formatted")

View File

@ -204,6 +204,42 @@ func (a *Assertions) Error(err error, msgAndArgs ...interface{}) bool {
return Error(a.t, err, msgAndArgs...)
}
// ErrorAs asserts that at least one of the errors in err's chain matches target, and if so, sets target to that error value.
// This is a wrapper for errors.As.
func (a *Assertions) ErrorAs(err error, target interface{}, msgAndArgs ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
return ErrorAs(a.t, err, target, msgAndArgs...)
}
// ErrorAsf asserts that at least one of the errors in err's chain matches target, and if so, sets target to that error value.
// This is a wrapper for errors.As.
func (a *Assertions) ErrorAsf(err error, target interface{}, msg string, args ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
return ErrorAsf(a.t, err, target, msg, args...)
}
// ErrorIs asserts that at least one of the errors in err's chain matches target.
// This is a wrapper for errors.Is.
func (a *Assertions) ErrorIs(err error, target error, msgAndArgs ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
return ErrorIs(a.t, err, target, msgAndArgs...)
}
// ErrorIsf asserts that at least one of the errors in err's chain matches target.
// This is a wrapper for errors.Is.
func (a *Assertions) ErrorIsf(err error, target error, msg string, args ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
return ErrorIsf(a.t, err, target, msg, args...)
}
// Errorf asserts that a function returned an error (i.e. not `nil`).
//
// actualObj, err := SomeFunction()
@ -631,6 +667,102 @@ func (a *Assertions) InEpsilonf(expected interface{}, actual interface{}, epsilo
return InEpsilonf(a.t, expected, actual, epsilon, msg, args...)
}
// IsDecreasing asserts that the collection is decreasing
//
// a.IsDecreasing([]int{2, 1, 0})
// a.IsDecreasing([]float64{2, 1})
// a.IsDecreasing([]string{"b", "a"})
func (a *Assertions) IsDecreasing(object interface{}, msgAndArgs ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
return IsDecreasing(a.t, object, msgAndArgs...)
}
// IsDecreasingf asserts that the collection is decreasing
//
// a.IsDecreasingf([]int{2, 1, 0}, "error message %s", "formatted")
// a.IsDecreasingf([]float64{2, 1}, "error message %s", "formatted")
// a.IsDecreasingf([]string{"b", "a"}, "error message %s", "formatted")
func (a *Assertions) IsDecreasingf(object interface{}, msg string, args ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
return IsDecreasingf(a.t, object, msg, args...)
}
// IsIncreasing asserts that the collection is increasing
//
// a.IsIncreasing([]int{1, 2, 3})
// a.IsIncreasing([]float64{1, 2})
// a.IsIncreasing([]string{"a", "b"})
func (a *Assertions) IsIncreasing(object interface{}, msgAndArgs ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
return IsIncreasing(a.t, object, msgAndArgs...)
}
// IsIncreasingf asserts that the collection is increasing
//
// a.IsIncreasingf([]int{1, 2, 3}, "error message %s", "formatted")
// a.IsIncreasingf([]float64{1, 2}, "error message %s", "formatted")
// a.IsIncreasingf([]string{"a", "b"}, "error message %s", "formatted")
func (a *Assertions) IsIncreasingf(object interface{}, msg string, args ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
return IsIncreasingf(a.t, object, msg, args...)
}
// IsNonDecreasing asserts that the collection is not decreasing
//
// a.IsNonDecreasing([]int{1, 1, 2})
// a.IsNonDecreasing([]float64{1, 2})
// a.IsNonDecreasing([]string{"a", "b"})
func (a *Assertions) IsNonDecreasing(object interface{}, msgAndArgs ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
return IsNonDecreasing(a.t, object, msgAndArgs...)
}
// IsNonDecreasingf asserts that the collection is not decreasing
//
// a.IsNonDecreasingf([]int{1, 1, 2}, "error message %s", "formatted")
// a.IsNonDecreasingf([]float64{1, 2}, "error message %s", "formatted")
// a.IsNonDecreasingf([]string{"a", "b"}, "error message %s", "formatted")
func (a *Assertions) IsNonDecreasingf(object interface{}, msg string, args ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
return IsNonDecreasingf(a.t, object, msg, args...)
}
// IsNonIncreasing asserts that the collection is not increasing
//
// a.IsNonIncreasing([]int{2, 1, 1})
// a.IsNonIncreasing([]float64{2, 1})
// a.IsNonIncreasing([]string{"b", "a"})
func (a *Assertions) IsNonIncreasing(object interface{}, msgAndArgs ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
return IsNonIncreasing(a.t, object, msgAndArgs...)
}
// IsNonIncreasingf asserts that the collection is not increasing
//
// a.IsNonIncreasingf([]int{2, 1, 1}, "error message %s", "formatted")
// a.IsNonIncreasingf([]float64{2, 1}, "error message %s", "formatted")
// a.IsNonIncreasingf([]string{"b", "a"}, "error message %s", "formatted")
func (a *Assertions) IsNonIncreasingf(object interface{}, msg string, args ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
return IsNonIncreasingf(a.t, object, msg, args...)
}
// IsType asserts that the specified objects are of the same type.
func (a *Assertions) IsType(expectedType interface{}, object interface{}, msgAndArgs ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
@ -739,6 +871,28 @@ func (a *Assertions) Lessf(e1 interface{}, e2 interface{}, msg string, args ...i
return Lessf(a.t, e1, e2, msg, args...)
}
// Negative asserts that the specified element is negative
//
// a.Negative(-1)
// a.Negative(-1.23)
func (a *Assertions) Negative(e interface{}, msgAndArgs ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
return Negative(a.t, e, msgAndArgs...)
}
// Negativef asserts that the specified element is negative
//
// a.Negativef(-1, "error message %s", "formatted")
// a.Negativef(-1.23, "error message %s", "formatted")
func (a *Assertions) Negativef(e interface{}, msg string, args ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
return Negativef(a.t, e, msg, args...)
}
// Never asserts that the given condition is never satisfied within the waitFor duration,
// periodically checking the target function each tick.
//
@ -941,6 +1095,24 @@ func (a *Assertions) NotEqualf(expected interface{}, actual interface{}, msg str
return NotEqualf(a.t, expected, actual, msg, args...)
}
// NotErrorIs asserts that none of the errors in err's chain matches target.
// This is a wrapper for errors.Is.
func (a *Assertions) NotErrorIs(err error, target error, msgAndArgs ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
return NotErrorIs(a.t, err, target, msgAndArgs...)
}
// NotErrorIsf asserts that none of the errors in err's chain matches target.
// This is a wrapper for errors.Is.
func (a *Assertions) NotErrorIsf(err error, target error, msg string, args ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
return NotErrorIsf(a.t, err, target, msg, args...)
}
// NotNil asserts that the specified object is not nil.
//
// a.NotNil(err)
@ -1133,6 +1305,28 @@ func (a *Assertions) Panicsf(f PanicTestFunc, msg string, args ...interface{}) b
return Panicsf(a.t, f, msg, args...)
}
// Positive asserts that the specified element is positive
//
// a.Positive(1)
// a.Positive(1.23)
func (a *Assertions) Positive(e interface{}, msgAndArgs ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
return Positive(a.t, e, msgAndArgs...)
}
// Positivef asserts that the specified element is positive
//
// a.Positivef(1, "error message %s", "formatted")
// a.Positivef(1.23, "error message %s", "formatted")
func (a *Assertions) Positivef(e interface{}, msg string, args ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
return Positivef(a.t, e, msg, args...)
}
// Regexp asserts that a specified regexp matches a string.
//
// a.Regexp(regexp.MustCompile("start"), "it's starting")

View File

@ -0,0 +1,81 @@
package assert
import (
"fmt"
"reflect"
)
// isOrdered checks that the elements of the collection are ordered according to allowedComparesResults.
func isOrdered(t TestingT, object interface{}, allowedComparesResults []CompareType, failMessage string, msgAndArgs ...interface{}) bool {
objKind := reflect.TypeOf(object).Kind()
if objKind != reflect.Slice && objKind != reflect.Array {
return false
}
objValue := reflect.ValueOf(object)
objLen := objValue.Len()
if objLen <= 1 {
return true
}
value := objValue.Index(0)
valueInterface := value.Interface()
firstValueKind := value.Kind()
for i := 1; i < objLen; i++ {
prevValue := value
prevValueInterface := valueInterface
value = objValue.Index(i)
valueInterface = value.Interface()
compareResult, isComparable := compare(prevValueInterface, valueInterface, firstValueKind)
if !isComparable {
return Fail(t, fmt.Sprintf("Can not compare type \"%s\" and \"%s\"", reflect.TypeOf(value), reflect.TypeOf(prevValue)), msgAndArgs...)
}
if !containsValue(allowedComparesResults, compareResult) {
return Fail(t, fmt.Sprintf(failMessage, prevValue, value), msgAndArgs...)
}
}
return true
}
// IsIncreasing asserts that the collection is increasing
//
// assert.IsIncreasing(t, []int{1, 2, 3})
// assert.IsIncreasing(t, []float64{1, 2})
// assert.IsIncreasing(t, []string{"a", "b"})
func IsIncreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) bool {
return isOrdered(t, object, []CompareType{compareLess}, "\"%v\" is not less than \"%v\"", msgAndArgs...)
}
// IsNonIncreasing asserts that the collection is not increasing
//
// assert.IsNonIncreasing(t, []int{2, 1, 1})
// assert.IsNonIncreasing(t, []float64{2, 1})
// assert.IsNonIncreasing(t, []string{"b", "a"})
func IsNonIncreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) bool {
return isOrdered(t, object, []CompareType{compareEqual, compareGreater}, "\"%v\" is not greater than or equal to \"%v\"", msgAndArgs...)
}
// IsDecreasing asserts that the collection is decreasing
//
// assert.IsDecreasing(t, []int{2, 1, 0})
// assert.IsDecreasing(t, []float64{2, 1})
// assert.IsDecreasing(t, []string{"b", "a"})
func IsDecreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) bool {
return isOrdered(t, object, []CompareType{compareGreater}, "\"%v\" is not greater than \"%v\"", msgAndArgs...)
}
// IsNonDecreasing asserts that the collection is not decreasing
//
// assert.IsNonDecreasing(t, []int{1, 1, 2})
// assert.IsNonDecreasing(t, []float64{1, 2})
// assert.IsNonDecreasing(t, []string{"a", "b"})
func IsNonDecreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) bool {
return isOrdered(t, object, []CompareType{compareLess, compareEqual}, "\"%v\" is not less than or equal to \"%v\"", msgAndArgs...)
}
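
As a quick illustration of the new ordering assertions above, a minimal test sketch; the package name, test name, and values are invented for this example:

package example_test

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestOrdering(t *testing.T) {
	assert.IsIncreasing(t, []int{1, 2, 3})
	assert.IsDecreasing(t, []string{"b", "a"})
	assert.IsNonDecreasing(t, []float64{1, 1, 2}, "values should never go down")
	assert.IsNonIncreasingf(t, []int{2, 1, 1}, "unexpected order in %s", "history")
}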

View File

@ -172,8 +172,8 @@ func isTest(name, prefix string) bool {
if len(name) == len(prefix) { // "Test" is ok
return true
}
rune, _ := utf8.DecodeRuneInString(name[len(prefix):])
return !unicode.IsLower(rune)
r, _ := utf8.DecodeRuneInString(name[len(prefix):])
return !unicode.IsLower(r)
}
func messageFromMsgAndArgs(msgAndArgs ...interface{}) string {
@ -1622,6 +1622,7 @@ var spewConfig = spew.ConfigState{
DisableCapacities: true,
SortKeys: true,
DisableMethods: true,
MaxDepth: 10,
}
type tHelper interface {
@ -1693,3 +1694,81 @@ func Never(t TestingT, condition func() bool, waitFor time.Duration, tick time.D
}
}
}
// ErrorIs asserts that at least one of the errors in err's chain matches target.
// This is a wrapper for errors.Is.
func ErrorIs(t TestingT, err, target error, msgAndArgs ...interface{}) bool {
if h, ok := t.(tHelper); ok {
h.Helper()
}
if errors.Is(err, target) {
return true
}
var expectedText string
if target != nil {
expectedText = target.Error()
}
chain := buildErrorChainString(err)
return Fail(t, fmt.Sprintf("Target error should be in err chain:\n"+
"expected: %q\n"+
"in chain: %s", expectedText, chain,
), msgAndArgs...)
}
// NotErrorIs asserts that none of the errors in err's chain matches target.
// This is a wrapper for errors.Is.
func NotErrorIs(t TestingT, err, target error, msgAndArgs ...interface{}) bool {
if h, ok := t.(tHelper); ok {
h.Helper()
}
if !errors.Is(err, target) {
return true
}
var expectedText string
if target != nil {
expectedText = target.Error()
}
chain := buildErrorChainString(err)
return Fail(t, fmt.Sprintf("Target error should not be in err chain:\n"+
"found: %q\n"+
"in chain: %s", expectedText, chain,
), msgAndArgs...)
}
// ErrorAs asserts that at least one of the errors in err's chain matches target, and if so, sets target to that error value.
// This is a wrapper for errors.As.
func ErrorAs(t TestingT, err error, target interface{}, msgAndArgs ...interface{}) bool {
if h, ok := t.(tHelper); ok {
h.Helper()
}
if errors.As(err, target) {
return true
}
chain := buildErrorChainString(err)
return Fail(t, fmt.Sprintf("Should be in error chain:\n"+
"expected: %q\n"+
"in chain: %s", target, chain,
), msgAndArgs...)
}
func buildErrorChainString(err error) string {
if err == nil {
return ""
}
e := errors.Unwrap(err)
chain := fmt.Sprintf("%q", err.Error())
for e != nil {
chain += fmt.Sprintf("\n\t%q", e.Error())
e = errors.Unwrap(e)
}
return chain
}
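
The error-chain assertions added above are thin wrappers over errors.Is and errors.As; here is a minimal, illustrative test sketch (the error type, variable names, and messages are invented):

package example_test

import (
	"errors"
	"fmt"
	"testing"

	"github.com/stretchr/testify/assert"
)

type timeoutError struct{ op string }

func (e *timeoutError) Error() string { return e.op + ": timed out" }

var errNotFound = errors.New("not found")

func TestErrorChain(t *testing.T) {
	inner := &timeoutError{op: "GET /repos"}
	err := fmt.Errorf("fetch failed: %w", inner)

	assert.ErrorIs(t, err, inner)          // inner is somewhere in err's chain
	assert.NotErrorIs(t, err, errNotFound) // errNotFound is not

	var te *timeoutError
	assert.ErrorAs(t, err, &te) // te now points at the wrapped *timeoutError
}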

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build go1.11 && gc && !purego
// +build go1.11,gc,!purego
package chacha20
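
Many of the vendored files below gain a //go:build line above the existing // +build comment, so that older and newer toolchains evaluate the same constraint. A small illustrative sketch of how the two forms correspond (the file and constraint are invented):

// In the legacy form, separate // +build lines are ANDed together, spaces
// within a line mean OR, and commas mean AND; the single //go:build
// expression states the same condition.
//
//go:build linux && (amd64 || arm64) && !purego
// +build linux
// +build amd64 arm64
// +build !purego

package example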

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build (!arm64 && !s390x && !ppc64le) || (arm64 && !go1.11) || !gc || purego
// +build !arm64,!s390x,!ppc64le arm64,!go1.11 !gc purego
package chacha20

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build gc && !purego
// +build gc,!purego
package chacha20

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build gc && !purego
// +build gc,!purego
package chacha20

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build amd64 && gc && !purego
// +build amd64,gc,!purego
package curve25519

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !amd64 || !gc || purego
// +build !amd64 !gc purego
package curve25519

View File

@ -5,6 +5,7 @@
// In Go 1.13, the ed25519 package was promoted to the standard library as
// crypto/ed25519, and this package became a wrapper for the standard library one.
//
//go:build !go1.13
// +build !go1.13
// Package ed25519 implements the Ed25519 signature algorithm. See

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build go1.13
// +build go1.13
// Package ed25519 implements the Ed25519 signature algorithm. See

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !purego
// +build !purego
// Package subtle implements functions that are often useful in cryptographic

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build purego
// +build purego
// Package subtle implements functions that are often useful in cryptographic

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !go1.13
// +build !go1.13
package poly1305

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build go1.13
// +build go1.13
package poly1305

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build (!amd64 && !ppc64le && !s390x) || !gc || purego
// +build !amd64,!ppc64le,!s390x !gc purego
package poly1305

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build gc && !purego
// +build gc,!purego
package poly1305

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build gc && !purego
// +build gc,!purego
package poly1305

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build gc && !purego
// +build gc,!purego
package poly1305

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build go1.7
// +build go1.7
package context

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build go1.9
// +build go1.9
package context

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !go1.7
// +build !go1.7
package context

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !go1.9
// +build !go1.9
package context

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build aix
// +build aix
package cpu

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build gc
// +build gc
package cpu

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build gc
// +build gc
package cpu

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build (386 || amd64 || amd64p32) && gc
// +build 386 amd64 amd64p32
// +build gc

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build gccgo
// +build gccgo
package cpu

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build gccgo
// +build gccgo
package cpu

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build (386 || amd64 || amd64p32) && gccgo
// +build 386 amd64 amd64p32
// +build gccgo

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !386 && !amd64 && !amd64p32 && !arm64
// +build !386,!amd64,!amd64p32,!arm64
package cpu

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build linux && (mips64 || mips64le)
// +build linux
// +build mips64 mips64le

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build linux && !arm && !arm64 && !mips64 && !mips64le && !ppc64 && !ppc64le && !s390x
// +build linux,!arm,!arm64,!mips64,!mips64le,!ppc64,!ppc64le,!s390x
package cpu

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build linux && (ppc64 || ppc64le)
// +build linux
// +build ppc64 ppc64le

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build mips64 || mips64le
// +build mips64 mips64le
package cpu

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build mips || mipsle
// +build mips mipsle
package cpu

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !linux && arm
// +build !linux,arm
package cpu

View File

@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build !linux,!netbsd
// +build arm64
//go:build !linux && !netbsd && arm64
// +build !linux,!netbsd,arm64
package cpu

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !linux && (mips64 || mips64le)
// +build !linux
// +build mips64 mips64le

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build ppc64 || ppc64le
// +build ppc64 ppc64le
package cpu

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build riscv64
// +build riscv64
package cpu

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build wasm
// +build wasm
package cpu

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build 386 || amd64 || amd64p32
// +build 386 amd64 amd64p32
package cpu

View File

@ -8,8 +8,8 @@
// Moreover, this file will be used during the building of
// gccgo's libgo and thus must not use a CGo method.
// +build aix
// +build gccgo
//go:build aix && gccgo
// +build aix,gccgo
package cpu

View File

@ -6,8 +6,8 @@
// system call on AIX without depending on x/sys/unix.
// (See golang.org/issue/32102)
// +build aix,ppc64
// +build gc
//go:build aix && ppc64 && gc
// +build aix,ppc64,gc
package cpu

View File

@ -2,7 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris
//go:build (aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris || zos) && go1.9
// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris zos
// +build go1.9
package unix

426
vendor/golang.org/x/sys/unix/asm_zos_s390x.s generated vendored Normal file
View File

@ -0,0 +1,426 @@
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build zos && s390x && gc
// +build zos
// +build s390x
// +build gc
#include "textflag.h"
#define PSALAA 1208(R0)
#define GTAB64(x) 80(x)
#define LCA64(x) 88(x)
#define CAA(x) 8(x)
#define EDCHPXV(x) 1016(x) // in the CAA
#define SAVSTACK_ASYNC(x) 336(x) // in the LCA
// SS_*, where x=SAVSTACK_ASYNC
#define SS_LE(x) 0(x)
#define SS_GO(x) 8(x)
#define SS_ERRNO(x) 16(x)
#define SS_ERRNOJR(x) 20(x)
#define LE_CALL BYTE $0x0D; BYTE $0x76; // BL R7, R6
TEXT ·clearErrno(SB),NOSPLIT,$0-0
BL addrerrno<>(SB)
MOVD $0, 0(R3)
RET
// Returns the address of errno in R3.
TEXT addrerrno<>(SB),NOSPLIT|NOFRAME,$0-0
// Get library control area (LCA).
MOVW PSALAA, R8
MOVD LCA64(R8), R8
// Get __errno FuncDesc.
MOVD CAA(R8), R9
MOVD EDCHPXV(R9), R9
ADD $(0x156*16), R9
LMG 0(R9), R5, R6
// Switch to saved LE stack.
MOVD SAVSTACK_ASYNC(R8), R9
MOVD 0(R9), R4
MOVD $0, 0(R9)
// Call __errno function.
LE_CALL
NOPH
// Switch back to Go stack.
XOR R0, R0 // Restore R0 to $0.
MOVD R4, 0(R9) // Save stack pointer.
RET
TEXT ·syscall_syscall(SB),NOSPLIT,$0-56
BL runtime·entersyscall(SB)
MOVD a1+8(FP), R1
MOVD a2+16(FP), R2
MOVD a3+24(FP), R3
// Get library control area (LCA).
MOVW PSALAA, R8
MOVD LCA64(R8), R8
// Get function.
MOVD CAA(R8), R9
MOVD EDCHPXV(R9), R9
MOVD trap+0(FP), R5
SLD $4, R5
ADD R5, R9
LMG 0(R9), R5, R6
// Restore LE stack.
MOVD SAVSTACK_ASYNC(R8), R9
MOVD 0(R9), R4
MOVD $0, 0(R9)
// Call function.
LE_CALL
NOPH
XOR R0, R0 // Restore R0 to $0.
MOVD R4, 0(R9) // Save stack pointer.
MOVD R3, r1+32(FP)
MOVD R0, r2+40(FP)
MOVD R0, err+48(FP)
MOVW R3, R4
CMP R4, $-1
BNE done
BL addrerrno<>(SB)
MOVWZ 0(R3), R3
MOVD R3, err+48(FP)
done:
BL runtime·exitsyscall(SB)
RET
TEXT ·syscall_rawsyscall(SB),NOSPLIT,$0-56
MOVD a1+8(FP), R1
MOVD a2+16(FP), R2
MOVD a3+24(FP), R3
// Get library control area (LCA).
MOVW PSALAA, R8
MOVD LCA64(R8), R8
// Get function.
MOVD CAA(R8), R9
MOVD EDCHPXV(R9), R9
MOVD trap+0(FP), R5
SLD $4, R5
ADD R5, R9
LMG 0(R9), R5, R6
// Restore LE stack.
MOVD SAVSTACK_ASYNC(R8), R9
MOVD 0(R9), R4
MOVD $0, 0(R9)
// Call function.
LE_CALL
NOPH
XOR R0, R0 // Restore R0 to $0.
MOVD R4, 0(R9) // Save stack pointer.
MOVD R3, r1+32(FP)
MOVD R0, r2+40(FP)
MOVD R0, err+48(FP)
MOVW R3, R4
CMP R4, $-1
BNE done
BL addrerrno<>(SB)
MOVWZ 0(R3), R3
MOVD R3, err+48(FP)
done:
RET
TEXT ·syscall_syscall6(SB),NOSPLIT,$0-80
BL runtime·entersyscall(SB)
MOVD a1+8(FP), R1
MOVD a2+16(FP), R2
MOVD a3+24(FP), R3
// Get library control area (LCA).
MOVW PSALAA, R8
MOVD LCA64(R8), R8
// Get function.
MOVD CAA(R8), R9
MOVD EDCHPXV(R9), R9
MOVD trap+0(FP), R5
SLD $4, R5
ADD R5, R9
LMG 0(R9), R5, R6
// Restore LE stack.
MOVD SAVSTACK_ASYNC(R8), R9
MOVD 0(R9), R4
MOVD $0, 0(R9)
// Fill in parameter list.
MOVD a4+32(FP), R12
MOVD R12, (2176+24)(R4)
MOVD a5+40(FP), R12
MOVD R12, (2176+32)(R4)
MOVD a6+48(FP), R12
MOVD R12, (2176+40)(R4)
// Call function.
LE_CALL
NOPH
XOR R0, R0 // Restore R0 to $0.
MOVD R4, 0(R9) // Save stack pointer.
MOVD R3, r1+56(FP)
MOVD R0, r2+64(FP)
MOVD R0, err+72(FP)
MOVW R3, R4
CMP R4, $-1
BNE done
BL addrerrno<>(SB)
MOVWZ 0(R3), R3
MOVD R3, err+72(FP)
done:
BL runtime·exitsyscall(SB)
RET
TEXT ·syscall_rawsyscall6(SB),NOSPLIT,$0-80
MOVD a1+8(FP), R1
MOVD a2+16(FP), R2
MOVD a3+24(FP), R3
// Get library control area (LCA).
MOVW PSALAA, R8
MOVD LCA64(R8), R8
// Get function.
MOVD CAA(R8), R9
MOVD EDCHPXV(R9), R9
MOVD trap+0(FP), R5
SLD $4, R5
ADD R5, R9
LMG 0(R9), R5, R6
// Restore LE stack.
MOVD SAVSTACK_ASYNC(R8), R9
MOVD 0(R9), R4
MOVD $0, 0(R9)
// Fill in parameter list.
MOVD a4+32(FP), R12
MOVD R12, (2176+24)(R4)
MOVD a5+40(FP), R12
MOVD R12, (2176+32)(R4)
MOVD a6+48(FP), R12
MOVD R12, (2176+40)(R4)
// Call function.
LE_CALL
NOPH
XOR R0, R0 // Restore R0 to $0.
MOVD R4, 0(R9) // Save stack pointer.
MOVD R3, r1+56(FP)
MOVD R0, r2+64(FP)
MOVD R0, err+72(FP)
MOVW R3, R4
CMP R4, $-1
BNE done
BL addrerrno<>(SB)
MOVWZ 0(R3), R3
MOVD R3, err+72(FP)
done:
RET
TEXT ·syscall_syscall9(SB),NOSPLIT,$0
BL runtime·entersyscall(SB)
MOVD a1+8(FP), R1
MOVD a2+16(FP), R2
MOVD a3+24(FP), R3
// Get library control area (LCA).
MOVW PSALAA, R8
MOVD LCA64(R8), R8
// Get function.
MOVD CAA(R8), R9
MOVD EDCHPXV(R9), R9
MOVD trap+0(FP), R5
SLD $4, R5
ADD R5, R9
LMG 0(R9), R5, R6
// Restore LE stack.
MOVD SAVSTACK_ASYNC(R8), R9
MOVD 0(R9), R4
MOVD $0, 0(R9)
// Fill in parameter list.
MOVD a4+32(FP), R12
MOVD R12, (2176+24)(R4)
MOVD a5+40(FP), R12
MOVD R12, (2176+32)(R4)
MOVD a6+48(FP), R12
MOVD R12, (2176+40)(R4)
MOVD a7+56(FP), R12
MOVD R12, (2176+48)(R4)
MOVD a8+64(FP), R12
MOVD R12, (2176+56)(R4)
MOVD a9+72(FP), R12
MOVD R12, (2176+64)(R4)
// Call function.
LE_CALL
NOPH
XOR R0, R0 // Restore R0 to $0.
MOVD R4, 0(R9) // Save stack pointer.
MOVD R3, r1+80(FP)
MOVD R0, r2+88(FP)
MOVD R0, err+96(FP)
MOVW R3, R4
CMP R4, $-1
BNE done
BL addrerrno<>(SB)
MOVWZ 0(R3), R3
MOVD R3, err+96(FP)
done:
BL runtime·exitsyscall(SB)
RET
TEXT ·syscall_rawsyscall9(SB),NOSPLIT,$0
MOVD a1+8(FP), R1
MOVD a2+16(FP), R2
MOVD a3+24(FP), R3
// Get library control area (LCA).
MOVW PSALAA, R8
MOVD LCA64(R8), R8
// Get function.
MOVD CAA(R8), R9
MOVD EDCHPXV(R9), R9
MOVD trap+0(FP), R5
SLD $4, R5
ADD R5, R9
LMG 0(R9), R5, R6
// Restore LE stack.
MOVD SAVSTACK_ASYNC(R8), R9
MOVD 0(R9), R4
MOVD $0, 0(R9)
// Fill in parameter list.
MOVD a4+32(FP), R12
MOVD R12, (2176+24)(R4)
MOVD a5+40(FP), R12
MOVD R12, (2176+32)(R4)
MOVD a6+48(FP), R12
MOVD R12, (2176+40)(R4)
MOVD a7+56(FP), R12
MOVD R12, (2176+48)(R4)
MOVD a8+64(FP), R12
MOVD R12, (2176+56)(R4)
MOVD a9+72(FP), R12
MOVD R12, (2176+64)(R4)
// Call function.
LE_CALL
NOPH
XOR R0, R0 // Restore R0 to $0.
MOVD R4, 0(R9) // Save stack pointer.
MOVD R3, r1+80(FP)
MOVD R0, r2+88(FP)
MOVD R0, err+96(FP)
MOVW R3, R4
CMP R4, $-1
BNE done
BL addrerrno<>(SB)
MOVWZ 0(R3), R3
MOVD R3, err+96(FP)
done:
RET
// func svcCall(fnptr unsafe.Pointer, argv *unsafe.Pointer, dsa *uint64)
TEXT ·svcCall(SB),NOSPLIT,$0
BL runtime·save_g(SB) // Save g and stack pointer
MOVW PSALAA, R8
MOVD LCA64(R8), R8
MOVD SAVSTACK_ASYNC(R8), R9
MOVD R15, 0(R9)
MOVD argv+8(FP), R1 // Move function arguments into registers
MOVD dsa+16(FP), g
MOVD fnptr+0(FP), R15
BYTE $0x0D // Branch to function
BYTE $0xEF
BL runtime·load_g(SB) // Restore g and stack pointer
MOVW PSALAA, R8
MOVD LCA64(R8), R8
MOVD SAVSTACK_ASYNC(R8), R9
MOVD 0(R9), R15
RET
// func svcLoad(name *byte) unsafe.Pointer
TEXT ·svcLoad(SB),NOSPLIT,$0
MOVD R15, R2 // Save go stack pointer
MOVD name+0(FP), R0 // Move SVC args into registers
MOVD $0x80000000, R1
MOVD $0, R15
BYTE $0x0A // SVC 08 LOAD
BYTE $0x08
MOVW R15, R3 // Save return code from SVC
MOVD R2, R15 // Restore go stack pointer
CMP R3, $0 // Check SVC return code
BNE error
MOVD $-2, R3 // Reset last bit of entry point to zero
AND R0, R3
MOVD R3, addr+8(FP) // Return entry point returned by SVC
CMP R0, R3 // Check if last bit of entry point was set
BNE done
MOVD R15, R2 // Save go stack pointer
MOVD $0, R15 // Move SVC args into registers (entry point still in r0 from SVC 08)
BYTE $0x0A // SVC 09 DELETE
BYTE $0x09
MOVD R2, R15 // Restore go stack pointer
error:
MOVD $0, addr+8(FP) // Return 0 on failure
done:
XOR R0, R0 // Reset r0 to 0
RET
// func svcUnload(name *byte, fnptr unsafe.Pointer) int64
TEXT ·svcUnload(SB),NOSPLIT,$0
MOVD R15, R2 // Save go stack pointer
MOVD name+0(FP), R0 // Move SVC args into registers
MOVD addr+8(FP), R15
BYTE $0x0A // SVC 09
BYTE $0x09
XOR R0, R0 // Reset r0 to 0
MOVD R15, R1 // Save SVC return code
MOVD R2, R15 // Restore go stack pointer
MOVD R1, rc+0(FP) // Return SVC return code
RET
// func gettid() uint64
TEXT ·gettid(SB), NOSPLIT, $0
// Get library control area (LCA).
MOVW PSALAA, R8
MOVD LCA64(R8), R8
// Get CEECAATHDID
MOVD CAA(R8), R9
MOVD 0x3D0(R9), R9
MOVD R9, ret+0(FP)
RET

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build freebsd
// +build freebsd
package unix

View File

@ -2,7 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris
//go:build aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris || zos
// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris zos
package unix

View File

@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build aix
// +build ppc
//go:build aix && ppc
// +build aix,ppc
// Functions to access/create device major and minor numbers matching the
// encoding used by AIX.

View File

@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build aix
// +build ppc64
//go:build aix && ppc64
// +build aix,ppc64
// Functions to access/create device major and minor numbers matching the
// encoding used by AIX.

29
vendor/golang.org/x/sys/unix/dev_zos.go generated vendored Normal file
View File

@ -0,0 +1,29 @@
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build zos && s390x
// +build zos,s390x
// Functions to access/create device major and minor numbers matching the
// encoding used by z/OS.
//
// The information below is extracted and adapted from <sys/stat.h> macros.
package unix
// Major returns the major component of a z/OS device number.
func Major(dev uint64) uint32 {
return uint32((dev >> 16) & 0x0000FFFF)
}
// Minor returns the minor component of a z/OS device number.
func Minor(dev uint64) uint32 {
return uint32(dev & 0x0000FFFF)
}
// Mkdev returns a z/OS device number generated from the given major and minor
// components.
func Mkdev(major, minor uint32) uint64 {
return (uint64(major) << 16) | uint64(minor)
}
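
A brief illustrative round-trip of the encoding defined above (arbitrary values; this only builds under the zos,s390x constraint):

package main

import (
	"fmt"

	"golang.org/x/sys/unix"
)

func main() {
	dev := unix.Mkdev(5, 7)                       // (5 << 16) | 7 == 0x00050007
	fmt.Println(unix.Major(dev), unix.Minor(dev)) // prints: 5 7
}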

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris
// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris
package unix

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//
//go:build armbe || arm64be || m68k || mips || mips64 || mips64p32 || ppc || ppc64 || s390 || s390x || shbe || sparc || sparc64
// +build armbe arm64be m68k mips mips64 mips64p32 ppc ppc64 s390 s390x shbe sparc sparc64
package unix

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//
//go:build 386 || amd64 || amd64p32 || alpha || arm || arm64 || mipsle || mips64le || mips64p32le || nios2 || ppc64le || riscv || riscv64 || sh
// +build 386 amd64 amd64p32 alpha arm arm64 mipsle mips64le mips64p32le nios2 ppc64le riscv riscv64 sh
package unix

View File

@ -2,7 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris
//go:build aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris || zos
// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris zos
// Unix environment variables.

221
vendor/golang.org/x/sys/unix/epoll_zos.go generated vendored Normal file
View File

@ -0,0 +1,221 @@
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build zos && s390x
// +build zos,s390x
package unix
import (
"sync"
)
// This file simulates epoll on z/OS using poll.
// Analogous to epoll_event on Linux.
// TODO(neeilan): Pad is because the Linux kernel expects a 96-bit struct. We never pass this to the kernel; remove?
type EpollEvent struct {
Events uint32
Fd int32
Pad int32
}
const (
EPOLLERR = 0x8
EPOLLHUP = 0x10
EPOLLIN = 0x1
EPOLLMSG = 0x400
EPOLLOUT = 0x4
EPOLLPRI = 0x2
EPOLLRDBAND = 0x80
EPOLLRDNORM = 0x40
EPOLLWRBAND = 0x200
EPOLLWRNORM = 0x100
EPOLL_CTL_ADD = 0x1
EPOLL_CTL_DEL = 0x2
EPOLL_CTL_MOD = 0x3
// The following constants are part of the epoll API, but represent
// currently unsupported functionality on z/OS.
// EPOLL_CLOEXEC = 0x80000
// EPOLLET = 0x80000000
// EPOLLONESHOT = 0x40000000
// EPOLLRDHUP = 0x2000 // Typically used with edge-triggered notifications
// EPOLLEXCLUSIVE = 0x10000000 // Exclusive wake-up mode
// EPOLLWAKEUP = 0x20000000 // Relies on Linux's BLOCK_SUSPEND capability
)
// TODO(neeilan): We can eliminate these epToPoll / pToEpoll calls by using identical mask values for POLL/EPOLL
// constants where possible. The lower 16 bits of epoll events (uint32) can fit any system poll event (int16).
// epToPollEvt converts epoll event field to poll equivalent.
// In epoll, Events is a 32-bit field, while poll uses 16 bits.
func epToPollEvt(events uint32) int16 {
var ep2p = map[uint32]int16{
EPOLLIN: POLLIN,
EPOLLOUT: POLLOUT,
EPOLLHUP: POLLHUP,
EPOLLPRI: POLLPRI,
EPOLLERR: POLLERR,
}
var pollEvts int16 = 0
for epEvt, pEvt := range ep2p {
if (events & epEvt) != 0 {
pollEvts |= pEvt
}
}
return pollEvts
}
// pToEpollEvt converts 16 bit poll event bitfields to 32-bit epoll event fields.
func pToEpollEvt(revents int16) uint32 {
var p2ep = map[int16]uint32{
POLLIN: EPOLLIN,
POLLOUT: EPOLLOUT,
POLLHUP: EPOLLHUP,
POLLPRI: EPOLLPRI,
POLLERR: EPOLLERR,
}
var epollEvts uint32 = 0
for pEvt, epEvt := range p2ep {
if (revents & pEvt) != 0 {
epollEvts |= epEvt
}
}
return epollEvts
}
// Per-process epoll implementation.
type epollImpl struct {
mu sync.Mutex
epfd2ep map[int]*eventPoll
nextEpfd int
}
// eventPoll holds a set of file descriptors being watched by the process. A process can have multiple epoll instances.
// On Linux, this is an in-kernel data structure accessed through a fd.
type eventPoll struct {
mu sync.Mutex
fds map[int]*EpollEvent
}
// epoll impl for this process.
var impl epollImpl = epollImpl{
epfd2ep: make(map[int]*eventPoll),
nextEpfd: 0,
}
func (e *epollImpl) epollcreate(size int) (epfd int, err error) {
e.mu.Lock()
defer e.mu.Unlock()
epfd = e.nextEpfd
e.nextEpfd++
e.epfd2ep[epfd] = &eventPoll{
fds: make(map[int]*EpollEvent),
}
return epfd, nil
}
func (e *epollImpl) epollcreate1(flag int) (fd int, err error) {
return e.epollcreate(4)
}
func (e *epollImpl) epollctl(epfd int, op int, fd int, event *EpollEvent) (err error) {
e.mu.Lock()
defer e.mu.Unlock()
ep, ok := e.epfd2ep[epfd]
if !ok {
return EBADF
}
switch op {
case EPOLL_CTL_ADD:
// TODO(neeilan): When we make epfds and fds disjoint, detect epoll
// loops here (instances watching each other) and return ELOOP.
if _, ok := ep.fds[fd]; ok {
return EEXIST
}
ep.fds[fd] = event
case EPOLL_CTL_MOD:
if _, ok := ep.fds[fd]; !ok {
return ENOENT
}
ep.fds[fd] = event
case EPOLL_CTL_DEL:
if _, ok := ep.fds[fd]; !ok {
return ENOENT
}
delete(ep.fds, fd)
}
return nil
}
// Must be called while holding ep.mu
func (ep *eventPoll) getFds() []int {
fds := make([]int, 0, len(ep.fds)) // zero length, full capacity: entries are appended below
for fd := range ep.fds {
fds = append(fds, fd)
}
return fds
}
func (e *epollImpl) epollwait(epfd int, events []EpollEvent, msec int) (n int, err error) {
e.mu.Lock() // in [rare] case of concurrent epollcreate + epollwait
ep, ok := e.epfd2ep[epfd]
if !ok {
e.mu.Unlock()
return 0, EBADF
}
pollfds := make([]PollFd, 0, 4) // zero length so append does not leave zero-valued PollFds
for fd, epollevt := range ep.fds {
pollfds = append(pollfds, PollFd{Fd: int32(fd), Events: epToPollEvt(epollevt.Events)})
}
e.mu.Unlock()
n, err = Poll(pollfds, msec)
if err != nil {
return n, err
}
i := 0
for _, pFd := range pollfds {
if pFd.Revents != 0 {
events[i] = EpollEvent{Fd: pFd.Fd, Events: pToEpollEvt(pFd.Revents)}
i++
}
if i == n {
break
}
}
return n, nil
}
func EpollCreate(size int) (fd int, err error) {
return impl.epollcreate(size)
}
func EpollCreate1(flag int) (fd int, err error) {
return impl.epollcreate1(flag)
}
func EpollCtl(epfd int, op int, fd int, event *EpollEvent) (err error) {
return impl.epollctl(epfd, op, fd, event)
}
// Because EpollWait mutates events, the caller is expected to coordinate
// concurrent access if calling with the same epfd from multiple goroutines.
func EpollWait(epfd int, events []EpollEvent, msec int) (n int, err error) {
return impl.epollwait(epfd, events, msec)
}
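
A minimal usage sketch of the simulated epoll API above (z/OS only; assumes import "golang.org/x/sys/unix", an already-open fd, and abbreviated error handling):

func waitReadable(fd int) error {
	epfd, err := unix.EpollCreate1(0)
	if err != nil {
		return err
	}
	ev := unix.EpollEvent{Events: unix.EPOLLIN, Fd: int32(fd)}
	if err := unix.EpollCtl(epfd, unix.EPOLL_CTL_ADD, fd, &ev); err != nil {
		return err
	}
	events := make([]unix.EpollEvent, 8)
	n, err := unix.EpollWait(epfd, events, 1000) // wait up to one second
	if err != nil {
		return err
	}
	for i := 0; i < n; i++ {
		if events[i].Fd == int32(fd) && events[i].Events&unix.EPOLLIN != 0 {
			return nil // fd became readable
		}
	}
	return unix.EAGAIN // used here only as a "not ready" sentinel for this sketch
}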

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build dragonfly || freebsd || linux || netbsd || openbsd
// +build dragonfly freebsd linux netbsd openbsd
package unix

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build (linux && 386) || (linux && arm) || (linux && mips) || (linux && mipsle)
// +build linux,386 linux,arm linux,mips linux,mipsle
package unix

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris
// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris
package unix

164
vendor/golang.org/x/sys/unix/fstatfs_zos.go generated vendored Normal file
View File

@ -0,0 +1,164 @@
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build zos && s390x
// +build zos,s390x
package unix
import (
"unsafe"
)
// This file simulates fstatfs on z/OS using fstatvfs and w_getmntent.
func Fstatfs(fd int, stat *Statfs_t) (err error) {
var stat_v Statvfs_t
err = Fstatvfs(fd, &stat_v)
if err == nil {
// populate stat
stat.Type = 0
stat.Bsize = stat_v.Bsize
stat.Blocks = stat_v.Blocks
stat.Bfree = stat_v.Bfree
stat.Bavail = stat_v.Bavail
stat.Files = stat_v.Files
stat.Ffree = stat_v.Ffree
stat.Fsid = stat_v.Fsid
stat.Namelen = stat_v.Namemax
stat.Frsize = stat_v.Frsize
stat.Flags = stat_v.Flag
for passn := 0; passn < 5; passn++ {
switch passn {
case 0:
err = tryGetmntent64(stat)
break
case 1:
err = tryGetmntent128(stat)
break
case 2:
err = tryGetmntent256(stat)
break
case 3:
err = tryGetmntent512(stat)
break
case 4:
err = tryGetmntent1024(stat)
break
default:
break
}
// proceed to return if: err is nil (found), or err is non-nil but not ERANGE (another error occurred)
if err == nil || err != nil && err != ERANGE {
break
}
}
}
return err
}
func tryGetmntent64(stat *Statfs_t) (err error) {
var mnt_ent_buffer struct {
header W_Mnth
filesys_info [64]W_Mntent
}
var buffer_size int = int(unsafe.Sizeof(mnt_ent_buffer))
fs_count, err := W_Getmntent((*byte)(unsafe.Pointer(&mnt_ent_buffer)), buffer_size)
if err != nil {
return err
}
err = ERANGE //return ERANGE if no match is found in this batch
for i := 0; i < fs_count; i++ {
if stat.Fsid == uint64(mnt_ent_buffer.filesys_info[i].Dev) {
stat.Type = uint32(mnt_ent_buffer.filesys_info[i].Fstname[0])
err = nil
break
}
}
return err
}
func tryGetmntent128(stat *Statfs_t) (err error) {
var mnt_ent_buffer struct {
header W_Mnth
filesys_info [128]W_Mntent
}
var buffer_size int = int(unsafe.Sizeof(mnt_ent_buffer))
fs_count, err := W_Getmntent((*byte)(unsafe.Pointer(&mnt_ent_buffer)), buffer_size)
if err != nil {
return err
}
err = ERANGE //return ERANGE if no match is found in this batch
for i := 0; i < fs_count; i++ {
if stat.Fsid == uint64(mnt_ent_buffer.filesys_info[i].Dev) {
stat.Type = uint32(mnt_ent_buffer.filesys_info[i].Fstname[0])
err = nil
break
}
}
return err
}
func tryGetmntent256(stat *Statfs_t) (err error) {
var mnt_ent_buffer struct {
header W_Mnth
filesys_info [256]W_Mntent
}
var buffer_size int = int(unsafe.Sizeof(mnt_ent_buffer))
fs_count, err := W_Getmntent((*byte)(unsafe.Pointer(&mnt_ent_buffer)), buffer_size)
if err != nil {
return err
}
err = ERANGE //return ERANGE if no match is found in this batch
for i := 0; i < fs_count; i++ {
if stat.Fsid == uint64(mnt_ent_buffer.filesys_info[i].Dev) {
stat.Type = uint32(mnt_ent_buffer.filesys_info[i].Fstname[0])
err = nil
break
}
}
return err
}
func tryGetmntent512(stat *Statfs_t) (err error) {
var mnt_ent_buffer struct {
header W_Mnth
filesys_info [512]W_Mntent
}
var buffer_size int = int(unsafe.Sizeof(mnt_ent_buffer))
fs_count, err := W_Getmntent((*byte)(unsafe.Pointer(&mnt_ent_buffer)), buffer_size)
if err != nil {
return err
}
err = ERANGE //return ERANGE if no match is found in this batch
for i := 0; i < fs_count; i++ {
if stat.Fsid == uint64(mnt_ent_buffer.filesys_info[i].Dev) {
stat.Type = uint32(mnt_ent_buffer.filesys_info[i].Fstname[0])
err = nil
break
}
}
return err
}
func tryGetmntent1024(stat *Statfs_t) (err error) {
var mnt_ent_buffer struct {
header W_Mnth
filesys_info [1024]W_Mntent
}
var buffer_size int = int(unsafe.Sizeof(mnt_ent_buffer))
fs_count, err := W_Getmntent((*byte)(unsafe.Pointer(&mnt_ent_buffer)), buffer_size)
if err != nil {
return err
}
err = ERANGE //return ERANGE if no match is found in this batch
for i := 0; i < fs_count; i++ {
if stat.Fsid == uint64(mnt_ent_buffer.filesys_info[i].Dev) {
stat.Type = uint32(mnt_ent_buffer.filesys_info[i].Fstname[0])
err = nil
break
}
}
return err
}
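
A minimal usage sketch of the Fstatfs shim above (z/OS only; the opened path and the fields printed are purely illustrative):

package main

import (
	"fmt"

	"golang.org/x/sys/unix"
)

func main() {
	fd, err := unix.Open("/tmp", unix.O_RDONLY, 0)
	if err != nil {
		panic(err)
	}
	defer unix.Close(fd)

	var st unix.Statfs_t
	if err := unix.Fstatfs(fd, &st); err != nil {
		panic(err)
	}
	fmt.Printf("bsize=%d free=%d avail=%d\n", st.Bsize, st.Bfree, st.Bavail)
}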

View File

@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build gccgo
// +build !aix
//go:build gccgo && !aix
// +build gccgo,!aix
package unix

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build gccgo && linux && amd64
// +build gccgo,linux,amd64
package unix

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris
// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris
package unix

74
vendor/golang.org/x/sys/unix/ioctl_zos.go generated vendored Normal file
View File

@ -0,0 +1,74 @@
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build zos && s390x
// +build zos,s390x
package unix
import (
"runtime"
"unsafe"
)
// ioctl itself should not be exposed directly, but additional get/set
// functions for specific types are permissible.
// IoctlSetInt performs an ioctl operation which sets an integer value
// on fd, using the specified request number.
func IoctlSetInt(fd int, req uint, value int) error {
return ioctl(fd, req, uintptr(value))
}
// IoctlSetWinsize performs an ioctl on fd with a *Winsize argument.
//
// To change fd's window size, the req argument should be TIOCSWINSZ.
func IoctlSetWinsize(fd int, req uint, value *Winsize) error {
// TODO: if we get the chance, remove the req parameter and
// hardcode TIOCSWINSZ.
err := ioctl(fd, req, uintptr(unsafe.Pointer(value)))
runtime.KeepAlive(value)
return err
}
// IoctlSetTermios performs an ioctl on fd with a *Termios.
//
// The req value is expected to be TCSETS, TCSETSW, or TCSETSF
func IoctlSetTermios(fd int, req uint, value *Termios) error {
if (req != TCSETS) && (req != TCSETSW) && (req != TCSETSF) {
return ENOSYS
}
err := Tcsetattr(fd, int(req), value)
runtime.KeepAlive(value)
return err
}
// IoctlGetInt performs an ioctl operation which gets an integer value
// from fd, using the specified request number.
//
// A few ioctl requests use the return value as an output parameter;
// for those, IoctlRetInt should be used instead of this function.
func IoctlGetInt(fd int, req uint) (int, error) {
var value int
err := ioctl(fd, req, uintptr(unsafe.Pointer(&value)))
return value, err
}
func IoctlGetWinsize(fd int, req uint) (*Winsize, error) {
var value Winsize
err := ioctl(fd, req, uintptr(unsafe.Pointer(&value)))
return &value, err
}
// IoctlGetTermios performs an ioctl on fd with a *Termios.
//
// The req value is expected to be TCGETS
func IoctlGetTermios(fd int, req uint) (*Termios, error) {
var value Termios
if req != TCGETS {
return &value, ENOSYS
}
err := Tcgetattr(fd, &value)
return &value, err
}
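
A minimal usage sketch of the helpers above, using only the request constants this file itself references (assumes import "golang.org/x/sys/unix" and that fd refers to a terminal):

func copyTermios(fd int) error {
	tio, err := unix.IoctlGetTermios(fd, unix.TCGETS)
	if err != nil {
		return err
	}
	// ...adjust *tio here if needed...
	return unix.IoctlSetTermios(fd, unix.TCSETS, tio)
}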

View File

@ -65,6 +65,7 @@ includes_Darwin='
#include <sys/ptrace.h>
#include <sys/select.h>
#include <sys/socket.h>
#include <sys/un.h>
#include <sys/sockio.h>
#include <sys/sys_domain.h>
#include <sys/sysctl.h>
@ -114,6 +115,7 @@ includes_FreeBSD='
#include <sys/sched.h>
#include <sys/select.h>
#include <sys/socket.h>
#include <sys/un.h>
#include <sys/sockio.h>
#include <sys/stat.h>
#include <sys/sysctl.h>
@ -204,6 +206,7 @@ struct ltchars {
#include <linux/devlink.h>
#include <linux/dm-ioctl.h>
#include <linux/errqueue.h>
#include <linux/ethtool_netlink.h>
#include <linux/falloc.h>
#include <linux/fanotify.h>
#include <linux/filter.h>
@ -212,6 +215,7 @@ struct ltchars {
#include <linux/fsverity.h>
#include <linux/genetlink.h>
#include <linux/hdreg.h>
#include <linux/hidraw.h>
#include <linux/icmpv6.h>
#include <linux/if.h>
#include <linux/if_addr.h>
@ -222,6 +226,7 @@ struct ltchars {
#include <linux/if_tun.h>
#include <linux/if_packet.h>
#include <linux/if_xdp.h>
#include <linux/input.h>
#include <linux/kexec.h>
#include <linux/keyctl.h>
#include <linux/loop.h>
@ -298,6 +303,17 @@ struct ltchars {
// Including linux/l2tp.h here causes conflicts between linux/in.h
// and netinet/in.h included via net/route.h above.
#define IPPROTO_L2TP 115
// Copied from linux/hid.h.
// Keep in sync with the size of the referenced fields.
#define _HIDIOCGRAWNAME_LEN 128 // sizeof_field(struct hid_device, name)
#define _HIDIOCGRAWPHYS_LEN 64 // sizeof_field(struct hid_device, phys)
#define _HIDIOCGRAWUNIQ_LEN 64 // sizeof_field(struct hid_device, uniq)
#define _HIDIOCGRAWNAME HIDIOCGRAWNAME(_HIDIOCGRAWNAME_LEN)
#define _HIDIOCGRAWPHYS HIDIOCGRAWPHYS(_HIDIOCGRAWPHYS_LEN)
#define _HIDIOCGRAWUNIQ HIDIOCGRAWUNIQ(_HIDIOCGRAWUNIQ_LEN)
'
includes_NetBSD='
@ -445,6 +461,8 @@ ccflags="$@"
$2 !~ /^EPROC_/ &&
$2 !~ /^EQUIV_/ &&
$2 !~ /^EXPR_/ &&
$2 !~ /^EVIOC/ &&
$2 !~ /^EV_/ &&
$2 ~ /^E[A-Z0-9_]+$/ ||
$2 ~ /^B[0-9_]+$/ ||
$2 ~ /^(OLD|NEW)DEV$/ ||
@ -479,7 +497,7 @@ ccflags="$@"
$2 ~ /^LOCK_(SH|EX|NB|UN)$/ ||
$2 ~ /^LO_(KEY|NAME)_SIZE$/ ||
$2 ~ /^LOOP_(CLR|CTL|GET|SET)_/ ||
$2 ~ /^(AF|SOCK|SO|SOL|IPPROTO|IP|IPV6|ICMP6|TCP|MCAST|EVFILT|NOTE|EV|SHUT|PROT|MAP|MFD|T?PACKET|MSG|SCM|MCL|DT|MADV|PR)_/ ||
$2 ~ /^(AF|SOCK|SO|SOL|IPPROTO|IP|IPV6|ICMP6|TCP|MCAST|EVFILT|NOTE|SHUT|PROT|MAP|MFD|T?PACKET|MSG|SCM|MCL|DT|MADV|PR|LOCAL)_/ ||
$2 ~ /^TP_STATUS_/ ||
$2 ~ /^FALLOC_/ ||
$2 == "ICMPV6_FILTER" ||
@ -563,11 +581,15 @@ ccflags="$@"
$2 ~ /^TIPC_/ ||
$2 !~ "DEVLINK_RELOAD_LIMITS_VALID_MASK" &&
$2 ~ /^DEVLINK_/ ||
$2 ~ /^ETHTOOL_/ ||
$2 ~ /^LWTUNNEL_IP/ ||
$2 !~ "WMESGLEN" &&
$2 ~ /^W[A-Z0-9]+$/ ||
$2 ~/^PPPIOC/ ||
$2 ~ /^FAN_|FANOTIFY_/ ||
$2 == "HID_MAX_DESCRIPTOR_SIZE" ||
$2 ~ /^_?HIDIOC/ ||
$2 ~ /^BUS_(USB|HIL|BLUETOOTH|VIRTUAL)$/ ||
$2 ~ /^BLK[A-Z]*(GET$|SET$|BUF$|PART$|SIZE)/ {printf("\t%s = C.%s\n", $2, $2)}
$2 ~ /^__WCOREFLAG$/ {next}
$2 ~ /^__W[A-Z0-9]+$/ {printf("\t%s = C.%s\n", substr($2,3), $2)}

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris
// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris
// For Unix, get the pagesize from the runtime.

12
vendor/golang.org/x/sys/unix/ptrace_darwin.go generated vendored Normal file
View File

@ -0,0 +1,12 @@
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build darwin && !ios
// +build darwin,!ios
package unix
func ptrace(request int, pid int, addr uintptr, data uintptr) error {
return ptrace1(request, pid, addr, data)
}

12
vendor/golang.org/x/sys/unix/ptrace_ios.go generated vendored Normal file
View File

@ -0,0 +1,12 @@
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build ios
// +build ios
package unix
func ptrace(request int, pid int, addr uintptr, data uintptr) (err error) {
return ENOTSUP
}

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build (darwin && race) || (linux && race) || (freebsd && race)
// +build darwin,race linux,race freebsd,race
package unix

View File

@ -2,7 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build aix darwin,!race linux,!race freebsd,!race netbsd openbsd solaris dragonfly
//go:build aix || (darwin && !race) || (linux && !race) || (freebsd && !race) || netbsd || openbsd || solaris || dragonfly || zos
// +build aix darwin,!race linux,!race freebsd,!race netbsd openbsd solaris dragonfly zos
package unix

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build aix || dragonfly || freebsd || linux || netbsd || openbsd
// +build aix dragonfly freebsd linux netbsd openbsd
package unix

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build darwin
// +build darwin
package unix

View File

@ -2,7 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris
//go:build aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris || zos
// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris zos
// Socket control messages

View File

@ -2,7 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build aix darwin freebsd linux netbsd openbsd solaris
//go:build aix || darwin || freebsd || linux || netbsd || openbsd || solaris || zos
// +build aix darwin freebsd linux netbsd openbsd solaris zos
package unix
@ -36,6 +37,10 @@ func cmsgAlignOf(salen int) int {
if runtime.GOOS == "netbsd" && runtime.GOARCH == "arm64" {
salign = 16
}
case "zos":
// z/OS socket macros use [32-bit] sizeof(int) alignment,
// not pointer width.
salign = SizeofInt
}
return (salen + salign - 1) & ^(salign - 1)
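// Illustrative arithmetic for the rounding above, assuming salign == 4 (the
// z/OS case): (13+4-1) &^ (4-1) == 16, so a salen of 13 rounds up to 16, while
// (16+4-1) &^ (4-1) == 16, so an already-aligned length is unchanged.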

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris
// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris
package unix

View File

@ -2,7 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris
//go:build aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris || zos
// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris zos
// Package unix contains an interface to the low-level operating system
// primitives. OS details vary depending on the underlying system, and

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build aix
// +build aix
// Aix system calls.
@ -419,8 +420,8 @@ func (w WaitStatus) TrapCause() int { return -1 }
//sys Mknod(path string, mode uint32, dev int) (err error)
//sys Mknodat(dirfd int, path string, mode uint32, dev int) (err error)
//sys Nanosleep(time *Timespec, leftover *Timespec) (err error)
//sys Open(path string, mode int, perm uint32) (fd int, err error) = open64
//sys Openat(dirfd int, path string, flags int, mode uint32) (fd int, err error)
//sys Open(path string, mode int, perm uint32) (fd int, err error) = open64
//sys Openat(dirfd int, path string, flags int, mode uint32) (fd int, err error)
//sys read(fd int, p []byte) (n int, err error)
//sys Readlink(path string, buf []byte) (n int, err error)
//sys Renameat(olddirfd int, oldpath string, newdirfd int, newpath string) (err error)
@ -439,8 +440,8 @@ func (w WaitStatus) TrapCause() int { return -1 }
//sysnb Times(tms *Tms) (ticks uintptr, err error)
//sysnb Umask(mask int) (oldmask int)
//sysnb Uname(buf *Utsname) (err error)
//sys Unlink(path string) (err error)
//sys Unlinkat(dirfd int, path string, flags int) (err error)
//sys Unlink(path string) (err error)
//sys Unlinkat(dirfd int, path string, flags int) (err error)
//sys Ustat(dev int, ubuf *Ustat_t) (err error)
//sys write(fd int, p []byte) (n int, err error)
//sys readlen(fd int, p *byte, np int) (n int, err error) = read
@ -514,7 +515,7 @@ func Munmap(b []byte) (err error) {
//sys Munlock(b []byte) (err error)
//sys Munlockall() (err error)
//sysnb pipe(p *[2]_C_int) (err error)
//sysnb pipe(p *[2]_C_int) (err error)
func Pipe(p []int) (err error) {
if len(p) != 2 {

View File

@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build aix
// +build ppc
//go:build aix && ppc
// +build aix,ppc
package unix

View File

@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build aix
// +build ppc64
//go:build aix && ppc64
// +build aix,ppc64
package unix

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build darwin || dragonfly || freebsd || netbsd || openbsd
// +build darwin dragonfly freebsd netbsd openbsd
// BSD system call wrappers shared by *BSD based systems
@ -318,7 +319,7 @@ func Getsockname(fd int) (sa Sockaddr, err error) {
return anyToSockaddr(fd, &rsa)
}
//sysnb socketpair(domain int, typ int, proto int, fd *[2]int32) (err error)
//sysnb socketpair(domain int, typ int, proto int, fd *[2]int32) (err error)
// GetsockoptString returns the string value of the socket option opt for the
// socket associated with fd at the given socket level.
@ -332,8 +333,8 @@ func GetsockoptString(fd, level, opt int) (string, error) {
return string(buf[:vallen-1]), nil
}
//sys recvfrom(fd int, p []byte, flags int, from *RawSockaddrAny, fromlen *_Socklen) (n int, err error)
//sys sendto(s int, buf []byte, flags int, to unsafe.Pointer, addrlen _Socklen) (err error)
//sys recvfrom(fd int, p []byte, flags int, from *RawSockaddrAny, fromlen *_Socklen) (n int, err error)
//sys sendto(s int, buf []byte, flags int, to unsafe.Pointer, addrlen _Socklen) (err error)
//sys recvmsg(s int, msg *Msghdr, flags int) (n int, err error)
func Recvmsg(fd int, p, oob []byte, flags int) (n, oobn int, recvflags int, from Sockaddr, err error) {
@ -626,7 +627,7 @@ func Futimes(fd int, tv []Timeval) error {
return futimes(fd, (*[2]Timeval)(unsafe.Pointer(&tv[0])))
}
//sys poll(fds *PollFd, nfds int, timeout int) (n int, err error)
//sys poll(fds *PollFd, nfds int, timeout int) (n int, err error)
func Poll(fds []PollFd, timeout int) (n int, err error) {
if len(fds) == 0 {

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build darwin && go1.12 && !go1.13
// +build darwin,go1.12,!go1.13
package unix

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build darwin && go1.13
// +build darwin,go1.13
package unix

View File

@ -119,13 +119,16 @@ type attrList struct {
Forkattr uint32
}
//sysnb pipe() (r int, w int, err error)
//sysnb pipe(p *[2]int32) (err error)
func Pipe(p []int) (err error) {
if len(p) != 2 {
return EINVAL
}
p[0], p[1], err = pipe()
var x [2]int32
err = pipe(&x)
p[0] = int(x[0])
p[1] = int(x[1])
return
}
@ -269,7 +272,7 @@ func setattrlistTimes(path string, times []Timespec, flags int) error {
options)
}
//sys setattrlist(path *byte, list unsafe.Pointer, buf unsafe.Pointer, size uintptr, options int) (err error)
//sys setattrlist(path *byte, list unsafe.Pointer, buf unsafe.Pointer, size uintptr, options int) (err error)
func utimensat(dirfd int, path string, times *[2]Timespec, flags int) error {
// Darwin doesn't support SYS_UTIMENSAT
@ -317,7 +320,7 @@ func IoctlSetIfreqMTU(fd int, ifreq *IfreqMTU) error {
return err
}
//sys sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) = SYS_SYSCTL
//sys sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) = SYS_SYSCTL
func Uname(uname *Utsname) error {
mib := []_C_int{CTL_KERN, KERN_OSTYPE}
@ -375,6 +378,15 @@ func Sendfile(outfd int, infd int, offset *int64, count int) (written int, err e
return
}
// GetsockoptXucred is a getsockopt wrapper that returns an Xucred struct.
// The usual level and opt are SOL_LOCAL and LOCAL_PEERCRED, respectively.
func GetsockoptXucred(fd, level, opt int) (*Xucred, error) {
x := new(Xucred)
vallen := _Socklen(SizeofXucred)
err := getsockopt(fd, level, opt, unsafe.Pointer(x), &vallen)
return x, err
}
//sys sendfile(infd int, outfd int, offset int64, len *int64, hdtr unsafe.Pointer, flags int) (err error)
/*
@ -469,8 +481,8 @@ func Sendfile(outfd int, infd int, offset *int64, count int) (written int, err e
//sys Unlinkat(dirfd int, path string, flags int) (err error)
//sys Unmount(path string, flags int) (err error)
//sys write(fd int, p []byte) (n int, err error)
//sys mmap(addr uintptr, length uintptr, prot int, flag int, fd int, pos int64) (ret uintptr, err error)
//sys munmap(addr uintptr, length uintptr) (err error)
//sys mmap(addr uintptr, length uintptr, prot int, flag int, fd int, pos int64) (ret uintptr, err error)
//sys munmap(addr uintptr, length uintptr) (err error)
//sys readlen(fd int, buf *byte, nbuf int) (n int, err error) = SYS_READ
//sys writelen(fd int, buf *byte, nbuf int) (n int, err error) = SYS_WRITE
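The hunk above adds the GetsockoptXucred wrapper to the vendored darwin syscalls. A minimal usage sketch, not taken from this diff: it assumes a Darwin host and that the SOL_LOCAL and LOCAL_PEERCRED constants are exported by the same x/sys/unix package.

package main

import (
	"fmt"
	"log"

	"golang.org/x/sys/unix"
)

func main() {
	// A connected AF_UNIX socket pair; either end can ask for the
	// credentials of its peer.
	fds, err := unix.Socketpair(unix.AF_UNIX, unix.SOCK_STREAM, 0)
	if err != nil {
		log.Fatal(err)
	}
	defer unix.Close(fds[0])
	defer unix.Close(fds[1])

	// LOCAL_PEERCRED at level SOL_LOCAL fills an Xucred with the peer's
	// uid and group list (Darwin-specific, as noted in the doc comment above).
	cred, err := unix.GetsockoptXucred(fds[0], unix.SOL_LOCAL, unix.LOCAL_PEERCRED)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("peer uid=%d groups=%v\n", cred.Uid, cred.Groups[:cred.Ngroups])
}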

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build 386 && darwin
// +build 386,darwin
package unix
@ -45,6 +46,6 @@ func Syscall9(num, a1, a2, a3, a4, a5, a6, a7, a8, a9 uintptr) (r1, r2 uintptr,
//sys Fstatfs(fd int, stat *Statfs_t) (err error) = SYS_FSTATFS64
//sys getfsstat(buf unsafe.Pointer, size uintptr, flags int) (n int, err error) = SYS_GETFSSTAT64
//sys Lstat(path string, stat *Stat_t) (err error) = SYS_LSTAT64
//sys ptrace(request int, pid int, addr uintptr, data uintptr) (err error)
//sys ptrace1(request int, pid int, addr uintptr, data uintptr) (err error) = SYS_ptrace
//sys Stat(path string, stat *Stat_t) (err error) = SYS_STAT64
//sys Statfs(path string, stat *Statfs_t) (err error) = SYS_STATFS64

View File

@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build amd64 && darwin
// +build amd64,darwin
package unix
@ -45,6 +46,6 @@ func Syscall9(num, a1, a2, a3, a4, a5, a6, a7, a8, a9 uintptr) (r1, r2 uintptr,
//sys Fstatfs(fd int, stat *Statfs_t) (err error) = SYS_FSTATFS64
//sys getfsstat(buf unsafe.Pointer, size uintptr, flags int) (n int, err error) = SYS_GETFSSTAT64
//sys Lstat(path string, stat *Stat_t) (err error) = SYS_LSTAT64
//sys ptrace(request int, pid int, addr uintptr, data uintptr) (err error)
//sys ptrace1(request int, pid int, addr uintptr, data uintptr) (err error) = SYS_ptrace
//sys Stat(path string, stat *Stat_t) (err error) = SYS_STAT64
//sys Statfs(path string, stat *Statfs_t) (err error) = SYS_STATFS64

View File

@ -6,7 +6,7 @@ package unix
import "syscall"
func ptrace(request int, pid int, addr uintptr, data uintptr) error {
func ptrace1(request int, pid int, addr uintptr, data uintptr) error {
return ENOTSUP
}

Some files were not shown because too many files have changed in this diff.