Mirror of https://salsa.debian.org/mdosch/feed-to-muc.git (synced 2024-11-22 22:18:39 +01:00)

Commit de5bd63a6e (parent 8c4ec690a5): Update vendored dependencies.

154 changed files with 15041 additions and 1824 deletions
go.mod | 20

@@ -3,15 +3,17 @@ module salsa.debian.org/mdosch/feed-to-muc
go 1.14

require (
    github.com/PuerkitoBio/goquery v1.5.1-0.20190109230704-3dcf72e6c17f // indirect
    github.com/andybalholm/cascadia v1.0.1-0.20181012154424-680b6a57bda4 // indirect
    github.com/PuerkitoBio/goquery v1.6.0 // indirect
    github.com/andybalholm/cascadia v1.2.0 // indirect
    github.com/chilts/sid v0.0.0-20190607042430-660e94789ec9
    github.com/jaytaylor/html2text v0.0.0-20190408195923-01ec452cbe43
    github.com/mattn/go-runewidth v0.0.5-0.20181218000649-703b5e6b11ae // indirect
    github.com/mattn/go-xmpp v0.0.0-20190124093244-6093f50721ed
    github.com/mmcdole/gofeed v1.0.0-beta2.0.20190420154928-0e68beaf6fdf
    github.com/olekukonko/tablewriter v0.0.2-0.20190618033246-cc27d85e17ce // indirect
    github.com/jaytaylor/html2text v0.0.0-20200412013138-3577fbdbcff7
    github.com/mattn/go-runewidth v0.0.9 // indirect
    github.com/mattn/go-xmpp v0.0.0-20200309091041-899ef71e80d2
    github.com/mmcdole/gofeed v1.1.0
    github.com/mmcdole/goxpp v0.0.0-20200921145534-2f3784f67354 // indirect
    github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
    github.com/modern-go/reflect2 v1.0.1 // indirect
    github.com/olekukonko/tablewriter v0.0.4 // indirect
    github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf // indirect
    golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297 // indirect
    golang.org/x/text v0.3.3-0.20190829152558-3d0f7978add9 // indirect
    golang.org/x/net v0.0.0-20201024042810-be3efd7ff127 // indirect
)
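The non-indirect bumps in this hunk are the dependencies feed-to-muc calls directly: html2text, go-xmpp and gofeed. As a rough illustration of what the updated gofeed v1.1.0 and html2text APIs look like in use (the feed URL and the fallback handling below are placeholders, not code taken from feed-to-muc's source):

```go
package main

import (
	"fmt"
	"log"

	"github.com/jaytaylor/html2text"
	"github.com/mmcdole/gofeed"
)

func main() {
	// Fetch and parse a feed with the updated gofeed v1.1.0.
	fp := gofeed.NewParser()
	feed, err := fp.ParseURL("https://example.org/feed.xml") // placeholder URL
	if err != nil {
		log.Fatal(err)
	}

	for _, item := range feed.Items {
		// Flatten the item's HTML description to plain text, the way a
		// MUC message would need it.
		text, err := html2text.FromString(item.Description)
		if err != nil {
			text = item.Description // fall back to the raw HTML on error
		}
		fmt.Printf("%s\n%s\n%s\n\n", item.Title, text, item.Link)
	}
}
```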
go.sum | 79

@@ -1,42 +1,67 @@
|
|||
github.com/PuerkitoBio/goquery v1.5.0/go.mod h1:qD2PgZ9lccMbQlc7eEOjaeRlFQON7xY8kdmcsrnKqMg=
|
||||
github.com/PuerkitoBio/goquery v1.5.1-0.20190109230704-3dcf72e6c17f h1:cWOyRTtBcTBjB0c+GyaQaXgP3g1HVM1KbvZL/Q5QNAM=
|
||||
github.com/PuerkitoBio/goquery v1.5.1-0.20190109230704-3dcf72e6c17f/go.mod h1:qD2PgZ9lccMbQlc7eEOjaeRlFQON7xY8kdmcsrnKqMg=
|
||||
github.com/andybalholm/cascadia v1.0.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
|
||||
github.com/andybalholm/cascadia v1.0.1-0.20181012154424-680b6a57bda4 h1:0VxwDG1tnYzSd2wMWuTjZtwdm5ZB0Drjt+HaJHeCho0=
|
||||
github.com/andybalholm/cascadia v1.0.1-0.20181012154424-680b6a57bda4/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
github.com/PuerkitoBio/goquery v1.5.1/go.mod h1:GsLWisAFVj4WgDibEWF4pvYnkVQBpKBKeU+7zCJoLcc=
|
||||
github.com/PuerkitoBio/goquery v1.6.0 h1:j7taAbelrdcsOlGeMenZxc2AWXD5fieT1/znArdnx94=
|
||||
github.com/PuerkitoBio/goquery v1.6.0/go.mod h1:GsLWisAFVj4WgDibEWF4pvYnkVQBpKBKeU+7zCJoLcc=
|
||||
github.com/andybalholm/cascadia v1.1.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
|
||||
github.com/andybalholm/cascadia v1.2.0 h1:vuRCkM5Ozh/BfmsaTm26kbjm0mIOM3yS5Ek/F5h18aE=
|
||||
github.com/andybalholm/cascadia v1.2.0/go.mod h1:YCyR8vOZT9aZ1CHEd8ap0gMVm2aFgxBp0T0eFw1RUQY=
|
||||
github.com/chilts/sid v0.0.0-20190607042430-660e94789ec9 h1:z0uK8UQqjMVYzvk4tiiu3obv2B44+XBsvgEJREQfnO8=
|
||||
github.com/chilts/sid v0.0.0-20190607042430-660e94789ec9/go.mod h1:Jl2neWsQaDanWORdqZ4emBl50J4/aRBBS4FyyG9/PFo=
|
||||
github.com/codegangsta/cli v1.20.0/go.mod h1:/qJNoX69yVSKu5o4jLyXAENLRyk1uhi7zkbQ3slBdOA=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/jaytaylor/html2text v0.0.0-20190408195923-01ec452cbe43 h1:jTkyeF7NZ5oIr0ESmcrpiDgAfoidCBF4F5kJhjtaRwE=
|
||||
github.com/jaytaylor/html2text v0.0.0-20190408195923-01ec452cbe43/go.mod h1:CVKlgaMiht+LXvHG173ujK6JUhZXKb2u/BQtjPDIvyk=
|
||||
github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
|
||||
github.com/mattn/go-runewidth v0.0.5-0.20181218000649-703b5e6b11ae h1:575usOHCDzxtuyWeSjPySew7uvoOuaCsM0uUesjgAo4=
|
||||
github.com/mattn/go-runewidth v0.0.5-0.20181218000649-703b5e6b11ae/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
|
||||
github.com/mattn/go-xmpp v0.0.0-20190124093244-6093f50721ed h1:A1hEQg5M0b3Wg06pm3q/B0wdZsPjVQ/a2IgauQ8wCZo=
|
||||
github.com/mattn/go-xmpp v0.0.0-20190124093244-6093f50721ed/go.mod h1:Cs5mF0OsrRRmhkyOod//ldNPOwJsrBvJ+1WRspv0xoc=
|
||||
github.com/mmcdole/gofeed v1.0.0-beta2.0.20190420154928-0e68beaf6fdf h1:poo3e5STwUVGyyUX0e3fHQUwT1tV8IYEFUUpYd/7LuA=
|
||||
github.com/mmcdole/gofeed v1.0.0-beta2.0.20190420154928-0e68beaf6fdf/go.mod h1:tkVcyzS3qVMlQrQxJoEH1hkTiuo9a8emDzkMi7TZBu0=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/jaytaylor/html2text v0.0.0-20200412013138-3577fbdbcff7 h1:g0fAGBisHaEQ0TRq1iBvemFRf+8AEWEmBESSiWB3Vsc=
|
||||
github.com/jaytaylor/html2text v0.0.0-20200412013138-3577fbdbcff7/go.mod h1:CVKlgaMiht+LXvHG173ujK6JUhZXKb2u/BQtjPDIvyk=
|
||||
github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68=
|
||||
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||
github.com/mattn/go-runewidth v0.0.7/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
|
||||
github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
|
||||
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
|
||||
github.com/mattn/go-xmpp v0.0.0-20200309091041-899ef71e80d2 h1:F544zRtDc/pMpFNHN46oeXV2jIAG4DoMH+6zlVSn0Q8=
|
||||
github.com/mattn/go-xmpp v0.0.0-20200309091041-899ef71e80d2/go.mod h1:Cs5mF0OsrRRmhkyOod//ldNPOwJsrBvJ+1WRspv0xoc=
|
||||
github.com/mmcdole/gofeed v1.1.0 h1:T2WrGLVJRV04PY2qwhEJLHCt9JiCtBhb6SmC8ZvJH08=
|
||||
github.com/mmcdole/gofeed v1.1.0/go.mod h1:PPiVwgDXLlz2N83KB4TrIim2lyYM5Zn7ZWH9Pi4oHUk=
|
||||
github.com/mmcdole/goxpp v0.0.0-20181012175147-0068e33feabf h1:sWGE2v+hO0Nd4yFU/S/mDBM5plIU8v/Qhfz41hkDIAI=
|
||||
github.com/mmcdole/goxpp v0.0.0-20181012175147-0068e33feabf/go.mod h1:pasqhqstspkosTneA62Nc+2p9SOBBYAPbnmRRWPQ0V8=
|
||||
github.com/olekukonko/tablewriter v0.0.1/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo=
|
||||
github.com/olekukonko/tablewriter v0.0.2-0.20190618033246-cc27d85e17ce h1:RLmZmfx/K62HKpbwPqtW3tg+V2GgugN/XNNx+uiMH/Y=
|
||||
github.com/olekukonko/tablewriter v0.0.2-0.20190618033246-cc27d85e17ce/go.mod h1:rSAaSIOAGT9odnlyGlUfAJaoc5w2fSBUmeGDbRWPxyQ=
|
||||
github.com/mmcdole/goxpp v0.0.0-20200921145534-2f3784f67354 h1:Z6i7ND25ixRtXFBylIUggqpvLMV1I15yprcqMVB7WZA=
|
||||
github.com/mmcdole/goxpp v0.0.0-20200921145534-2f3784f67354/go.mod h1:pasqhqstspkosTneA62Nc+2p9SOBBYAPbnmRRWPQ0V8=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 h1:Esafd1046DLDQ0W1YjYsBW+p8U2u7vzgW2SQVmlNazg=
|
||||
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
|
||||
github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI=
|
||||
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
|
||||
github.com/olekukonko/tablewriter v0.0.4 h1:vHD/YYe1Wolo78koG299f7V/VAS08c6IpCLn+Ejf/w8=
|
||||
github.com/olekukonko/tablewriter v0.0.4/go.mod h1:zq6QwlOf5SlnkVbMSr5EoBv3636FWnp+qbPhuoO21uA=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
|
||||
github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf h1:pvbZ0lM0XWPBqUKqFU8cmavspvIl9nulOYwdy6IFRRo=
|
||||
github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf/go.mod h1:RJID2RhlZKId02nZ62WenDCkgHFerpIOmW0iT7GKmXM=
|
||||
github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/urfave/cli v1.22.3/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297 h1:k7pJ2yAPLPgbskkFdhRCsA77k2fySZ1zf2zCjvQCiIM=
|
||||
golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20201024042810-be3efd7ff127 h1:pZPp9+iYUqwYKLjht0SDBbRCRK/9gAXDy7pz5fRDpjo=
|
||||
golang.org/x/net v0.0.0-20201024042810-be3efd7ff127/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.3-0.20190829152558-3d0f7978add9 h1:SiG/YZHGpKRm5dMlAIMyiH/U4tSjw72M90ryHlc/rto=
|
||||
golang.org/x/text v0.3.3-0.20190829152558-3d0f7978add9/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||
golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
|
|
vendor/github.com/PuerkitoBio/goquery/.travis.yml (generated, vendored) | 5

@@ -1,7 +1,6 @@
language: go

go:
- 1.1
- 1.2.x
- 1.3.x
- 1.4.x
@@ -10,7 +9,9 @@ go:
- 1.7.x
- 1.8.x
- 1.9.x
- "1.10.x"
- 1.10.x
- 1.11.x
- 1.12.x
- 1.13.x
- tip
vendor/github.com/PuerkitoBio/goquery/README.md (generated, vendored) | 8

@@ -37,6 +37,8 @@ Please note that because of the net/html dependency, goquery requires Go1.1+.
|
|||
|
||||
**Note that goquery's API is now stable, and will not break.**
|
||||
|
||||
* **2020-10-08 (v1.6.0)** : Parse html in context of the container node for all functions that deal with html strings (`AfterHtml`, `AppendHtml`, etc.). Thanks to [@thiemok][thiemok] and [@davidjwilkins][djw] for their work on this.
|
||||
* **2020-02-04 (v1.5.1)** : Update module dependencies.
|
||||
* **2018-11-15 (v1.5.0)** : Go module support (thanks @Zaba505).
|
||||
* **2018-06-07 (v1.4.1)** : Add `NewDocumentFromReader` examples.
|
||||
* **2018-03-24 (v1.4.0)** : Deprecate `NewDocument(url)` and `NewDocumentFromResponse(response)`.
|
||||
|
@@ -138,11 +140,13 @@ func main() {
|
|||
- [Goq][goq], an HTML deserialization and scraping library based on goquery and struct tags.
|
||||
- [andybalholm/cascadia][cascadia], the CSS selector library used by goquery.
|
||||
- [suntong/cascadia][cascadiacli], a command-line interface to the cascadia CSS selector library, useful to test selectors.
|
||||
- [asciimoo/colly](https://github.com/asciimoo/colly), a lightning fast and elegant Scraping Framework
|
||||
- [gocolly/colly](https://github.com/gocolly/colly), a lightning fast and elegant Scraping Framework
|
||||
- [gnulnx/goperf](https://github.com/gnulnx/goperf), a website performance test tool that also fetches static assets.
|
||||
- [MontFerret/ferret](https://github.com/MontFerret/ferret), declarative web scraping.
|
||||
- [tacusci/berrycms](https://github.com/tacusci/berrycms), a modern simple to use CMS with easy to write plugins
|
||||
- [Dataflow kit](https://github.com/slotix/dataflowkit), Web Scraping framework for Gophers.
|
||||
- [Geziyor](https://github.com/geziyor/geziyor), a fast web crawling & scraping framework for Go. Supports JS rendering.
|
||||
- [Pagser](https://github.com/foolin/pagser), a simple, easy, extensible, configurable HTML parser to struct based on goquery and struct tags.
|
||||
|
||||
## Support
|
||||
|
||||
|
@@ -179,3 +183,5 @@ The [BSD 3-Clause license][bsd], the same as the [Go language][golic]. Cascadia'
|
|||
[thatguystone]: https://github.com/thatguystone
|
||||
[piotr]: https://github.com/piotrkowalczuk
|
||||
[goq]: https://github.com/andrewstuart/goq
|
||||
[thiemok]: https://github.com/thiemok
|
||||
[djw]: https://github.com/davidjwilkins
|
||||
|
|
vendor/github.com/PuerkitoBio/goquery/go.mod (generated, vendored) | 6

@@ -1,6 +1,8 @@
module github.com/PuerkitoBio/goquery

require (
    github.com/andybalholm/cascadia v1.0.0
    golang.org/x/net v0.0.0-20181114220301-adae6a3d119a
    github.com/andybalholm/cascadia v1.1.0
    golang.org/x/net v0.0.0-20200202094626-16171245cfb2
)

go 1.13
vendor/github.com/PuerkitoBio/goquery/go.sum (generated, vendored) | 11

@@ -1,5 +1,8 @@
|
|||
github.com/andybalholm/cascadia v1.0.0 h1:hOCXnnZ5A+3eVDX8pvgl4kofXv2ELss0bKcqRySc45o=
|
||||
github.com/andybalholm/cascadia v1.0.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
|
||||
github.com/andybalholm/cascadia v1.1.0 h1:BuuO6sSfQNFRu1LppgbD25Hr2vLYW25JvxHs5zzsLTo=
|
||||
github.com/andybalholm/cascadia v1.1.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a h1:gOpx8G595UYyvj8UK4+OFyY4rx037g3fmfhe5SasG3U=
|
||||
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20200202094626-16171245cfb2 h1:CCH4IOTTfewWjGOlSp+zGcjutRKlBEZQ6wTn8ozI/nI=
|
||||
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
|
|
vendor/github.com/PuerkitoBio/goquery/manipulation.go (generated, vendored) | 164

@@ -39,8 +39,15 @@ func (s *Selection) AfterSelection(sel *Selection) *Selection {
|
|||
// AfterHtml parses the html and inserts it after the set of matched elements.
|
||||
//
|
||||
// This follows the same rules as Selection.Append.
|
||||
func (s *Selection) AfterHtml(html string) *Selection {
|
||||
return s.AfterNodes(parseHtml(html)...)
|
||||
func (s *Selection) AfterHtml(htmlStr string) *Selection {
|
||||
return s.eachNodeHtml(htmlStr, true, func(node *html.Node, nodes []*html.Node) {
|
||||
nextSibling := node.NextSibling
|
||||
for _, n := range nodes {
|
||||
if node.Parent != nil {
|
||||
node.Parent.InsertBefore(n, nextSibling)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// AfterNodes inserts the nodes after each element in the set of matched elements.
|
||||
|
@@ -85,8 +92,12 @@ func (s *Selection) AppendSelection(sel *Selection) *Selection {
|
|||
}
|
||||
|
||||
// AppendHtml parses the html and appends it to the set of matched elements.
|
||||
func (s *Selection) AppendHtml(html string) *Selection {
|
||||
return s.AppendNodes(parseHtml(html)...)
|
||||
func (s *Selection) AppendHtml(htmlStr string) *Selection {
|
||||
return s.eachNodeHtml(htmlStr, false, func(node *html.Node, nodes []*html.Node) {
|
||||
for _, n := range nodes {
|
||||
node.AppendChild(n)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// AppendNodes appends the specified nodes to each node in the set of matched elements.
|
||||
|
@@ -123,8 +134,14 @@ func (s *Selection) BeforeSelection(sel *Selection) *Selection {
|
|||
// BeforeHtml parses the html and inserts it before the set of matched elements.
|
||||
//
|
||||
// This follows the same rules as Selection.Append.
|
||||
func (s *Selection) BeforeHtml(html string) *Selection {
|
||||
return s.BeforeNodes(parseHtml(html)...)
|
||||
func (s *Selection) BeforeHtml(htmlStr string) *Selection {
|
||||
return s.eachNodeHtml(htmlStr, true, func(node *html.Node, nodes []*html.Node) {
|
||||
for _, n := range nodes {
|
||||
if node.Parent != nil {
|
||||
node.Parent.InsertBefore(n, node)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// BeforeNodes inserts the nodes before each element in the set of matched elements.
|
||||
|
@@ -184,8 +201,13 @@ func (s *Selection) PrependSelection(sel *Selection) *Selection {
|
|||
}
|
||||
|
||||
// PrependHtml parses the html and prepends it to the set of matched elements.
|
||||
func (s *Selection) PrependHtml(html string) *Selection {
|
||||
return s.PrependNodes(parseHtml(html)...)
|
||||
func (s *Selection) PrependHtml(htmlStr string) *Selection {
|
||||
return s.eachNodeHtml(htmlStr, false, func(node *html.Node, nodes []*html.Node) {
|
||||
firstChild := node.FirstChild
|
||||
for _, n := range nodes {
|
||||
node.InsertBefore(n, firstChild)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// PrependNodes prepends the specified nodes to each node in the set of
|
||||
|
@@ -212,14 +234,19 @@ func (s *Selection) Remove() *Selection {
|
|||
return s
|
||||
}
|
||||
|
||||
// RemoveFiltered removes the set of matched elements by selector.
|
||||
// It returns the Selection of removed nodes.
|
||||
// RemoveFiltered removes from the current set of matched elements those that
|
||||
// match the selector filter. It returns the Selection of removed nodes.
|
||||
//
|
||||
// For example if the selection s contains "<h1>", "<h2>" and "<h3>"
|
||||
// and s.RemoveFiltered("h2") is called, only the "<h2>" node is removed
|
||||
// (and returned), while "<h1>" and "<h3>" are kept in the document.
|
||||
func (s *Selection) RemoveFiltered(selector string) *Selection {
|
||||
return s.RemoveMatcher(compileMatcher(selector))
|
||||
}
|
||||
|
||||
// RemoveMatcher removes the set of matched elements.
|
||||
// It returns the Selection of removed nodes.
|
||||
// RemoveMatcher removes from the current set of matched elements those that
|
||||
// match the Matcher filter. It returns the Selection of removed nodes.
|
||||
// See RemoveFiltered for additional information.
|
||||
func (s *Selection) RemoveMatcher(m Matcher) *Selection {
|
||||
return s.FilterMatcher(m).Remove()
|
||||
}
|
||||
|
@@ -256,8 +283,16 @@ func (s *Selection) ReplaceWithSelection(sel *Selection) *Selection {
|
|||
// It returns the removed elements.
|
||||
//
|
||||
// This follows the same rules as Selection.Append.
|
||||
func (s *Selection) ReplaceWithHtml(html string) *Selection {
|
||||
return s.ReplaceWithNodes(parseHtml(html)...)
|
||||
func (s *Selection) ReplaceWithHtml(htmlStr string) *Selection {
|
||||
s.eachNodeHtml(htmlStr, true, func(node *html.Node, nodes []*html.Node) {
|
||||
nextSibling := node.NextSibling
|
||||
for _, n := range nodes {
|
||||
if node.Parent != nil {
|
||||
node.Parent.InsertBefore(n, nextSibling)
|
||||
}
|
||||
}
|
||||
})
|
||||
return s.Remove()
|
||||
}
|
||||
|
||||
// ReplaceWithNodes replaces each element in the set of matched elements with
|
||||
|
@@ -272,8 +307,17 @@ func (s *Selection) ReplaceWithNodes(ns ...*html.Node) *Selection {
|
|||
|
||||
// SetHtml sets the html content of each element in the selection to
|
||||
// specified html string.
|
||||
func (s *Selection) SetHtml(html string) *Selection {
|
||||
return setHtmlNodes(s, parseHtml(html)...)
|
||||
func (s *Selection) SetHtml(htmlStr string) *Selection {
|
||||
for _, context := range s.Nodes {
|
||||
for c := context.FirstChild; c != nil; c = context.FirstChild {
|
||||
context.RemoveChild(c)
|
||||
}
|
||||
}
|
||||
return s.eachNodeHtml(htmlStr, false, func(node *html.Node, nodes []*html.Node) {
|
||||
for _, n := range nodes {
|
||||
node.AppendChild(n)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// SetText sets the content of each element in the selection to specified content.
|
||||
|
@@ -329,8 +373,23 @@ func (s *Selection) WrapSelection(sel *Selection) *Selection {
|
|||
// most child of the given HTML.
|
||||
//
|
||||
// It returns the original set of elements.
|
||||
func (s *Selection) WrapHtml(html string) *Selection {
|
||||
return s.wrapNodes(parseHtml(html)...)
|
||||
func (s *Selection) WrapHtml(htmlStr string) *Selection {
|
||||
nodesMap := make(map[string][]*html.Node)
|
||||
for _, context := range s.Nodes {
|
||||
var parent *html.Node
|
||||
if context.Parent != nil {
|
||||
parent = context.Parent
|
||||
} else {
|
||||
parent = &html.Node{Type: html.ElementNode}
|
||||
}
|
||||
nodes, found := nodesMap[nodeName(parent)]
|
||||
if !found {
|
||||
nodes = parseHtmlWithContext(htmlStr, parent)
|
||||
nodesMap[nodeName(parent)] = nodes
|
||||
}
|
||||
newSingleSelection(context, s.document).wrapAllNodes(cloneNodes(nodes)...)
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// WrapNode wraps each element in the set of matched elements inside the inner-
|
||||
|
@@ -382,8 +441,18 @@ func (s *Selection) WrapAllSelection(sel *Selection) *Selection {
|
|||
// document.
|
||||
//
|
||||
// It returns the original set of elements.
|
||||
func (s *Selection) WrapAllHtml(html string) *Selection {
|
||||
return s.wrapAllNodes(parseHtml(html)...)
|
||||
func (s *Selection) WrapAllHtml(htmlStr string) *Selection {
|
||||
var context *html.Node
|
||||
var nodes []*html.Node
|
||||
if len(s.Nodes) > 0 {
|
||||
context = s.Nodes[0]
|
||||
if context.Parent != nil {
|
||||
nodes = parseHtmlWithContext(htmlStr, context)
|
||||
} else {
|
||||
nodes = parseHtml(htmlStr)
|
||||
}
|
||||
}
|
||||
return s.wrapAllNodes(nodes...)
|
||||
}
|
||||
|
||||
func (s *Selection) wrapAllNodes(ns ...*html.Node) *Selection {
|
||||
|
@@ -452,8 +521,17 @@ func (s *Selection) WrapInnerSelection(sel *Selection) *Selection {
|
|||
// cloned before being inserted into the document.
|
||||
//
|
||||
// It returns the original set of elements.
|
||||
func (s *Selection) WrapInnerHtml(html string) *Selection {
|
||||
return s.wrapInnerNodes(parseHtml(html)...)
|
||||
func (s *Selection) WrapInnerHtml(htmlStr string) *Selection {
|
||||
nodesMap := make(map[string][]*html.Node)
|
||||
for _, context := range s.Nodes {
|
||||
nodes, found := nodesMap[nodeName(context)]
|
||||
if !found {
|
||||
nodes = parseHtmlWithContext(htmlStr, context)
|
||||
nodesMap[nodeName(context)] = nodes
|
||||
}
|
||||
newSingleSelection(context, s.document).wrapInnerNodes(cloneNodes(nodes)...)
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// WrapInnerNode wraps an HTML structure, matched by the given selector, around
|
||||
|
@@ -493,16 +571,14 @@ func parseHtml(h string) []*html.Node {
|
|||
return nodes
|
||||
}
|
||||
|
||||
func setHtmlNodes(s *Selection, ns ...*html.Node) *Selection {
|
||||
for _, n := range s.Nodes {
|
||||
for c := n.FirstChild; c != nil; c = n.FirstChild {
|
||||
n.RemoveChild(c)
|
||||
func parseHtmlWithContext(h string, context *html.Node) []*html.Node {
|
||||
// Errors are only returned when the io.Reader returns any error besides
|
||||
// EOF, but strings.Reader never will
|
||||
nodes, err := html.ParseFragment(strings.NewReader(h), context)
|
||||
if err != nil {
|
||||
panic("goquery: failed to parse HTML: " + err.Error())
|
||||
}
|
||||
for _, c := range ns {
|
||||
n.AppendChild(cloneNode(c))
|
||||
}
|
||||
}
|
||||
return s
|
||||
return nodes
|
||||
}
|
||||
|
||||
// Get the first child that is an ElementNode
|
||||
|
@@ -572,3 +648,29 @@ func (s *Selection) manipulateNodes(ns []*html.Node, reverse bool,
|
|||
|
||||
return s
|
||||
}
|
||||
|
||||
// eachNodeHtml parses the given html string and inserts the resulting nodes in the dom with the mergeFn.
|
||||
// The parsed nodes are inserted for each element of the selection.
|
||||
// isParent can be used to indicate that the elements of the selection should be treated as the parent for the parsed html.
|
||||
// A cache is used to avoid parsing the html multiple times should the elements of the selection result in the same context.
|
||||
func (s *Selection) eachNodeHtml(htmlStr string, isParent bool, mergeFn func(n *html.Node, nodes []*html.Node)) *Selection {
|
||||
// cache to avoid parsing the html for the same context multiple times
|
||||
nodeCache := make(map[string][]*html.Node)
|
||||
var context *html.Node
|
||||
for _, n := range s.Nodes {
|
||||
if isParent {
|
||||
context = n.Parent
|
||||
} else {
|
||||
context = n
|
||||
}
|
||||
if context != nil {
|
||||
nodes, found := nodeCache[nodeName(context)]
|
||||
if !found {
|
||||
nodes = parseHtmlWithContext(htmlStr, context)
|
||||
nodeCache[nodeName(context)] = nodes
|
||||
}
|
||||
mergeFn(n, cloneNodes(nodes))
|
||||
}
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
|
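These rewrites are the substance of the goquery v1.6.0 change noted in the README hunk above: every *Html method now parses its fragment in the context of the target node (via parseHtmlWithContext and the eachNodeHtml cache) instead of in a generic body context. A small sketch of the visible effect, assuming the v1.6.0 behaviour as vendored here (the expected output is stated in a comment, not a captured run):

```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(
		`<table><tbody><tr id="row"><td>old</td></tr></tbody></table>`))
	if err != nil {
		log.Fatal(err)
	}

	// With v1.6.0 the fragment is parsed in the context of the selected <tr>,
	// so the <td> survives; a plain body-context parse would have discarded
	// the <td> wrapper and left only its text.
	doc.Find("#row").SetHtml(`<td>new</td>`)

	html, _ := doc.Find("table").Html()
	fmt.Println(html) // expected to contain <td>new</td>
}
```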
vendor/github.com/PuerkitoBio/goquery/utilities.go (generated, vendored) | 18

@@ -36,12 +36,22 @@ func NodeName(s *Selection) string {
|
|||
if s.Length() == 0 {
|
||||
return ""
|
||||
}
|
||||
switch n := s.Get(0); n.Type {
|
||||
return nodeName(s.Get(0))
|
||||
}
|
||||
|
||||
// nodeName returns the node name of the given html node.
|
||||
// See NodeName for additional details on behaviour.
|
||||
func nodeName(node *html.Node) string {
|
||||
if node == nil {
|
||||
return ""
|
||||
}
|
||||
|
||||
switch node.Type {
|
||||
case html.ElementNode, html.DoctypeNode:
|
||||
return n.Data
|
||||
return node.Data
|
||||
default:
|
||||
if n.Type >= 0 && int(n.Type) < len(nodeNames) {
|
||||
return nodeNames[n.Type]
|
||||
if node.Type >= 0 && int(node.Type) < len(nodeNames) {
|
||||
return nodeNames[node.Type]
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
|
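The hunk above only extracts an unexported nodeName helper (used by the new fragment-parsing cache to key on a context node's name); the exported NodeName keeps its behaviour. A trivial sketch of that unchanged API:

```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader("<p>hi</p>"))
	if err != nil {
		log.Fatal(err)
	}
	// NodeName reports the tag name for element nodes ("p" here) and the
	// node-type name (e.g. "#text") for non-element nodes.
	fmt.Println(goquery.NodeName(doc.Find("p")))
	fmt.Println(goquery.NodeName(doc.Find("p").Contents()))
}
```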
vendor/github.com/andybalholm/cascadia/go.mod (generated, vendored) | 6

@@ -1,3 +1,5 @@
module "github.com/andybalholm/cascadia"
module github.com/andybalholm/cascadia

require "golang.org/x/net" v0.0.0-20180218175443-cbe0f9307d01
require golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01

go 1.13
vendor/github.com/andybalholm/cascadia/parser.go (generated, vendored) | 315

@@ -7,14 +7,16 @@ import (
|
|||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/net/html"
|
||||
)
|
||||
|
||||
// a parser for CSS selectors
|
||||
type parser struct {
|
||||
s string // the source text
|
||||
i int // the current position
|
||||
|
||||
// if `false`, parsing a pseudo-element
|
||||
// returns an error.
|
||||
acceptPseudoElements bool
|
||||
}
|
||||
|
||||
// parseEscape parses a backslash escape.
|
||||
|
@@ -31,7 +33,7 @@ func (p *parser) parseEscape() (result string, err error) {
|
|||
case hexDigit(c):
|
||||
// unicode escape (hex)
|
||||
var i int
|
||||
for i = start; i < p.i+6 && i < len(p.s) && hexDigit(p.s[i]); i++ {
|
||||
for i = start; i < start+6 && i < len(p.s) && hexDigit(p.s[i]); i++ {
|
||||
// empty
|
||||
}
|
||||
v, _ := strconv.ParseUint(p.s[start:i], 16, 21)
|
||||
|
@@ -56,6 +58,26 @@ func (p *parser) parseEscape() (result string, err error) {
|
|||
return result, nil
|
||||
}
|
||||
|
||||
// toLowerASCII returns s with all ASCII capital letters lowercased.
|
||||
func toLowerASCII(s string) string {
|
||||
var b []byte
|
||||
for i := 0; i < len(s); i++ {
|
||||
if c := s[i]; 'A' <= c && c <= 'Z' {
|
||||
if b == nil {
|
||||
b = make([]byte, len(s))
|
||||
copy(b, s)
|
||||
}
|
||||
b[i] = s[i] + ('a' - 'A')
|
||||
}
|
||||
}
|
||||
|
||||
if b == nil {
|
||||
return s
|
||||
}
|
||||
|
||||
return string(b)
|
||||
}
|
||||
|
||||
func hexDigit(c byte) bool {
|
||||
return '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F'
|
||||
}
|
||||
|
@@ -280,92 +302,92 @@ func (p *parser) consumeClosingParenthesis() bool {
|
|||
}
|
||||
|
||||
// parseTypeSelector parses a type selector (one that matches by tag name).
|
||||
func (p *parser) parseTypeSelector() (result Selector, err error) {
|
||||
func (p *parser) parseTypeSelector() (result tagSelector, err error) {
|
||||
tag, err := p.parseIdentifier()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return
|
||||
}
|
||||
|
||||
return typeSelector(tag), nil
|
||||
return tagSelector{tag: toLowerASCII(tag)}, nil
|
||||
}
|
||||
|
||||
// parseIDSelector parses a selector that matches by id attribute.
|
||||
func (p *parser) parseIDSelector() (Selector, error) {
|
||||
func (p *parser) parseIDSelector() (idSelector, error) {
|
||||
if p.i >= len(p.s) {
|
||||
return nil, fmt.Errorf("expected id selector (#id), found EOF instead")
|
||||
return idSelector{}, fmt.Errorf("expected id selector (#id), found EOF instead")
|
||||
}
|
||||
if p.s[p.i] != '#' {
|
||||
return nil, fmt.Errorf("expected id selector (#id), found '%c' instead", p.s[p.i])
|
||||
return idSelector{}, fmt.Errorf("expected id selector (#id), found '%c' instead", p.s[p.i])
|
||||
}
|
||||
|
||||
p.i++
|
||||
id, err := p.parseName()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return idSelector{}, err
|
||||
}
|
||||
|
||||
return attributeEqualsSelector("id", id), nil
|
||||
return idSelector{id: id}, nil
|
||||
}
|
||||
|
||||
// parseClassSelector parses a selector that matches by class attribute.
|
||||
func (p *parser) parseClassSelector() (Selector, error) {
|
||||
func (p *parser) parseClassSelector() (classSelector, error) {
|
||||
if p.i >= len(p.s) {
|
||||
return nil, fmt.Errorf("expected class selector (.class), found EOF instead")
|
||||
return classSelector{}, fmt.Errorf("expected class selector (.class), found EOF instead")
|
||||
}
|
||||
if p.s[p.i] != '.' {
|
||||
return nil, fmt.Errorf("expected class selector (.class), found '%c' instead", p.s[p.i])
|
||||
return classSelector{}, fmt.Errorf("expected class selector (.class), found '%c' instead", p.s[p.i])
|
||||
}
|
||||
|
||||
p.i++
|
||||
class, err := p.parseIdentifier()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return classSelector{}, err
|
||||
}
|
||||
|
||||
return attributeIncludesSelector("class", class), nil
|
||||
return classSelector{class: class}, nil
|
||||
}
|
||||
|
||||
// parseAttributeSelector parses a selector that matches by attribute value.
|
||||
func (p *parser) parseAttributeSelector() (Selector, error) {
|
||||
func (p *parser) parseAttributeSelector() (attrSelector, error) {
|
||||
if p.i >= len(p.s) {
|
||||
return nil, fmt.Errorf("expected attribute selector ([attribute]), found EOF instead")
|
||||
return attrSelector{}, fmt.Errorf("expected attribute selector ([attribute]), found EOF instead")
|
||||
}
|
||||
if p.s[p.i] != '[' {
|
||||
return nil, fmt.Errorf("expected attribute selector ([attribute]), found '%c' instead", p.s[p.i])
|
||||
return attrSelector{}, fmt.Errorf("expected attribute selector ([attribute]), found '%c' instead", p.s[p.i])
|
||||
}
|
||||
|
||||
p.i++
|
||||
p.skipWhitespace()
|
||||
key, err := p.parseIdentifier()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return attrSelector{}, err
|
||||
}
|
||||
key = toLowerASCII(key)
|
||||
|
||||
p.skipWhitespace()
|
||||
if p.i >= len(p.s) {
|
||||
return nil, errors.New("unexpected EOF in attribute selector")
|
||||
return attrSelector{}, errors.New("unexpected EOF in attribute selector")
|
||||
}
|
||||
|
||||
if p.s[p.i] == ']' {
|
||||
p.i++
|
||||
return attributeExistsSelector(key), nil
|
||||
return attrSelector{key: key, operation: ""}, nil
|
||||
}
|
||||
|
||||
if p.i+2 >= len(p.s) {
|
||||
return nil, errors.New("unexpected EOF in attribute selector")
|
||||
return attrSelector{}, errors.New("unexpected EOF in attribute selector")
|
||||
}
|
||||
|
||||
op := p.s[p.i : p.i+2]
|
||||
if op[0] == '=' {
|
||||
op = "="
|
||||
} else if op[1] != '=' {
|
||||
return nil, fmt.Errorf(`expected equality operator, found "%s" instead`, op)
|
||||
return attrSelector{}, fmt.Errorf(`expected equality operator, found "%s" instead`, op)
|
||||
}
|
||||
p.i += len(op)
|
||||
|
||||
p.skipWhitespace()
|
||||
if p.i >= len(p.s) {
|
||||
return nil, errors.New("unexpected EOF in attribute selector")
|
||||
return attrSelector{}, errors.New("unexpected EOF in attribute selector")
|
||||
}
|
||||
var val string
|
||||
var rx *regexp.Regexp
|
||||
|
@@ -380,88 +402,84 @@ func (p *parser) parseAttributeSelector() (Selector, error) {
|
|||
}
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return attrSelector{}, err
|
||||
}
|
||||
|
||||
p.skipWhitespace()
|
||||
if p.i >= len(p.s) {
|
||||
return nil, errors.New("unexpected EOF in attribute selector")
|
||||
return attrSelector{}, errors.New("unexpected EOF in attribute selector")
|
||||
}
|
||||
if p.s[p.i] != ']' {
|
||||
return nil, fmt.Errorf("expected ']', found '%c' instead", p.s[p.i])
|
||||
return attrSelector{}, fmt.Errorf("expected ']', found '%c' instead", p.s[p.i])
|
||||
}
|
||||
p.i++
|
||||
|
||||
switch op {
|
||||
case "=":
|
||||
return attributeEqualsSelector(key, val), nil
|
||||
case "!=":
|
||||
return attributeNotEqualSelector(key, val), nil
|
||||
case "~=":
|
||||
return attributeIncludesSelector(key, val), nil
|
||||
case "|=":
|
||||
return attributeDashmatchSelector(key, val), nil
|
||||
case "^=":
|
||||
return attributePrefixSelector(key, val), nil
|
||||
case "$=":
|
||||
return attributeSuffixSelector(key, val), nil
|
||||
case "*=":
|
||||
return attributeSubstringSelector(key, val), nil
|
||||
case "#=":
|
||||
return attributeRegexSelector(key, rx), nil
|
||||
case "=", "!=", "~=", "|=", "^=", "$=", "*=", "#=":
|
||||
return attrSelector{key: key, val: val, operation: op, regexp: rx}, nil
|
||||
default:
|
||||
return attrSelector{}, fmt.Errorf("attribute operator %q is not supported", op)
|
||||
}
|
||||
|
||||
return nil, fmt.Errorf("attribute operator %q is not supported", op)
|
||||
}
|
||||
|
||||
var errExpectedParenthesis = errors.New("expected '(' but didn't find it")
|
||||
var errExpectedClosingParenthesis = errors.New("expected ')' but didn't find it")
|
||||
var errUnmatchedParenthesis = errors.New("unmatched '('")
|
||||
|
||||
// parsePseudoclassSelector parses a pseudoclass selector like :not(p).
|
||||
func (p *parser) parsePseudoclassSelector() (Selector, error) {
|
||||
// parsePseudoclassSelector parses a pseudoclass selector like :not(p) or a pseudo-element
|
||||
// For backwards compatibility, both ':' and '::' prefix are allowed for pseudo-elements.
|
||||
// https://drafts.csswg.org/selectors-3/#pseudo-elements
|
||||
// Returning a nil `Sel` (and a nil `error`) means we found a pseudo-element.
|
||||
func (p *parser) parsePseudoclassSelector() (out Sel, pseudoElement string, err error) {
|
||||
if p.i >= len(p.s) {
|
||||
return nil, fmt.Errorf("expected pseudoclass selector (:pseudoclass), found EOF instead")
|
||||
return nil, "", fmt.Errorf("expected pseudoclass selector (:pseudoclass), found EOF instead")
|
||||
}
|
||||
if p.s[p.i] != ':' {
|
||||
return nil, fmt.Errorf("expected attribute selector (:pseudoclass), found '%c' instead", p.s[p.i])
|
||||
return nil, "", fmt.Errorf("expected attribute selector (:pseudoclass), found '%c' instead", p.s[p.i])
|
||||
}
|
||||
|
||||
p.i++
|
||||
var mustBePseudoElement bool
|
||||
if p.i >= len(p.s) {
|
||||
return nil, "", fmt.Errorf("got empty pseudoclass (or pseudoelement)")
|
||||
}
|
||||
if p.s[p.i] == ':' { // we found a pseudo-element
|
||||
mustBePseudoElement = true
|
||||
p.i++
|
||||
}
|
||||
|
||||
name, err := p.parseIdentifier()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return
|
||||
}
|
||||
name = toLowerASCII(name)
|
||||
if mustBePseudoElement && (name != "after" && name != "backdrop" && name != "before" &&
|
||||
name != "cue" && name != "first-letter" && name != "first-line" && name != "grammar-error" &&
|
||||
name != "marker" && name != "placeholder" && name != "selection" && name != "spelling-error") {
|
||||
return out, "", fmt.Errorf("unknown pseudoelement :%s", name)
|
||||
}
|
||||
|
||||
switch name {
|
||||
case "not", "has", "haschild":
|
||||
if !p.consumeParenthesis() {
|
||||
return nil, errExpectedParenthesis
|
||||
return out, "", errExpectedParenthesis
|
||||
}
|
||||
sel, parseErr := p.parseSelectorGroup()
|
||||
if parseErr != nil {
|
||||
return nil, parseErr
|
||||
return out, "", parseErr
|
||||
}
|
||||
if !p.consumeClosingParenthesis() {
|
||||
return nil, errExpectedClosingParenthesis
|
||||
return out, "", errExpectedClosingParenthesis
|
||||
}
|
||||
|
||||
switch name {
|
||||
case "not":
|
||||
return negatedSelector(sel), nil
|
||||
case "has":
|
||||
return hasDescendantSelector(sel), nil
|
||||
case "haschild":
|
||||
return hasChildSelector(sel), nil
|
||||
}
|
||||
out = relativePseudoClassSelector{name: name, match: sel}
|
||||
|
||||
case "contains", "containsown":
|
||||
if !p.consumeParenthesis() {
|
||||
return nil, errExpectedParenthesis
|
||||
return out, "", errExpectedParenthesis
|
||||
}
|
||||
if p.i == len(p.s) {
|
||||
return nil, errUnmatchedParenthesis
|
||||
return out, "", errUnmatchedParenthesis
|
||||
}
|
||||
var val string
|
||||
switch p.s[p.i] {
|
||||
|
@@ -471,95 +489,75 @@ func (p *parser) parsePseudoclassSelector() (Selector, error) {
|
|||
val, err = p.parseIdentifier()
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return out, "", err
|
||||
}
|
||||
val = strings.ToLower(val)
|
||||
p.skipWhitespace()
|
||||
if p.i >= len(p.s) {
|
||||
return nil, errors.New("unexpected EOF in pseudo selector")
|
||||
return out, "", errors.New("unexpected EOF in pseudo selector")
|
||||
}
|
||||
if !p.consumeClosingParenthesis() {
|
||||
return nil, errExpectedClosingParenthesis
|
||||
return out, "", errExpectedClosingParenthesis
|
||||
}
|
||||
|
||||
switch name {
|
||||
case "contains":
|
||||
return textSubstrSelector(val), nil
|
||||
case "containsown":
|
||||
return ownTextSubstrSelector(val), nil
|
||||
}
|
||||
out = containsPseudoClassSelector{own: name == "containsown", value: val}
|
||||
|
||||
case "matches", "matchesown":
|
||||
if !p.consumeParenthesis() {
|
||||
return nil, errExpectedParenthesis
|
||||
return out, "", errExpectedParenthesis
|
||||
}
|
||||
rx, err := p.parseRegex()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return out, "", err
|
||||
}
|
||||
if p.i >= len(p.s) {
|
||||
return nil, errors.New("unexpected EOF in pseudo selector")
|
||||
return out, "", errors.New("unexpected EOF in pseudo selector")
|
||||
}
|
||||
if !p.consumeClosingParenthesis() {
|
||||
return nil, errExpectedClosingParenthesis
|
||||
return out, "", errExpectedClosingParenthesis
|
||||
}
|
||||
|
||||
switch name {
|
||||
case "matches":
|
||||
return textRegexSelector(rx), nil
|
||||
case "matchesown":
|
||||
return ownTextRegexSelector(rx), nil
|
||||
}
|
||||
out = regexpPseudoClassSelector{own: name == "matchesown", regexp: rx}
|
||||
|
||||
case "nth-child", "nth-last-child", "nth-of-type", "nth-last-of-type":
|
||||
if !p.consumeParenthesis() {
|
||||
return nil, errExpectedParenthesis
|
||||
return out, "", errExpectedParenthesis
|
||||
}
|
||||
a, b, err := p.parseNth()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return out, "", err
|
||||
}
|
||||
if !p.consumeClosingParenthesis() {
|
||||
return nil, errExpectedClosingParenthesis
|
||||
return out, "", errExpectedClosingParenthesis
|
||||
}
|
||||
if a == 0 {
|
||||
switch name {
|
||||
case "nth-child":
|
||||
return simpleNthChildSelector(b, false), nil
|
||||
case "nth-of-type":
|
||||
return simpleNthChildSelector(b, true), nil
|
||||
case "nth-last-child":
|
||||
return simpleNthLastChildSelector(b, false), nil
|
||||
case "nth-last-of-type":
|
||||
return simpleNthLastChildSelector(b, true), nil
|
||||
}
|
||||
}
|
||||
return nthChildSelector(a, b,
|
||||
name == "nth-last-child" || name == "nth-last-of-type",
|
||||
name == "nth-of-type" || name == "nth-last-of-type"),
|
||||
nil
|
||||
last := name == "nth-last-child" || name == "nth-last-of-type"
|
||||
ofType := name == "nth-of-type" || name == "nth-last-of-type"
|
||||
out = nthPseudoClassSelector{a: a, b: b, last: last, ofType: ofType}
|
||||
|
||||
case "first-child":
|
||||
return simpleNthChildSelector(1, false), nil
|
||||
out = nthPseudoClassSelector{a: 0, b: 1, ofType: false, last: false}
|
||||
case "last-child":
|
||||
return simpleNthLastChildSelector(1, false), nil
|
||||
out = nthPseudoClassSelector{a: 0, b: 1, ofType: false, last: true}
|
||||
case "first-of-type":
|
||||
return simpleNthChildSelector(1, true), nil
|
||||
out = nthPseudoClassSelector{a: 0, b: 1, ofType: true, last: false}
|
||||
case "last-of-type":
|
||||
return simpleNthLastChildSelector(1, true), nil
|
||||
out = nthPseudoClassSelector{a: 0, b: 1, ofType: true, last: true}
|
||||
case "only-child":
|
||||
return onlyChildSelector(false), nil
|
||||
out = onlyChildPseudoClassSelector{ofType: false}
|
||||
case "only-of-type":
|
||||
return onlyChildSelector(true), nil
|
||||
out = onlyChildPseudoClassSelector{ofType: true}
|
||||
case "input":
|
||||
return inputSelector, nil
|
||||
out = inputPseudoClassSelector{}
|
||||
case "empty":
|
||||
return emptyElementSelector, nil
|
||||
out = emptyElementPseudoClassSelector{}
|
||||
case "root":
|
||||
return rootSelector, nil
|
||||
out = rootPseudoClassSelector{}
|
||||
case "after", "backdrop", "before", "cue", "first-letter", "first-line", "grammar-error", "marker", "placeholder", "selection", "spelling-error":
|
||||
return nil, name, nil
|
||||
default:
|
||||
return out, "", fmt.Errorf("unknown pseudoclass or pseudoelement :%s", name)
|
||||
}
|
||||
|
||||
return nil, fmt.Errorf("unknown pseudoclass :%s", name)
|
||||
return
|
||||
}
|
||||
|
||||
// parseInteger parses a decimal integer.
|
||||
|
@@ -705,8 +703,8 @@ invalid:
|
|||
|
||||
// parseSimpleSelectorSequence parses a selector sequence that applies to
|
||||
// a single element.
|
||||
func (p *parser) parseSimpleSelectorSequence() (Selector, error) {
|
||||
var result Selector
|
||||
func (p *parser) parseSimpleSelectorSequence() (Sel, error) {
|
||||
var selectors []Sel
|
||||
|
||||
if p.i >= len(p.s) {
|
||||
return nil, errors.New("expected selector, found EOF instead")
|
||||
|
@@ -723,13 +721,17 @@ func (p *parser) parseSimpleSelectorSequence() (Selector, error) {
|
|||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result = r
|
||||
selectors = append(selectors, r)
|
||||
}
|
||||
|
||||
var pseudoElement string
|
||||
loop:
|
||||
for p.i < len(p.s) {
|
||||
var ns Selector
|
||||
var err error
|
||||
var (
|
||||
ns Sel
|
||||
newPseudoElement string
|
||||
err error
|
||||
)
|
||||
switch p.s[p.i] {
|
||||
case '#':
|
||||
ns, err = p.parseIDSelector()
|
||||
|
@@ -738,44 +740,57 @@ loop:
|
|||
case '[':
|
||||
ns, err = p.parseAttributeSelector()
|
||||
case ':':
|
||||
ns, err = p.parsePseudoclassSelector()
|
||||
ns, newPseudoElement, err = p.parsePseudoclassSelector()
|
||||
default:
|
||||
break loop
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if result == nil {
|
||||
result = ns
|
||||
// From https://drafts.csswg.org/selectors-3/#pseudo-elements :
|
||||
// "Only one pseudo-element may appear per selector, and if present
|
||||
// it must appear after the sequence of simple selectors that
|
||||
// represents the subjects of the selector.""
|
||||
if ns == nil { // we found a pseudo-element
|
||||
if pseudoElement != "" {
|
||||
return nil, fmt.Errorf("only one pseudo-element is accepted per selector, got %s and %s", pseudoElement, newPseudoElement)
|
||||
}
|
||||
if !p.acceptPseudoElements {
|
||||
return nil, fmt.Errorf("pseudo-element %s found, but pseudo-elements support is disabled", newPseudoElement)
|
||||
}
|
||||
pseudoElement = newPseudoElement
|
||||
} else {
|
||||
result = intersectionSelector(result, ns)
|
||||
if pseudoElement != "" {
|
||||
return nil, fmt.Errorf("pseudo-element %s must be at the end of selector", pseudoElement)
|
||||
}
|
||||
selectors = append(selectors, ns)
|
||||
}
|
||||
|
||||
if result == nil {
|
||||
result = func(n *html.Node) bool {
|
||||
return n.Type == html.ElementNode
|
||||
}
|
||||
if len(selectors) == 1 && pseudoElement == "" { // no need wrap the selectors in compoundSelector
|
||||
return selectors[0], nil
|
||||
}
|
||||
|
||||
return result, nil
|
||||
return compoundSelector{selectors: selectors, pseudoElement: pseudoElement}, nil
|
||||
}
|
||||
|
||||
// parseSelector parses a selector that may include combinators.
|
||||
func (p *parser) parseSelector() (result Selector, err error) {
|
||||
func (p *parser) parseSelector() (Sel, error) {
|
||||
p.skipWhitespace()
|
||||
result, err = p.parseSimpleSelectorSequence()
|
||||
result, err := p.parseSimpleSelectorSequence()
|
||||
if err != nil {
|
||||
return
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for {
|
||||
var combinator byte
|
||||
var (
|
||||
combinator byte
|
||||
c Sel
|
||||
)
|
||||
if p.skipWhitespace() {
|
||||
combinator = ' '
|
||||
}
|
||||
if p.i >= len(p.s) {
|
||||
return
|
||||
return result, nil
|
||||
}
|
||||
|
||||
switch p.s[p.i] {
|
||||
|
@@ -785,51 +800,39 @@ func (p *parser) parseSelector() (result Selector, err error) {
|
|||
p.skipWhitespace()
|
||||
case ',', ')':
|
||||
// These characters can't begin a selector, but they can legally occur after one.
|
||||
return
|
||||
return result, nil
|
||||
}
|
||||
|
||||
if combinator == 0 {
|
||||
return
|
||||
return result, nil
|
||||
}
|
||||
|
||||
c, err := p.parseSimpleSelectorSequence()
|
||||
c, err = p.parseSimpleSelectorSequence()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
switch combinator {
|
||||
case ' ':
|
||||
result = descendantSelector(result, c)
|
||||
case '>':
|
||||
result = childSelector(result, c)
|
||||
case '+':
|
||||
result = siblingSelector(result, c, true)
|
||||
case '~':
|
||||
result = siblingSelector(result, c, false)
|
||||
result = combinedSelector{first: result, combinator: combinator, second: c}
|
||||
}
|
||||
}
|
||||
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// parseSelectorGroup parses a group of selectors, separated by commas.
|
||||
func (p *parser) parseSelectorGroup() (result Selector, err error) {
|
||||
result, err = p.parseSelector()
|
||||
func (p *parser) parseSelectorGroup() (SelectorGroup, error) {
|
||||
current, err := p.parseSelector()
|
||||
if err != nil {
|
||||
return
|
||||
return nil, err
|
||||
}
|
||||
result := SelectorGroup{current}
|
||||
|
||||
for p.i < len(p.s) {
|
||||
if p.s[p.i] != ',' {
|
||||
return result, nil
|
||||
break
|
||||
}
|
||||
p.i++
|
||||
c, err := p.parseSelector()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result = unionSelector(result, c)
|
||||
result = append(result, c)
|
||||
}
|
||||
|
||||
return
|
||||
return result, nil
|
||||
}
|
||||
|
|
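The parser changes above thread pseudo-element support (the new acceptPseudoElements flag and the extra return value from parsePseudoclassSelector) through to the exported entry points shown in selector.go below. A short sketch of the resulting difference, assuming cascadia v1.2.0 as vendored in this commit:

```go
package main

import (
	"fmt"

	"github.com/andybalholm/cascadia"
)

func main() {
	// Plain Parse still rejects pseudo-elements...
	if _, err := cascadia.Parse("p::first-line"); err != nil {
		fmt.Println("Parse:", err)
	}

	// ...while ParseWithPseudoElement accepts them and reports the
	// pseudo-element separately from the matching part of the selector.
	sel, err := cascadia.ParseWithPseudoElement("p::first-line")
	if err != nil {
		fmt.Println("ParseWithPseudoElement:", err)
		return
	}
	fmt.Println("pseudo-element:", sel.PseudoElement()) // expected: first-line
}
```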
vendor/github.com/andybalholm/cascadia/selector.go (generated, vendored) | 790

@@ -9,36 +9,60 @@ import (
|
|||
"golang.org/x/net/html"
|
||||
)
|
||||
|
||||
// the Selector type, and functions for creating them
|
||||
|
||||
// A Selector is a function which tells whether a node matches or not.
|
||||
type Selector func(*html.Node) bool
|
||||
|
||||
// hasChildMatch returns whether n has any child that matches a.
|
||||
func hasChildMatch(n *html.Node, a Selector) bool {
|
||||
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||
if a(c) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
// Matcher is the interface for basic selector functionality.
|
||||
// Match returns whether a selector matches n.
|
||||
type Matcher interface {
|
||||
Match(n *html.Node) bool
|
||||
}
|
||||
|
||||
// hasDescendantMatch performs a depth-first search of n's descendants,
|
||||
// testing whether any of them match a. It returns true as soon as a match is
|
||||
// found, or false if no match is found.
|
||||
func hasDescendantMatch(n *html.Node, a Selector) bool {
|
||||
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||
if a(c) || (c.Type == html.ElementNode && hasDescendantMatch(c, a)) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
// Sel is the interface for all the functionality provided by selectors.
|
||||
type Sel interface {
|
||||
Matcher
|
||||
Specificity() Specificity
|
||||
|
||||
// Returns a CSS input compiling to this selector.
|
||||
String() string
|
||||
|
||||
// Returns a pseudo-element, or an empty string.
|
||||
PseudoElement() string
|
||||
}
|
||||
|
||||
// Compile parses a selector and returns, if successful, a Selector object
|
||||
// that can be used to match against html.Node objects.
|
||||
func Compile(sel string) (Selector, error) {
|
||||
// Parse parses a selector. Use `ParseWithPseudoElement`
|
||||
// if you need support for pseudo-elements.
|
||||
func Parse(sel string) (Sel, error) {
|
||||
p := &parser{s: sel}
|
||||
compiled, err := p.parseSelector()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if p.i < len(sel) {
|
||||
return nil, fmt.Errorf("parsing %q: %d bytes left over", sel, len(sel)-p.i)
|
||||
}
|
||||
|
||||
return compiled, nil
|
||||
}
|
||||
|
||||
// ParseWithPseudoElement parses a single selector,
|
||||
// with support for pseudo-element.
|
||||
func ParseWithPseudoElement(sel string) (Sel, error) {
|
||||
p := &parser{s: sel, acceptPseudoElements: true}
|
||||
compiled, err := p.parseSelector()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if p.i < len(sel) {
|
||||
return nil, fmt.Errorf("parsing %q: %d bytes left over", sel, len(sel)-p.i)
|
||||
}
|
||||
|
||||
return compiled, nil
|
||||
}
|
||||
|
||||
// ParseGroup parses a selector, or a group of selectors separated by commas.
|
||||
// Use `ParseGroupWithPseudoElements`
|
||||
// if you need support for pseudo-elements.
|
||||
func ParseGroup(sel string) (SelectorGroup, error) {
|
||||
p := &parser{s: sel}
|
||||
compiled, err := p.parseSelectorGroup()
|
||||
if err != nil {
|
||||
|
@@ -52,6 +76,39 @@ func Compile(sel string) (Selector, error) {
|
|||
return compiled, nil
|
||||
}
|
||||
|
||||
// ParseGroupWithPseudoElements parses a selector, or a group of selectors separated by commas.
|
||||
// It supports pseudo-elements.
|
||||
func ParseGroupWithPseudoElements(sel string) (SelectorGroup, error) {
|
||||
p := &parser{s: sel, acceptPseudoElements: true}
|
||||
compiled, err := p.parseSelectorGroup()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if p.i < len(sel) {
|
||||
return nil, fmt.Errorf("parsing %q: %d bytes left over", sel, len(sel)-p.i)
|
||||
}
|
||||
|
||||
return compiled, nil
|
||||
}
|
||||
|
||||
// A Selector is a function which tells whether a node matches or not.
|
||||
//
|
||||
// This type is maintained for compatibility; I recommend using the newer and
|
||||
// more idiomatic interfaces Sel and Matcher.
|
||||
type Selector func(*html.Node) bool
|
||||
|
||||
// Compile parses a selector and returns, if successful, a Selector object
|
||||
// that can be used to match against html.Node objects.
|
||||
func Compile(sel string) (Selector, error) {
|
||||
compiled, err := ParseGroup(sel)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return Selector(compiled.Match), nil
|
||||
}
|
||||
|
||||
// MustCompile is like Compile, but panics instead of returning an error.
|
||||
func MustCompile(sel string) Selector {
|
||||
compiled, err := Compile(sel)
|
||||
|
@@ -79,6 +136,23 @@ func (s Selector) matchAllInto(n *html.Node, storage []*html.Node) []*html.Node
|
|||
return storage
|
||||
}
|
||||
|
||||
func queryInto(n *html.Node, m Matcher, storage []*html.Node) []*html.Node {
|
||||
for child := n.FirstChild; child != nil; child = child.NextSibling {
|
||||
if m.Match(child) {
|
||||
storage = append(storage, child)
|
||||
}
|
||||
storage = queryInto(child, m, storage)
|
||||
}
|
||||
|
||||
return storage
|
||||
}
|
||||
|
||||
// QueryAll returns a slice of all the nodes that match m, from the descendants
|
||||
// of n.
|
||||
func QueryAll(n *html.Node, m Matcher) []*html.Node {
|
||||
return queryInto(n, m, nil)
|
||||
}
|
||||
|
||||
// Match returns true if the node matches the selector.
|
||||
func (s Selector) Match(n *html.Node) bool {
|
||||
return s(n)
|
||||
|
@@ -99,6 +173,21 @@ func (s Selector) MatchFirst(n *html.Node) *html.Node {
|
|||
return nil
|
||||
}
|
||||
|
||||
// Query returns the first node that matches m, from the descendants of n.
|
||||
// If none matches, it returns nil.
|
||||
func Query(n *html.Node, m Matcher) *html.Node {
|
||||
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||
if m.Match(c) {
|
||||
return c
|
||||
}
|
||||
if matched := Query(c, m); matched != nil {
|
||||
return matched
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Filter returns the nodes in nodes that match the selector.
|
||||
func (s Selector) Filter(nodes []*html.Node) (result []*html.Node) {
|
||||
for _, n := range nodes {
|
||||
|
@@ -109,39 +198,105 @@ func (s Selector) Filter(nodes []*html.Node) (result []*html.Node) {
|
|||
return result
|
||||
}
|
||||
|
||||
// typeSelector returns a Selector that matches elements with a given tag name.
|
||||
func typeSelector(tag string) Selector {
|
||||
tag = toLowerASCII(tag)
|
||||
return func(n *html.Node) bool {
|
||||
return n.Type == html.ElementNode && n.Data == tag
|
||||
// Filter returns the nodes that match m.
|
||||
func Filter(nodes []*html.Node, m Matcher) (result []*html.Node) {
|
||||
for _, n := range nodes {
|
||||
if m.Match(n) {
|
||||
result = append(result, n)
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
type tagSelector struct {
|
||||
tag string
|
||||
}
|
||||
|
||||
// Matches elements with a given tag name.
|
||||
func (t tagSelector) Match(n *html.Node) bool {
|
||||
return n.Type == html.ElementNode && n.Data == t.tag
|
||||
}
|
||||
|
||||
func (c tagSelector) Specificity() Specificity {
|
||||
return Specificity{0, 0, 1}
|
||||
}
|
||||
|
||||
func (c tagSelector) PseudoElement() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
type classSelector struct {
|
||||
class string
|
||||
}
|
||||
|
||||
// Matches elements by class attribute.
|
||||
func (t classSelector) Match(n *html.Node) bool {
|
||||
return matchAttribute(n, "class", func(s string) bool {
|
||||
return matchInclude(t.class, s)
|
||||
})
|
||||
}
|
||||
|
||||
func (c classSelector) Specificity() Specificity {
|
||||
return Specificity{0, 1, 0}
|
||||
}
|
||||
|
||||
func (c classSelector) PseudoElement() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
type idSelector struct {
|
||||
id string
|
||||
}
|
||||
|
||||
// Matches elements by id attribute.
|
||||
func (t idSelector) Match(n *html.Node) bool {
|
||||
return matchAttribute(n, "id", func(s string) bool {
|
||||
return s == t.id
|
||||
})
|
||||
}
|
||||
|
||||
func (c idSelector) Specificity() Specificity {
|
||||
return Specificity{1, 0, 0}
|
||||
}
|
||||
|
||||
func (c idSelector) PseudoElement() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
type attrSelector struct {
|
||||
key, val, operation string
|
||||
regexp *regexp.Regexp
|
||||
}
|
||||
|
||||
// Matches elements by attribute value.
|
||||
func (t attrSelector) Match(n *html.Node) bool {
|
||||
switch t.operation {
|
||||
case "":
|
||||
return matchAttribute(n, t.key, func(string) bool { return true })
|
||||
case "=":
|
||||
return matchAttribute(n, t.key, func(s string) bool { return s == t.val })
|
||||
case "!=":
|
||||
return attributeNotEqualMatch(t.key, t.val, n)
|
||||
case "~=":
|
||||
// matches elements where the attribute named key is a whitespace-separated list that includes val.
|
||||
return matchAttribute(n, t.key, func(s string) bool { return matchInclude(t.val, s) })
|
||||
case "|=":
|
||||
return attributeDashMatch(t.key, t.val, n)
|
||||
case "^=":
|
||||
return attributePrefixMatch(t.key, t.val, n)
|
||||
case "$=":
|
||||
return attributeSuffixMatch(t.key, t.val, n)
|
||||
case "*=":
|
||||
return attributeSubstringMatch(t.key, t.val, n)
|
||||
case "#=":
|
||||
return attributeRegexMatch(t.key, t.regexp, n)
|
||||
default:
|
||||
panic(fmt.Sprintf("unsuported operation : %s", t.operation))
|
||||
}
|
||||
}
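A small, hedged illustration (not from the diff; HTML snippet and selector invented for the example) of how these attribute operations surface in selector syntax:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/andybalholm/cascadia"
	"golang.org/x/net/html"
)

func main() {
	doc, err := html.Parse(strings.NewReader(
		`<a href="https://example.org" rel="external nofollow">x</a>`))
	if err != nil {
		panic(err)
	}
	// "^=" is the prefix operation and "~=" the whitespace-list include operation
	// dispatched by the switch in attrSelector.Match above.
	sel := cascadia.MustCompile(`a[href^="https://"][rel~="nofollow"]`)
	fmt.Println(sel.MatchFirst(doc) != nil) // true
}
```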
|
||||
|
||||
// toLowerASCII returns s with all ASCII capital letters lowercased.
|
||||
func toLowerASCII(s string) string {
|
||||
var b []byte
|
||||
for i := 0; i < len(s); i++ {
|
||||
if c := s[i]; 'A' <= c && c <= 'Z' {
|
||||
if b == nil {
|
||||
b = make([]byte, len(s))
|
||||
copy(b, s)
|
||||
}
|
||||
b[i] = s[i] + ('a' - 'A')
|
||||
}
|
||||
}
|
||||
|
||||
if b == nil {
|
||||
return s
|
||||
}
|
||||
|
||||
return string(b)
|
||||
}
|
||||
|
||||
// attributeSelector returns a Selector that matches elements
|
||||
// where the attribute named key satisfies the function f.
|
||||
func attributeSelector(key string, f func(string) bool) Selector {
|
||||
key = toLowerASCII(key)
|
||||
return func(n *html.Node) bool {
|
||||
// matches elements where the attribute named key satisfies the function f.
|
||||
func matchAttribute(n *html.Node, key string, f func(string) bool) bool {
|
||||
if n.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
|
@ -152,28 +307,10 @@ func attributeSelector(key string, f func(string) bool) Selector {
|
|||
}
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// attributeExistsSelector returns a Selector that matches elements that have
|
||||
// an attribute named key.
|
||||
func attributeExistsSelector(key string) Selector {
|
||||
return attributeSelector(key, func(string) bool { return true })
|
||||
}
|
||||
|
||||
// attributeEqualsSelector returns a Selector that matches elements where
|
||||
// the attribute named key has the value val.
|
||||
func attributeEqualsSelector(key, val string) Selector {
|
||||
return attributeSelector(key,
|
||||
func(s string) bool {
|
||||
return s == val
|
||||
})
|
||||
}
|
||||
|
||||
// attributeNotEqualSelector returns a Selector that matches elements where
|
||||
// attributeNotEqualMatch matches elements where
|
||||
// the attribute named key does not have the value val.
|
||||
func attributeNotEqualSelector(key, val string) Selector {
|
||||
key = toLowerASCII(key)
|
||||
return func(n *html.Node) bool {
|
||||
func attributeNotEqualMatch(key, val string, n *html.Node) bool {
|
||||
if n.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
|
@ -184,13 +321,9 @@ func attributeNotEqualSelector(key, val string) Selector {
|
|||
}
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
// attributeIncludesSelector returns a Selector that matches elements where
|
||||
// the attribute named key is a whitespace-separated list that includes val.
|
||||
func attributeIncludesSelector(key, val string) Selector {
|
||||
return attributeSelector(key,
|
||||
func(s string) bool {
|
||||
// returns true if s is a whitespace-separated list that includes val.
|
||||
func matchInclude(val, s string) bool {
|
||||
for s != "" {
|
||||
i := strings.IndexAny(s, " \t\r\n\f")
|
||||
if i == -1 {
|
||||
|
@ -202,13 +335,11 @@ func attributeIncludesSelector(key, val string) Selector {
|
|||
s = s[i+1:]
|
||||
}
|
||||
return false
|
||||
})
|
||||
}
|
||||
|
||||
// attributeDashmatchSelector returns a Selector that matches elements where
|
||||
// the attribute named key equals val or starts with val plus a hyphen.
|
||||
func attributeDashmatchSelector(key, val string) Selector {
|
||||
return attributeSelector(key,
|
||||
// matches elements where the attribute named key equals val or starts with val plus a hyphen.
|
||||
func attributeDashMatch(key, val string, n *html.Node) bool {
|
||||
return matchAttribute(n, key,
|
||||
func(s string) bool {
|
||||
if s == val {
|
||||
return true
|
||||
|
@ -223,10 +354,10 @@ func attributeDashmatchSelector(key, val string) Selector {
|
|||
})
|
||||
}
|
||||
|
||||
// attributePrefixSelector returns a Selector that matches elements where
|
||||
// attributePrefixMatch matches elements where
|
||||
// the attribute named key starts with val.
|
||||
func attributePrefixSelector(key, val string) Selector {
|
||||
return attributeSelector(key,
|
||||
func attributePrefixMatch(key, val string, n *html.Node) bool {
|
||||
return matchAttribute(n, key,
|
||||
func(s string) bool {
|
||||
if strings.TrimSpace(s) == "" {
|
||||
return false
|
||||
|
@ -235,10 +366,10 @@ func attributePrefixSelector(key, val string) Selector {
|
|||
})
|
||||
}
|
||||
|
||||
// attributeSuffixSelector returns a Selector that matches elements where
|
||||
// attributeSuffixMatch matches elements where
|
||||
// the attribute named key ends with val.
|
||||
func attributeSuffixSelector(key, val string) Selector {
|
||||
return attributeSelector(key,
|
||||
func attributeSuffixMatch(key, val string, n *html.Node) bool {
|
||||
return matchAttribute(n, key,
|
||||
func(s string) bool {
|
||||
if strings.TrimSpace(s) == "" {
|
||||
return false
|
||||
|
@ -247,10 +378,10 @@ func attributeSuffixSelector(key, val string) Selector {
|
|||
})
|
||||
}
|
||||
|
||||
// attributeSubstringSelector returns a Selector that matches nodes where
|
||||
// attributeSubstringMatch matches nodes where
|
||||
// the attribute named key contains val.
|
||||
func attributeSubstringSelector(key, val string) Selector {
|
||||
return attributeSelector(key,
|
||||
func attributeSubstringMatch(key, val string, n *html.Node) bool {
|
||||
return matchAttribute(n, key,
|
||||
func(s string) bool {
|
||||
if strings.TrimSpace(s) == "" {
|
||||
return false
|
||||
|
@ -259,41 +390,132 @@ func attributeSubstringSelector(key, val string) Selector {
|
|||
})
|
||||
}
|
||||
|
||||
// attributeRegexSelector returns a Selector that matches nodes where
|
||||
// attributeRegexMatch matches nodes where
|
||||
// the attribute named key matches the regular expression rx
|
||||
func attributeRegexSelector(key string, rx *regexp.Regexp) Selector {
|
||||
return attributeSelector(key,
|
||||
func attributeRegexMatch(key string, rx *regexp.Regexp, n *html.Node) bool {
|
||||
return matchAttribute(n, key,
|
||||
func(s string) bool {
|
||||
return rx.MatchString(s)
|
||||
})
|
||||
}
|
||||
|
||||
// intersectionSelector returns a selector that matches nodes that match
|
||||
// both a and b.
|
||||
func intersectionSelector(a, b Selector) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
return a(n) && b(n)
|
||||
}
|
||||
func (c attrSelector) Specificity() Specificity {
|
||||
return Specificity{0, 1, 0}
|
||||
}
|
||||
|
||||
// unionSelector returns a selector that matches elements that match
|
||||
// either a or b.
|
||||
func unionSelector(a, b Selector) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
return a(n) || b(n)
|
||||
}
|
||||
func (c attrSelector) PseudoElement() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
// negatedSelector returns a selector that matches elements that do not match a.
|
||||
func negatedSelector(a Selector) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
// ---------------- Pseudo class selectors ----------------
|
||||
// we use several concrete types of pseudo-class selectors
|
||||
|
||||
type relativePseudoClassSelector struct {
|
||||
name string // one of "not", "has", "haschild"
|
||||
match SelectorGroup
|
||||
}
|
||||
|
||||
func (s relativePseudoClassSelector) Match(n *html.Node) bool {
|
||||
if n.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
return !a(n)
|
||||
switch s.name {
|
||||
case "not":
|
||||
// matches elements that do not match a.
|
||||
return !s.match.Match(n)
|
||||
case "has":
|
||||
// matches elements with any descendant that matches a.
|
||||
return hasDescendantMatch(n, s.match)
|
||||
case "haschild":
|
||||
// matches elements with a child that matches a.
|
||||
return hasChildMatch(n, s.match)
|
||||
default:
|
||||
panic(fmt.Sprintf("unsupported relative pseudo class selector : %s", s.name))
|
||||
}
|
||||
}
|
||||
|
||||
// hasChildMatch returns whether n has any child that matches a.
|
||||
func hasChildMatch(n *html.Node, a Matcher) bool {
|
||||
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||
if a.Match(c) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// hasDescendantMatch performs a depth-first search of n's descendants,
|
||||
// testing whether any of them match a. It returns true as soon as a match is
|
||||
// found, or false if no match is found.
|
||||
func hasDescendantMatch(n *html.Node, a Matcher) bool {
|
||||
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||
if a.Match(c) || (c.Type == html.ElementNode && hasDescendantMatch(c, a)) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Specificity returns the specificity of the most specific selectors
|
||||
// in the pseudo-class arguments.
|
||||
// See https://www.w3.org/TR/selectors/#specificity-rules
|
||||
func (s relativePseudoClassSelector) Specificity() Specificity {
|
||||
var max Specificity
|
||||
for _, sel := range s.match {
|
||||
newSpe := sel.Specificity()
|
||||
if max.Less(newSpe) {
|
||||
max = newSpe
|
||||
}
|
||||
}
|
||||
return max
|
||||
}
|
||||
|
||||
func (c relativePseudoClassSelector) PseudoElement() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
type containsPseudoClassSelector struct {
|
||||
own bool
|
||||
value string
|
||||
}
|
||||
|
||||
func (s containsPseudoClassSelector) Match(n *html.Node) bool {
|
||||
var text string
|
||||
if s.own {
|
||||
// matches nodes that directly contain the given text
|
||||
text = strings.ToLower(nodeOwnText(n))
|
||||
} else {
|
||||
// matches nodes that contain the given text.
|
||||
text = strings.ToLower(nodeText(n))
|
||||
}
|
||||
return strings.Contains(text, s.value)
|
||||
}
|
||||
|
||||
func (s containsPseudoClassSelector) Specificity() Specificity {
|
||||
return Specificity{0, 1, 0}
|
||||
}
|
||||
|
||||
func (c containsPseudoClassSelector) PseudoElement() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
type regexpPseudoClassSelector struct {
|
||||
own bool
|
||||
regexp *regexp.Regexp
|
||||
}
|
||||
|
||||
func (s regexpPseudoClassSelector) Match(n *html.Node) bool {
|
||||
var text string
|
||||
if s.own {
|
||||
// matches nodes whose text directly matches the specified regular expression
|
||||
text = nodeOwnText(n)
|
||||
} else {
|
||||
// matches nodes whose text matches the specified regular expression
|
||||
text = nodeText(n)
|
||||
}
|
||||
return s.regexp.MatchString(text)
|
||||
}
|
||||
|
||||
// writeNodeText writes the text contained in n and its descendants to b.
|
||||
func writeNodeText(n *html.Node, b *bytes.Buffer) {
|
||||
switch n.Type {
|
||||
|
@ -325,67 +547,34 @@ func nodeOwnText(n *html.Node) string {
|
|||
return b.String()
|
||||
}
|
||||
|
||||
// textSubstrSelector returns a selector that matches nodes that
|
||||
// contain the given text.
|
||||
func textSubstrSelector(val string) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
text := strings.ToLower(nodeText(n))
|
||||
return strings.Contains(text, val)
|
||||
}
|
||||
func (s regexpPseudoClassSelector) Specificity() Specificity {
|
||||
return Specificity{0, 1, 0}
|
||||
}
|
||||
|
||||
// ownTextSubstrSelector returns a selector that matches nodes that
|
||||
// directly contain the given text
|
||||
func ownTextSubstrSelector(val string) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
text := strings.ToLower(nodeOwnText(n))
|
||||
return strings.Contains(text, val)
|
||||
}
|
||||
func (c regexpPseudoClassSelector) PseudoElement() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
// textRegexSelector returns a selector that matches nodes whose text matches
|
||||
// the specified regular expression
|
||||
func textRegexSelector(rx *regexp.Regexp) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
return rx.MatchString(nodeText(n))
|
||||
}
|
||||
type nthPseudoClassSelector struct {
|
||||
a, b int
|
||||
last, ofType bool
|
||||
}
|
||||
|
||||
// ownTextRegexSelector returns a selector that matches nodes whose text
|
||||
// directly matches the specified regular expression
|
||||
func ownTextRegexSelector(rx *regexp.Regexp) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
return rx.MatchString(nodeOwnText(n))
|
||||
func (s nthPseudoClassSelector) Match(n *html.Node) bool {
|
||||
if s.a == 0 {
|
||||
if s.last {
|
||||
return simpleNthLastChildMatch(s.b, s.ofType, n)
|
||||
} else {
|
||||
return simpleNthChildMatch(s.b, s.ofType, n)
|
||||
}
|
||||
}
|
||||
return nthChildMatch(s.a, s.b, s.last, s.ofType, n)
|
||||
}
|
||||
|
||||
// hasChildSelector returns a selector that matches elements
|
||||
// with a child that matches a.
|
||||
func hasChildSelector(a Selector) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
if n.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
return hasChildMatch(n, a)
|
||||
}
|
||||
}
|
||||
|
||||
// hasDescendantSelector returns a selector that matches elements
|
||||
// with any descendant that matches a.
|
||||
func hasDescendantSelector(a Selector) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
if n.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
return hasDescendantMatch(n, a)
|
||||
}
|
||||
}
|
||||
|
||||
// nthChildSelector returns a selector that implements :nth-child(an+b).
|
||||
// nthChildMatch implements :nth-child(an+b).
|
||||
// If last is true, implements :nth-last-child instead.
|
||||
// If ofType is true, implements :nth-of-type instead.
|
||||
func nthChildSelector(a, b int, last, ofType bool) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
func nthChildMatch(a, b int, last, ofType bool, n *html.Node) bool {
|
||||
if n.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
|
@ -430,12 +619,10 @@ func nthChildSelector(a, b int, last, ofType bool) Selector {
|
|||
|
||||
return i%a == 0 && i/a >= 0
|
||||
}
|
||||
}
|
||||
|
||||
// simpleNthChildSelector returns a selector that implements :nth-child(b).
|
||||
// simpleNthChildMatch implements :nth-child(b).
|
||||
// If ofType is true, implements :nth-of-type instead.
|
||||
func simpleNthChildSelector(b int, ofType bool) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
func simpleNthChildMatch(b int, ofType bool, n *html.Node) bool {
|
||||
if n.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
|
@ -464,13 +651,10 @@ func simpleNthChildSelector(b int, ofType bool) Selector {
|
|||
}
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// simpleNthLastChildSelector returns a selector that implements
|
||||
// :nth-last-child(b). If ofType is true, implements :nth-last-of-type
|
||||
// instead.
|
||||
func simpleNthLastChildSelector(b int, ofType bool) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
// simpleNthLastChildMatch implements :nth-last-child(b).
|
||||
// If ofType is true, implements :nth-last-of-type instead.
|
||||
func simpleNthLastChildMatch(b int, ofType bool, n *html.Node) bool {
|
||||
if n.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
|
@ -499,12 +683,24 @@ func simpleNthLastChildSelector(b int, ofType bool) Selector {
|
|||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Specificity for nth-child pseudo-class.
|
||||
// Does not support a list of selectors
|
||||
func (s nthPseudoClassSelector) Specificity() Specificity {
|
||||
return Specificity{0, 1, 0}
|
||||
}
|
||||
|
||||
// onlyChildSelector returns a selector that implements :only-child.
|
||||
// If ofType is true, it implements :only-of-type instead.
|
||||
func onlyChildSelector(ofType bool) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
func (c nthPseudoClassSelector) PseudoElement() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
type onlyChildPseudoClassSelector struct {
|
||||
ofType bool
|
||||
}
|
||||
|
||||
// Match implements :only-child.
|
||||
// If `ofType` is true, it implements :only-of-type instead.
|
||||
func (s onlyChildPseudoClassSelector) Match(n *html.Node) bool {
|
||||
if n.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
|
@ -520,7 +716,7 @@ func onlyChildSelector(ofType bool) Selector {
|
|||
|
||||
count := 0
|
||||
for c := parent.FirstChild; c != nil; c = c.NextSibling {
|
||||
if (c.Type != html.ElementNode) || (ofType && c.Data != n.Data) {
|
||||
if (c.Type != html.ElementNode) || (s.ofType && c.Data != n.Data) {
|
||||
continue
|
||||
}
|
||||
count++
|
||||
|
@ -531,15 +727,34 @@ func onlyChildSelector(ofType bool) Selector {
|
|||
|
||||
return count == 1
|
||||
}
|
||||
|
||||
func (s onlyChildPseudoClassSelector) Specificity() Specificity {
|
||||
return Specificity{0, 1, 0}
|
||||
}
|
||||
|
||||
// inputSelector is a Selector that matches input, select, textarea and button elements.
|
||||
func inputSelector(n *html.Node) bool {
|
||||
func (c onlyChildPseudoClassSelector) PseudoElement() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
type inputPseudoClassSelector struct{}
|
||||
|
||||
// Matches input, select, textarea and button elements.
|
||||
func (s inputPseudoClassSelector) Match(n *html.Node) bool {
|
||||
return n.Type == html.ElementNode && (n.Data == "input" || n.Data == "select" || n.Data == "textarea" || n.Data == "button")
|
||||
}
|
||||
|
||||
// emptyElementSelector is a Selector that matches empty elements.
|
||||
func emptyElementSelector(n *html.Node) bool {
|
||||
func (s inputPseudoClassSelector) Specificity() Specificity {
|
||||
return Specificity{0, 1, 0}
|
||||
}
|
||||
|
||||
func (c inputPseudoClassSelector) PseudoElement() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
type emptyElementPseudoClassSelector struct{}
|
||||
|
||||
// Matches empty elements.
|
||||
func (s emptyElementPseudoClassSelector) Match(n *html.Node) bool {
|
||||
if n.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
|
@ -554,64 +769,18 @@ func emptyElementSelector(n *html.Node) bool {
|
|||
return true
|
||||
}
|
||||
|
||||
// descendantSelector returns a Selector that matches an element if
|
||||
// it matches d and has an ancestor that matches a.
|
||||
func descendantSelector(a, d Selector) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
if !d(n) {
|
||||
return false
|
||||
func (s emptyElementPseudoClassSelector) Specificity() Specificity {
|
||||
return Specificity{0, 1, 0}
|
||||
}
|
||||
|
||||
for p := n.Parent; p != nil; p = p.Parent {
|
||||
if a(p) {
|
||||
return true
|
||||
}
|
||||
func (c emptyElementPseudoClassSelector) PseudoElement() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
}
|
||||
type rootPseudoClassSelector struct{}
|
||||
|
||||
// childSelector returns a Selector that matches an element if
|
||||
// it matches d and its parent matches a.
|
||||
func childSelector(a, d Selector) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
return d(n) && n.Parent != nil && a(n.Parent)
|
||||
}
|
||||
}
|
||||
|
||||
// siblingSelector returns a Selector that matches an element
|
||||
// if it matches s2 and is preceded by an element that matches s1.
|
||||
// If adjacent is true, the sibling must be immediately before the element.
|
||||
func siblingSelector(s1, s2 Selector, adjacent bool) Selector {
|
||||
return func(n *html.Node) bool {
|
||||
if !s2(n) {
|
||||
return false
|
||||
}
|
||||
|
||||
if adjacent {
|
||||
for n = n.PrevSibling; n != nil; n = n.PrevSibling {
|
||||
if n.Type == html.TextNode || n.Type == html.CommentNode {
|
||||
continue
|
||||
}
|
||||
return s1(n)
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Walk backwards looking for element that matches s1
|
||||
for c := n.PrevSibling; c != nil; c = c.PrevSibling {
|
||||
if s1(c) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// rootSelector implements :root
|
||||
func rootSelector(n *html.Node) bool {
|
||||
// Match implements :root
|
||||
func (s rootPseudoClassSelector) Match(n *html.Node) bool {
|
||||
if n.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
|
@ -620,3 +789,150 @@ func rootSelector(n *html.Node) bool {
|
|||
}
|
||||
return n.Parent.Type == html.DocumentNode
|
||||
}
|
||||
|
||||
func (s rootPseudoClassSelector) Specificity() Specificity {
|
||||
return Specificity{0, 1, 0}
|
||||
}
|
||||
|
||||
func (c rootPseudoClassSelector) PseudoElement() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
type compoundSelector struct {
|
||||
selectors []Sel
|
||||
pseudoElement string
|
||||
}
|
||||
|
||||
// Matches elements if each sub-selector matches.
|
||||
func (t compoundSelector) Match(n *html.Node) bool {
|
||||
if len(t.selectors) == 0 {
|
||||
return n.Type == html.ElementNode
|
||||
}
|
||||
|
||||
for _, sel := range t.selectors {
|
||||
if !sel.Match(n) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func (s compoundSelector) Specificity() Specificity {
|
||||
var out Specificity
|
||||
for _, sel := range s.selectors {
|
||||
out = out.Add(sel.Specificity())
|
||||
}
|
||||
if s.pseudoElement != "" {
|
||||
// https://drafts.csswg.org/selectors-3/#specificity
|
||||
out = out.Add(Specificity{0, 0, 1})
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func (c compoundSelector) PseudoElement() string {
|
||||
return c.pseudoElement
|
||||
}
|
||||
|
||||
type combinedSelector struct {
|
||||
first Sel
|
||||
combinator byte
|
||||
second Sel
|
||||
}
|
||||
|
||||
func (t combinedSelector) Match(n *html.Node) bool {
|
||||
if t.first == nil {
|
||||
return false // maybe we should panic
|
||||
}
|
||||
switch t.combinator {
|
||||
case 0:
|
||||
return t.first.Match(n)
|
||||
case ' ':
|
||||
return descendantMatch(t.first, t.second, n)
|
||||
case '>':
|
||||
return childMatch(t.first, t.second, n)
|
||||
case '+':
|
||||
return siblingMatch(t.first, t.second, true, n)
|
||||
case '~':
|
||||
return siblingMatch(t.first, t.second, false, n)
|
||||
default:
|
||||
panic("unknown combinator")
|
||||
}
|
||||
}
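As an illustrative sketch (assumed inputs, not part of the diff), the combinators dispatched above map onto selector syntax like this:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/andybalholm/cascadia"
	"golang.org/x/net/html"
)

func main() {
	doc, err := html.Parse(strings.NewReader(
		`<div><p>a</p><span>b</span><span>c</span></div>`))
	if err != nil {
		panic(err)
	}
	// '+' is the adjacent-sibling combinator handled by siblingMatch(..., true, ...).
	sel := cascadia.MustCompile("p + span")
	if n := sel.MatchFirst(doc); n != nil {
		fmt.Println(n.FirstChild.Data) // "b"
	}
}
```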
|
||||
|
||||
// matches an element if it matches d and has an ancestor that matches a.
|
||||
func descendantMatch(a, d Matcher, n *html.Node) bool {
|
||||
if !d.Match(n) {
|
||||
return false
|
||||
}
|
||||
|
||||
for p := n.Parent; p != nil; p = p.Parent {
|
||||
if a.Match(p) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// matches an element if it matches d and its parent matches a.
|
||||
func childMatch(a, d Matcher, n *html.Node) bool {
|
||||
return d.Match(n) && n.Parent != nil && a.Match(n.Parent)
|
||||
}
|
||||
|
||||
// matches an element if it matches s2 and is preceded by an element that matches s1.
|
||||
// If adjacent is true, the sibling must be immediately before the element.
|
||||
func siblingMatch(s1, s2 Matcher, adjacent bool, n *html.Node) bool {
|
||||
if !s2.Match(n) {
|
||||
return false
|
||||
}
|
||||
|
||||
if adjacent {
|
||||
for n = n.PrevSibling; n != nil; n = n.PrevSibling {
|
||||
if n.Type == html.TextNode || n.Type == html.CommentNode {
|
||||
continue
|
||||
}
|
||||
return s1.Match(n)
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Walk backwards looking for element that matches s1
|
||||
for c := n.PrevSibling; c != nil; c = c.PrevSibling {
|
||||
if s1.Match(c) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (s combinedSelector) Specificity() Specificity {
|
||||
spec := s.first.Specificity()
|
||||
if s.second != nil {
|
||||
spec = spec.Add(s.second.Specificity())
|
||||
}
|
||||
return spec
|
||||
}
|
||||
|
||||
// on combinedSelector, a pseudo-element only makes sense on the last
|
||||
// selector, although others increase specificity.
|
||||
func (c combinedSelector) PseudoElement() string {
|
||||
if c.second == nil {
|
||||
return ""
|
||||
}
|
||||
return c.second.PseudoElement()
|
||||
}
|
||||
|
||||
// A SelectorGroup is a list of selectors, which matches if any of the
|
||||
// individual selectors matches.
|
||||
type SelectorGroup []Sel
|
||||
|
||||
// Match returns true if the node matches one of the single selectors.
|
||||
func (s SelectorGroup) Match(n *html.Node) bool {
|
||||
for _, sel := range s {
|
||||
if sel.Match(n) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
|
120
vendor/github.com/andybalholm/cascadia/serialize.go
generated
vendored
Normal file
|
@ -0,0 +1,120 @@
|
|||
package cascadia
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// implements the reverse operation Sel -> string
|
||||
|
||||
func (c tagSelector) String() string {
|
||||
return c.tag
|
||||
}
|
||||
|
||||
func (c idSelector) String() string {
|
||||
return "#" + c.id
|
||||
}
|
||||
|
||||
func (c classSelector) String() string {
|
||||
return "." + c.class
|
||||
}
|
||||
|
||||
func (c attrSelector) String() string {
|
||||
val := c.val
|
||||
if c.operation == "#=" {
|
||||
val = c.regexp.String()
|
||||
} else if c.operation != "" {
|
||||
val = fmt.Sprintf(`"%s"`, val)
|
||||
}
|
||||
return fmt.Sprintf(`[%s%s%s]`, c.key, c.operation, val)
|
||||
}
|
||||
|
||||
func (c relativePseudoClassSelector) String() string {
|
||||
return fmt.Sprintf(":%s(%s)", c.name, c.match.String())
|
||||
}
|
||||
func (c containsPseudoClassSelector) String() string {
|
||||
s := "contains"
|
||||
if c.own {
|
||||
s += "Own"
|
||||
}
|
||||
return fmt.Sprintf(`:%s("%s")`, s, c.value)
|
||||
}
|
||||
func (c regexpPseudoClassSelector) String() string {
|
||||
s := "matches"
|
||||
if c.own {
|
||||
s += "Own"
|
||||
}
|
||||
return fmt.Sprintf(":%s(%s)", s, c.regexp.String())
|
||||
}
|
||||
func (c nthPseudoClassSelector) String() string {
|
||||
if c.a == 0 && c.b == 1 { // special cases
|
||||
s := ":first-"
|
||||
if c.last {
|
||||
s = ":last-"
|
||||
}
|
||||
if c.ofType {
|
||||
s += "of-type"
|
||||
} else {
|
||||
s += "child"
|
||||
}
|
||||
return s
|
||||
}
|
||||
var name string
|
||||
switch [2]bool{c.last, c.ofType} {
|
||||
case [2]bool{true, true}:
|
||||
name = "nth-last-of-type"
|
||||
case [2]bool{true, false}:
|
||||
name = "nth-last-child"
|
||||
case [2]bool{false, true}:
|
||||
name = "nth-of-type"
|
||||
case [2]bool{false, false}:
|
||||
name = "nth-child"
|
||||
}
|
||||
return fmt.Sprintf(":%s(%dn+%d)", name, c.a, c.b)
|
||||
}
|
||||
func (c onlyChildPseudoClassSelector) String() string {
|
||||
if c.ofType {
|
||||
return ":only-of-type"
|
||||
}
|
||||
return ":only-child"
|
||||
}
|
||||
func (c inputPseudoClassSelector) String() string {
|
||||
return ":input"
|
||||
}
|
||||
func (c emptyElementPseudoClassSelector) String() string {
|
||||
return ":empty"
|
||||
}
|
||||
func (c rootPseudoClassSelector) String() string {
|
||||
return ":root"
|
||||
}
|
||||
|
||||
func (c compoundSelector) String() string {
|
||||
if len(c.selectors) == 0 && c.pseudoElement == "" {
|
||||
return "*"
|
||||
}
|
||||
chunks := make([]string, len(c.selectors))
|
||||
for i, sel := range c.selectors {
|
||||
chunks[i] = sel.String()
|
||||
}
|
||||
s := strings.Join(chunks, "")
|
||||
if c.pseudoElement != "" {
|
||||
s += "::" + c.pseudoElement
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func (c combinedSelector) String() string {
|
||||
start := c.first.String()
|
||||
if c.second != nil {
|
||||
start += fmt.Sprintf(" %s %s", string(c.combinator), c.second.String())
|
||||
}
|
||||
return start
|
||||
}
|
||||
|
||||
func (c SelectorGroup) String() string {
|
||||
ck := make([]string, len(c))
|
||||
for i, s := range c {
|
||||
ck[i] = s.String()
|
||||
}
|
||||
return strings.Join(ck, ", ")
|
||||
}
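A quick sketch (selector string invented for the example) of the parse-then-serialize round trip these String methods enable; the exact formatting of the output follows the rules above (quoted attribute values, an+b notation):

```go
package main

import (
	"fmt"

	"github.com/andybalholm/cascadia"
)

func main() {
	group, err := cascadia.ParseGroup(`ul > li.item:nth-child(2n+1), a[href$=".png"]`)
	if err != nil {
		panic(err)
	}
	// Re-serialize the parsed group; each Sel contributes its own String().
	fmt.Println(group.String())
}
```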
|
26
vendor/github.com/andybalholm/cascadia/specificity.go
generated
vendored
Normal file
|
@ -0,0 +1,26 @@
|
|||
package cascadia
|
||||
|
||||
// Specificity is the CSS specificity as defined in
|
||||
// https://www.w3.org/TR/selectors/#specificity-rules
|
||||
// with the convention Specificity = [A,B,C].
|
||||
type Specificity [3]int
|
||||
|
||||
// returns `true` if s < other (strictly), false otherwise
|
||||
func (s Specificity) Less(other Specificity) bool {
|
||||
for i := range s {
|
||||
if s[i] < other[i] {
|
||||
return true
|
||||
}
|
||||
if s[i] > other[i] {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (s Specificity) Add(other Specificity) Specificity {
|
||||
for i, sp := range other {
|
||||
s[i] += sp
|
||||
}
|
||||
return s
|
||||
}
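For illustration only (selectors chosen for the example), comparing two parsed selectors under the [A,B,C] convention above:

```go
package main

import (
	"fmt"

	"github.com/andybalholm/cascadia"
)

func main() {
	group, err := cascadia.ParseGroup("#nav a, ul li a.ext")
	if err != nil {
		panic(err)
	}
	for _, s := range group {
		fmt.Println(s.Specificity())
	}
	// "#nav a" has specificity [1 0 1]; "ul li a.ext" has [0 1 3].
	// Less compares component-wise, so the id selector outranks classes and tags.
	fmt.Println(group[1].Specificity().Less(group[0].Specificity())) // true
}
```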
|
13
vendor/github.com/jaytaylor/html2text/.travis.yml
generated
vendored
|
@ -1,13 +1,12 @@
|
|||
language: go
|
||||
go:
|
||||
# n.b. For golang release history, see https://golang.org/doc/devel/release.html
|
||||
- tip
|
||||
- 1.8
|
||||
- 1.7
|
||||
- 1.6
|
||||
- 1.5
|
||||
- 1.4
|
||||
- 1.3
|
||||
- 1.2
|
||||
- "1.13.8"
|
||||
- "1.12.17"
|
||||
- "1.11.13"
|
||||
- "1.10.8"
|
||||
- "1.9.7"
|
||||
notifications:
|
||||
email:
|
||||
on_success: change
|
||||
|
|
3
vendor/github.com/jaytaylor/html2text/README.md
generated
vendored
|
@ -135,3 +135,6 @@ Email: jay at (my github username).com
|
|||
|
||||
Twitter: [@jtaylor](https://twitter.com/jtaylor)
|
||||
|
||||
# Alternatives
|
||||
|
||||
https://github.com/k3a/html2text - Lightweight
|
||||
|
|
3
vendor/github.com/json-iterator/go/.codecov.yml
generated
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
ignore:
|
||||
- "output_tests/.*"
|
||||
|
4
vendor/github.com/json-iterator/go/.gitignore
generated
vendored
Normal file
|
@ -0,0 +1,4 @@
|
|||
/vendor
|
||||
/bug_test.go
|
||||
/coverage.txt
|
||||
/.idea
|
14
vendor/github.com/json-iterator/go/.travis.yml
generated
vendored
Normal file
|
@ -0,0 +1,14 @@
|
|||
language: go
|
||||
|
||||
go:
|
||||
- 1.8.x
|
||||
- 1.x
|
||||
|
||||
before_install:
|
||||
- go get -t -v ./...
|
||||
|
||||
script:
|
||||
- ./test.sh
|
||||
|
||||
after_success:
|
||||
- bash <(curl -s https://codecov.io/bash)
|
21
vendor/github.com/json-iterator/go/Gopkg.lock
generated
vendored
Normal file
|
@ -0,0 +1,21 @@
|
|||
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
|
||||
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/modern-go/concurrent"
|
||||
packages = ["."]
|
||||
revision = "e0a39a4cb4216ea8db28e22a69f4ec25610d513a"
|
||||
version = "1.0.0"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/modern-go/reflect2"
|
||||
packages = ["."]
|
||||
revision = "4b7aa43c6742a2c18fdef89dd197aaae7dac7ccd"
|
||||
version = "1.0.1"
|
||||
|
||||
[solve-meta]
|
||||
analyzer-name = "dep"
|
||||
analyzer-version = 1
|
||||
inputs-digest = "ea54a775e5a354cb015502d2e7aa4b74230fc77e894f34a838b268c25ec8eeb8"
|
||||
solver-name = "gps-cdcl"
|
||||
solver-version = 1
|
26
vendor/github.com/json-iterator/go/Gopkg.toml
generated
vendored
Normal file
|
@ -0,0 +1,26 @@
|
|||
# Gopkg.toml example
|
||||
#
|
||||
# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
|
||||
# for detailed Gopkg.toml documentation.
|
||||
#
|
||||
# required = ["github.com/user/thing/cmd/thing"]
|
||||
# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
|
||||
#
|
||||
# [[constraint]]
|
||||
# name = "github.com/user/project"
|
||||
# version = "1.0.0"
|
||||
#
|
||||
# [[constraint]]
|
||||
# name = "github.com/user/project2"
|
||||
# branch = "dev"
|
||||
# source = "github.com/myfork/project2"
|
||||
#
|
||||
# [[override]]
|
||||
# name = "github.com/x/y"
|
||||
# version = "2.4.0"
|
||||
|
||||
ignored = ["github.com/davecgh/go-spew*","github.com/google/gofuzz*","github.com/stretchr/testify*"]
|
||||
|
||||
[[constraint]]
|
||||
name = "github.com/modern-go/reflect2"
|
||||
version = "1.0.1"
|
21
vendor/github.com/json-iterator/go/LICENSE
generated
vendored
Normal file
|
@ -0,0 +1,21 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) 2016 json-iterator
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
87
vendor/github.com/json-iterator/go/README.md
generated
vendored
Normal file
|
@ -0,0 +1,87 @@
|
|||
[![Sourcegraph](https://sourcegraph.com/github.com/json-iterator/go/-/badge.svg)](https://sourcegraph.com/github.com/json-iterator/go?badge)
|
||||
[![GoDoc](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)](https://pkg.go.dev/github.com/json-iterator/go)
|
||||
[![Build Status](https://travis-ci.org/json-iterator/go.svg?branch=master)](https://travis-ci.org/json-iterator/go)
|
||||
[![codecov](https://codecov.io/gh/json-iterator/go/branch/master/graph/badge.svg)](https://codecov.io/gh/json-iterator/go)
|
||||
[![rcard](https://goreportcard.com/badge/github.com/json-iterator/go)](https://goreportcard.com/report/github.com/json-iterator/go)
|
||||
[![License](http://img.shields.io/badge/license-mit-blue.svg?style=flat-square)](https://raw.githubusercontent.com/json-iterator/go/master/LICENSE)
|
||||
[![Gitter chat](https://badges.gitter.im/gitterHQ/gitter.png)](https://gitter.im/json-iterator/Lobby)
|
||||
|
||||
A high-performance 100% compatible drop-in replacement of "encoding/json"
|
||||
|
||||
You can also use thrift like JSON using [thrift-iterator](https://github.com/thrift-iterator/go)
|
||||
|
||||
# Benchmark
|
||||
|
||||
![benchmark](http://jsoniter.com/benchmarks/go-benchmark.png)
|
||||
|
||||
Source code: https://github.com/json-iterator/go-benchmark/blob/master/src/github.com/json-iterator/go-benchmark/benchmark_medium_payload_test.go
|
||||
|
||||
Raw Result (easyjson requires static code generation)
|
||||
|
||||
| | ns/op | allocation bytes | allocation times |
|
||||
| --------------- | ----------- | ---------------- | ---------------- |
|
||||
| std decode | 35510 ns/op | 1960 B/op | 99 allocs/op |
|
||||
| easyjson decode | 8499 ns/op | 160 B/op | 4 allocs/op |
|
||||
| jsoniter decode | 5623 ns/op | 160 B/op | 3 allocs/op |
|
||||
| std encode | 2213 ns/op | 712 B/op | 5 allocs/op |
|
||||
| easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op |
|
||||
| jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op |
|
||||
|
||||
Always benchmark with your own workload.
|
||||
The result depends heavily on the data input.
|
||||
|
||||
# Usage
|
||||
|
||||
100% compatibility with standard lib
|
||||
|
||||
Replace
|
||||
|
||||
```go
|
||||
import "encoding/json"
|
||||
json.Marshal(&data)
|
||||
```
|
||||
|
||||
with
|
||||
|
||||
```go
|
||||
import jsoniter "github.com/json-iterator/go"
|
||||
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
json.Marshal(&data)
|
||||
```
|
||||
|
||||
Replace
|
||||
|
||||
```go
|
||||
import "encoding/json"
|
||||
json.Unmarshal(input, &data)
|
||||
```
|
||||
|
||||
with
|
||||
|
||||
```go
|
||||
import jsoniter "github.com/json-iterator/go"
|
||||
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
json.Unmarshal(input, &data)
|
||||
```
|
||||
|
||||
[More documentation](http://jsoniter.com/migrate-from-go-std.html)
|
||||
|
||||
# How to get
|
||||
|
||||
```
|
||||
go get github.com/json-iterator/go
|
||||
```
|
||||
|
||||
# Contribution Welcomed !
|
||||
|
||||
Contributors
|
||||
|
||||
- [thockin](https://github.com/thockin)
|
||||
- [mattn](https://github.com/mattn)
|
||||
- [cch123](https://github.com/cch123)
|
||||
- [Oleg Shaldybin](https://github.com/olegshaldybin)
|
||||
- [Jason Toffaletti](https://github.com/toffaletti)
|
||||
|
||||
Report issue or pull request, or email taowen@gmail.com, or [![Gitter chat](https://badges.gitter.im/gitterHQ/gitter.png)](https://gitter.im/json-iterator/Lobby)
|
150
vendor/github.com/json-iterator/go/adapter.go
generated
vendored
Normal file
|
@ -0,0 +1,150 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
)
|
||||
|
||||
// RawMessage makes it possible to replace encoding/json with jsoniter
|
||||
type RawMessage []byte
|
||||
|
||||
// Unmarshal adapts to json/encoding Unmarshal API
|
||||
//
|
||||
// Unmarshal parses the JSON-encoded data and stores the result in the value pointed to by v.
|
||||
// Refer to https://godoc.org/encoding/json#Unmarshal for more information
|
||||
func Unmarshal(data []byte, v interface{}) error {
|
||||
return ConfigDefault.Unmarshal(data, v)
|
||||
}
|
||||
|
||||
// UnmarshalFromString is a convenient method to read from string instead of []byte
|
||||
func UnmarshalFromString(str string, v interface{}) error {
|
||||
return ConfigDefault.UnmarshalFromString(str, v)
|
||||
}
|
||||
|
||||
// Get is a quick method to get a value from a deeply nested JSON structure
|
||||
func Get(data []byte, path ...interface{}) Any {
|
||||
return ConfigDefault.Get(data, path...)
|
||||
}
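A brief usage sketch (sample JSON made up for the example) of the path-based Get API:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	data := []byte(`{"colors": ["red", "green", "blue"]}`)
	// Get walks the path lazily instead of unmarshalling the whole document.
	fmt.Println(jsoniter.Get(data, "colors", 1).ToString()) // green
}
```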
|
||||
|
||||
// Marshal adapts to json/encoding Marshal API
|
||||
//
|
||||
// Marshal returns the JSON encoding of v, adapts to json/encoding Marshal API
|
||||
// Refer to https://godoc.org/encoding/json#Marshal for more information
|
||||
func Marshal(v interface{}) ([]byte, error) {
|
||||
return ConfigDefault.Marshal(v)
|
||||
}
|
||||
|
||||
// MarshalIndent same as json.MarshalIndent. Prefix is not supported.
|
||||
func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
|
||||
return ConfigDefault.MarshalIndent(v, prefix, indent)
|
||||
}
|
||||
|
||||
// MarshalToString convenient method to write as string instead of []byte
|
||||
func MarshalToString(v interface{}) (string, error) {
|
||||
return ConfigDefault.MarshalToString(v)
|
||||
}
|
||||
|
||||
// NewDecoder adapts to json/stream NewDecoder API.
|
||||
//
|
||||
// NewDecoder returns a new decoder that reads from r.
|
||||
//
|
||||
// Instead of an encoding/json Decoder, a Decoder is returned
|
||||
// Refer to https://godoc.org/encoding/json#NewDecoder for more information
|
||||
func NewDecoder(reader io.Reader) *Decoder {
|
||||
return ConfigDefault.NewDecoder(reader)
|
||||
}
|
||||
|
||||
// Decoder reads and decodes JSON values from an input stream.
|
||||
// Decoder provides identical APIs with json/stream Decoder (Token() and UseNumber() are in progress)
|
||||
type Decoder struct {
|
||||
iter *Iterator
|
||||
}
|
||||
|
||||
// Decode decodes JSON into interface{}
|
||||
func (adapter *Decoder) Decode(obj interface{}) error {
|
||||
if adapter.iter.head == adapter.iter.tail && adapter.iter.reader != nil {
|
||||
if !adapter.iter.loadMore() {
|
||||
return io.EOF
|
||||
}
|
||||
}
|
||||
adapter.iter.ReadVal(obj)
|
||||
err := adapter.iter.Error
|
||||
if err == io.EOF {
|
||||
return nil
|
||||
}
|
||||
return adapter.iter.Error
|
||||
}
|
||||
|
||||
// More is there more?
|
||||
func (adapter *Decoder) More() bool {
|
||||
iter := adapter.iter
|
||||
if iter.Error != nil {
|
||||
return false
|
||||
}
|
||||
c := iter.nextToken()
|
||||
if c == 0 {
|
||||
return false
|
||||
}
|
||||
iter.unreadByte()
|
||||
return c != ']' && c != '}'
|
||||
}
|
||||
|
||||
// Buffered remaining buffer
|
||||
func (adapter *Decoder) Buffered() io.Reader {
|
||||
remaining := adapter.iter.buf[adapter.iter.head:adapter.iter.tail]
|
||||
return bytes.NewReader(remaining)
|
||||
}
|
||||
|
||||
// UseNumber causes the Decoder to unmarshal a number into an interface{} as a
|
||||
// Number instead of as a float64.
|
||||
func (adapter *Decoder) UseNumber() {
|
||||
cfg := adapter.iter.cfg.configBeforeFrozen
|
||||
cfg.UseNumber = true
|
||||
adapter.iter.cfg = cfg.frozeWithCacheReuse(adapter.iter.cfg.extraExtensions)
|
||||
}
|
||||
|
||||
// DisallowUnknownFields causes the Decoder to return an error when the destination
|
||||
// is a struct and the input contains object keys which do not match any
|
||||
// non-ignored, exported fields in the destination.
|
||||
func (adapter *Decoder) DisallowUnknownFields() {
|
||||
cfg := adapter.iter.cfg.configBeforeFrozen
|
||||
cfg.DisallowUnknownFields = true
|
||||
adapter.iter.cfg = cfg.frozeWithCacheReuse(adapter.iter.cfg.extraExtensions)
|
||||
}
|
||||
|
||||
// NewEncoder same as json.NewEncoder
|
||||
func NewEncoder(writer io.Writer) *Encoder {
|
||||
return ConfigDefault.NewEncoder(writer)
|
||||
}
|
||||
|
||||
// Encoder same as json.Encoder
|
||||
type Encoder struct {
|
||||
stream *Stream
|
||||
}
|
||||
|
||||
// Encode encodes interface{} as JSON to io.Writer
|
||||
func (adapter *Encoder) Encode(val interface{}) error {
|
||||
adapter.stream.WriteVal(val)
|
||||
adapter.stream.WriteRaw("\n")
|
||||
adapter.stream.Flush()
|
||||
return adapter.stream.Error
|
||||
}
|
||||
|
||||
// SetIndent sets the indentation. Prefix is not supported
|
||||
func (adapter *Encoder) SetIndent(prefix, indent string) {
|
||||
config := adapter.stream.cfg.configBeforeFrozen
|
||||
config.IndentionStep = len(indent)
|
||||
adapter.stream.cfg = config.frozeWithCacheReuse(adapter.stream.cfg.extraExtensions)
|
||||
}
|
||||
|
||||
// SetEscapeHTML escape html by default, set to false to disable
|
||||
func (adapter *Encoder) SetEscapeHTML(escapeHTML bool) {
|
||||
config := adapter.stream.cfg.configBeforeFrozen
|
||||
config.EscapeHTML = escapeHTML
|
||||
adapter.stream.cfg = config.frozeWithCacheReuse(adapter.stream.cfg.extraExtensions)
|
||||
}
|
||||
|
||||
// Valid reports whether data is a valid JSON encoding.
|
||||
func Valid(data []byte) bool {
|
||||
return ConfigDefault.Valid(data)
|
||||
}
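To round off the adapter, a hedged sketch (input stream invented for the example) of the Decoder used the same way as encoding/json's streaming decoder:

```go
package main

import (
	"fmt"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Stream-decode a sequence of top-level JSON values.
	dec := jsoniter.NewDecoder(strings.NewReader(`{"a":1} {"a":2}`))
	for dec.More() {
		var v map[string]int
		if err := dec.Decode(&v); err != nil {
			panic(err)
		}
		fmt.Println(v["a"]) // 1, then 2
	}
}
```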
|
325
vendor/github.com/json-iterator/go/any.go
generated
vendored
Normal file
|
@ -0,0 +1,325 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/modern-go/reflect2"
|
||||
"io"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
// Any generic object representation.
|
||||
// The lazy json implementation holds []byte and parse lazily.
|
||||
type Any interface {
|
||||
LastError() error
|
||||
ValueType() ValueType
|
||||
MustBeValid() Any
|
||||
ToBool() bool
|
||||
ToInt() int
|
||||
ToInt32() int32
|
||||
ToInt64() int64
|
||||
ToUint() uint
|
||||
ToUint32() uint32
|
||||
ToUint64() uint64
|
||||
ToFloat32() float32
|
||||
ToFloat64() float64
|
||||
ToString() string
|
||||
ToVal(val interface{})
|
||||
Get(path ...interface{}) Any
|
||||
Size() int
|
||||
Keys() []string
|
||||
GetInterface() interface{}
|
||||
WriteTo(stream *Stream)
|
||||
}
|
||||
|
||||
type baseAny struct{}
|
||||
|
||||
func (any *baseAny) Get(path ...interface{}) Any {
|
||||
return &invalidAny{baseAny{}, fmt.Errorf("GetIndex %v from simple value", path)}
|
||||
}
|
||||
|
||||
func (any *baseAny) Size() int {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *baseAny) Keys() []string {
|
||||
return []string{}
|
||||
}
|
||||
|
||||
func (any *baseAny) ToVal(obj interface{}) {
|
||||
panic("not implemented")
|
||||
}
|
||||
|
||||
// WrapInt32 turn int32 into Any interface
|
||||
func WrapInt32(val int32) Any {
|
||||
return &int32Any{baseAny{}, val}
|
||||
}
|
||||
|
||||
// WrapInt64 turn int64 into Any interface
|
||||
func WrapInt64(val int64) Any {
|
||||
return &int64Any{baseAny{}, val}
|
||||
}
|
||||
|
||||
// WrapUint32 turn uint32 into Any interface
|
||||
func WrapUint32(val uint32) Any {
|
||||
return &uint32Any{baseAny{}, val}
|
||||
}
|
||||
|
||||
// WrapUint64 turn uint64 into Any interface
|
||||
func WrapUint64(val uint64) Any {
|
||||
return &uint64Any{baseAny{}, val}
|
||||
}
|
||||
|
||||
// WrapFloat64 turn float64 into Any interface
|
||||
func WrapFloat64(val float64) Any {
|
||||
return &floatAny{baseAny{}, val}
|
||||
}
|
||||
|
||||
// WrapString turn string into Any interface
|
||||
func WrapString(val string) Any {
|
||||
return &stringAny{baseAny{}, val}
|
||||
}
|
||||
|
||||
// Wrap turn a go object into Any interface
|
||||
func Wrap(val interface{}) Any {
|
||||
if val == nil {
|
||||
return &nilAny{}
|
||||
}
|
||||
asAny, isAny := val.(Any)
|
||||
if isAny {
|
||||
return asAny
|
||||
}
|
||||
typ := reflect2.TypeOf(val)
|
||||
switch typ.Kind() {
|
||||
case reflect.Slice:
|
||||
return wrapArray(val)
|
||||
case reflect.Struct:
|
||||
return wrapStruct(val)
|
||||
case reflect.Map:
|
||||
return wrapMap(val)
|
||||
case reflect.String:
|
||||
return WrapString(val.(string))
|
||||
case reflect.Int:
|
||||
if strconv.IntSize == 32 {
|
||||
return WrapInt32(int32(val.(int)))
|
||||
}
|
||||
return WrapInt64(int64(val.(int)))
|
||||
case reflect.Int8:
|
||||
return WrapInt32(int32(val.(int8)))
|
||||
case reflect.Int16:
|
||||
return WrapInt32(int32(val.(int16)))
|
||||
case reflect.Int32:
|
||||
return WrapInt32(val.(int32))
|
||||
case reflect.Int64:
|
||||
return WrapInt64(val.(int64))
|
||||
case reflect.Uint:
|
||||
if strconv.IntSize == 32 {
|
||||
return WrapUint32(uint32(val.(uint)))
|
||||
}
|
||||
return WrapUint64(uint64(val.(uint)))
|
||||
case reflect.Uintptr:
|
||||
if ptrSize == 32 {
|
||||
return WrapUint32(uint32(val.(uintptr)))
|
||||
}
|
||||
return WrapUint64(uint64(val.(uintptr)))
|
||||
case reflect.Uint8:
|
||||
return WrapUint32(uint32(val.(uint8)))
|
||||
case reflect.Uint16:
|
||||
return WrapUint32(uint32(val.(uint16)))
|
||||
case reflect.Uint32:
|
||||
return WrapUint32(uint32(val.(uint32)))
|
||||
case reflect.Uint64:
|
||||
return WrapUint64(val.(uint64))
|
||||
case reflect.Float32:
|
||||
return WrapFloat64(float64(val.(float32)))
|
||||
case reflect.Float64:
|
||||
return WrapFloat64(val.(float64))
|
||||
case reflect.Bool:
|
||||
if val.(bool) == true {
|
||||
return &trueAny{}
|
||||
}
|
||||
return &falseAny{}
|
||||
}
|
||||
return &invalidAny{baseAny{}, fmt.Errorf("unsupported type: %v", typ)}
|
||||
}
|
||||
|
||||
// ReadAny read next JSON element as an Any object. It is a better json.RawMessage.
|
||||
func (iter *Iterator) ReadAny() Any {
|
||||
return iter.readAny()
|
||||
}
|
||||
|
||||
func (iter *Iterator) readAny() Any {
|
||||
c := iter.nextToken()
|
||||
switch c {
|
||||
case '"':
|
||||
iter.unreadByte()
|
||||
return &stringAny{baseAny{}, iter.ReadString()}
|
||||
case 'n':
|
||||
iter.skipThreeBytes('u', 'l', 'l') // null
|
||||
return &nilAny{}
|
||||
case 't':
|
||||
iter.skipThreeBytes('r', 'u', 'e') // true
|
||||
return &trueAny{}
|
||||
case 'f':
|
||||
iter.skipFourBytes('a', 'l', 's', 'e') // false
|
||||
return &falseAny{}
|
||||
case '{':
|
||||
return iter.readObjectAny()
|
||||
case '[':
|
||||
return iter.readArrayAny()
|
||||
case '-':
|
||||
return iter.readNumberAny(false)
|
||||
case 0:
|
||||
return &invalidAny{baseAny{}, errors.New("input is empty")}
|
||||
default:
|
||||
return iter.readNumberAny(true)
|
||||
}
|
||||
}
|
||||
|
||||
func (iter *Iterator) readNumberAny(positive bool) Any {
|
||||
iter.startCapture(iter.head - 1)
|
||||
iter.skipNumber()
|
||||
lazyBuf := iter.stopCapture()
|
||||
return &numberLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
|
||||
}
|
||||
|
||||
func (iter *Iterator) readObjectAny() Any {
|
||||
iter.startCapture(iter.head - 1)
|
||||
iter.skipObject()
|
||||
lazyBuf := iter.stopCapture()
|
||||
return &objectLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
|
||||
}
|
||||
|
||||
func (iter *Iterator) readArrayAny() Any {
|
||||
iter.startCapture(iter.head - 1)
|
||||
iter.skipArray()
|
||||
lazyBuf := iter.stopCapture()
|
||||
return &arrayLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
|
||||
}
|
||||
|
||||
func locateObjectField(iter *Iterator, target string) []byte {
|
||||
var found []byte
|
||||
iter.ReadObjectCB(func(iter *Iterator, field string) bool {
|
||||
if field == target {
|
||||
found = iter.SkipAndReturnBytes()
|
||||
return false
|
||||
}
|
||||
iter.Skip()
|
||||
return true
|
||||
})
|
||||
return found
|
||||
}
|
||||
|
||||
func locateArrayElement(iter *Iterator, target int) []byte {
|
||||
var found []byte
|
||||
n := 0
|
||||
iter.ReadArrayCB(func(iter *Iterator) bool {
|
||||
if n == target {
|
||||
found = iter.SkipAndReturnBytes()
|
||||
return false
|
||||
}
|
||||
iter.Skip()
|
||||
n++
|
||||
return true
|
||||
})
|
||||
return found
|
||||
}
|
||||
|
||||
func locatePath(iter *Iterator, path []interface{}) Any {
|
||||
for i, pathKeyObj := range path {
|
||||
switch pathKey := pathKeyObj.(type) {
|
||||
case string:
|
||||
valueBytes := locateObjectField(iter, pathKey)
|
||||
if valueBytes == nil {
|
||||
return newInvalidAny(path[i:])
|
||||
}
|
||||
iter.ResetBytes(valueBytes)
|
||||
case int:
|
||||
valueBytes := locateArrayElement(iter, pathKey)
|
||||
if valueBytes == nil {
|
||||
return newInvalidAny(path[i:])
|
||||
}
|
||||
iter.ResetBytes(valueBytes)
|
||||
case int32:
|
||||
if '*' == pathKey {
|
||||
return iter.readAny().Get(path[i:]...)
|
||||
}
|
||||
return newInvalidAny(path[i:])
|
||||
default:
|
||||
return newInvalidAny(path[i:])
|
||||
}
|
||||
}
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
return &invalidAny{baseAny{}, iter.Error}
|
||||
}
|
||||
return iter.readAny()
|
||||
}
|
||||
|
||||
var anyType = reflect2.TypeOfPtr((*Any)(nil)).Elem()
|
||||
|
||||
func createDecoderOfAny(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
if typ == anyType {
|
||||
return &directAnyCodec{}
|
||||
}
|
||||
if typ.Implements(anyType) {
|
||||
return &anyCodec{
|
||||
valType: typ,
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func createEncoderOfAny(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
if typ == anyType {
|
||||
return &directAnyCodec{}
|
||||
}
|
||||
if typ.Implements(anyType) {
|
||||
return &anyCodec{
|
||||
valType: typ,
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type anyCodec struct {
|
||||
valType reflect2.Type
|
||||
}
|
||||
|
||||
func (codec *anyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
panic("not implemented")
|
||||
}
|
||||
|
||||
func (codec *anyCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
obj := codec.valType.UnsafeIndirect(ptr)
|
||||
any := obj.(Any)
|
||||
any.WriteTo(stream)
|
||||
}
|
||||
|
||||
func (codec *anyCodec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
obj := codec.valType.UnsafeIndirect(ptr)
|
||||
any := obj.(Any)
|
||||
return any.Size() == 0
|
||||
}
|
||||
|
||||
type directAnyCodec struct {
|
||||
}
|
||||
|
||||
func (codec *directAnyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
*(*Any)(ptr) = iter.readAny()
|
||||
}
|
||||
|
||||
func (codec *directAnyCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
any := *(*Any)(ptr)
|
||||
if any == nil {
|
||||
stream.WriteNil()
|
||||
return
|
||||
}
|
||||
any.WriteTo(stream)
|
||||
}
|
||||
|
||||
func (codec *directAnyCodec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
any := *(*Any)(ptr)
|
||||
return any.Size() == 0
|
||||
}
|
278
vendor/github.com/json-iterator/go/any_array.go
generated
vendored
Normal file
|
@ -0,0 +1,278 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
type arrayLazyAny struct {
|
||||
baseAny
|
||||
cfg *frozenConfig
|
||||
buf []byte
|
||||
err error
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ValueType() ValueType {
|
||||
return ArrayValue
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) LastError() error {
|
||||
return any.err
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToBool() bool {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
return iter.ReadArray()
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToInt() int {
|
||||
if any.ToBool() {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToInt32() int32 {
|
||||
if any.ToBool() {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToInt64() int64 {
|
||||
if any.ToBool() {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToUint() uint {
|
||||
if any.ToBool() {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToUint32() uint32 {
|
||||
if any.ToBool() {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToUint64() uint64 {
|
||||
if any.ToBool() {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToFloat32() float32 {
|
||||
if any.ToBool() {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToFloat64() float64 {
|
||||
if any.ToBool() {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToString() string {
|
||||
return *(*string)(unsafe.Pointer(&any.buf))
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToVal(val interface{}) {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
iter.ReadVal(val)
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) Get(path ...interface{}) Any {
|
||||
if len(path) == 0 {
|
||||
return any
|
||||
}
|
||||
switch firstPath := path[0].(type) {
|
||||
case int:
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
valueBytes := locateArrayElement(iter, firstPath)
|
||||
if valueBytes == nil {
|
||||
return newInvalidAny(path)
|
||||
}
|
||||
iter.ResetBytes(valueBytes)
|
||||
return locatePath(iter, path[1:])
|
||||
case int32:
|
||||
if '*' == firstPath {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
arr := make([]Any, 0)
|
||||
iter.ReadArrayCB(func(iter *Iterator) bool {
|
||||
found := iter.readAny().Get(path[1:]...)
|
||||
if found.ValueType() != InvalidValue {
|
||||
arr = append(arr, found)
|
||||
}
|
||||
return true
|
||||
})
|
||||
return wrapArray(arr)
|
||||
}
|
||||
return newInvalidAny(path)
|
||||
default:
|
||||
return newInvalidAny(path)
|
||||
}
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) Size() int {
|
||||
size := 0
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
iter.ReadArrayCB(func(iter *Iterator) bool {
|
||||
size++
|
||||
iter.Skip()
|
||||
return true
|
||||
})
|
||||
return size
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) WriteTo(stream *Stream) {
|
||||
stream.Write(any.buf)
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) GetInterface() interface{} {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
return iter.Read()
|
||||
}
|
||||
|
||||
type arrayAny struct {
|
||||
baseAny
|
||||
val reflect.Value
|
||||
}
|
||||
|
||||
func wrapArray(val interface{}) *arrayAny {
|
||||
return &arrayAny{baseAny{}, reflect.ValueOf(val)}
|
||||
}
|
||||
|
||||
func (any *arrayAny) ValueType() ValueType {
|
||||
return ArrayValue
|
||||
}
|
||||
|
||||
func (any *arrayAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *arrayAny) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToBool() bool {
|
||||
return any.val.Len() != 0
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToInt() int {
|
||||
if any.val.Len() == 0 {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToInt32() int32 {
|
||||
if any.val.Len() == 0 {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToInt64() int64 {
|
||||
if any.val.Len() == 0 {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToUint() uint {
|
||||
if any.val.Len() == 0 {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToUint32() uint32 {
|
||||
if any.val.Len() == 0 {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToUint64() uint64 {
|
||||
if any.val.Len() == 0 {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToFloat32() float32 {
|
||||
if any.val.Len() == 0 {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToFloat64() float64 {
|
||||
if any.val.Len() == 0 {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToString() string {
|
||||
str, _ := MarshalToString(any.val.Interface())
|
||||
return str
|
||||
}
|
||||
|
||||
func (any *arrayAny) Get(path ...interface{}) Any {
|
||||
if len(path) == 0 {
|
||||
return any
|
||||
}
|
||||
switch firstPath := path[0].(type) {
|
||||
case int:
|
||||
if firstPath < 0 || firstPath >= any.val.Len() {
|
||||
return newInvalidAny(path)
|
||||
}
|
||||
return Wrap(any.val.Index(firstPath).Interface())
|
||||
case int32:
|
||||
if '*' == firstPath {
|
||||
mappedAll := make([]Any, 0)
|
||||
for i := 0; i < any.val.Len(); i++ {
|
||||
mapped := Wrap(any.val.Index(i).Interface()).Get(path[1:]...)
|
||||
if mapped.ValueType() != InvalidValue {
|
||||
mappedAll = append(mappedAll, mapped)
|
||||
}
|
||||
}
|
||||
return wrapArray(mappedAll)
|
||||
}
|
||||
return newInvalidAny(path)
|
||||
default:
|
||||
return newInvalidAny(path)
|
||||
}
|
||||
}
|
||||
|
||||
func (any *arrayAny) Size() int {
|
||||
return any.val.Len()
|
||||
}
|
||||
|
||||
func (any *arrayAny) WriteTo(stream *Stream) {
|
||||
stream.WriteVal(any.val)
|
||||
}
|
||||
|
||||
func (any *arrayAny) GetInterface() interface{} {
|
||||
return any.val.Interface()
|
||||
}
|
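arrayLazyAny keeps the raw buffer and only parses on demand; the int32 branch of Get treats '*' as "apply the rest of the path to every element" and wraps the hits in a new array Any. A short usage sketch, assuming the standard jsoniter import; the values in the comments follow the code above but are not asserted by this change.

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	data := []byte(`[{"id":1},{"id":2},{"id":3}]`)

	// '*' arrives as an int32 rune, which Get interprets as a wildcard
	// over every array element.
	ids := jsoniter.Get(data, '*', "id")

	fmt.Println(ids.Size())     // 3
	fmt.Println(ids.ToString()) // [1,2,3]
}
```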
vendor/github.com/json-iterator/go/any_bool.go (new file, generated, vendored, 137 lines)
@@ -0,0 +1,137 @@
|
|||
package jsoniter
|
||||
|
||||
type trueAny struct {
|
||||
baseAny
|
||||
}
|
||||
|
||||
func (any *trueAny) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *trueAny) ToBool() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (any *trueAny) ToInt() int {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *trueAny) ToInt32() int32 {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *trueAny) ToInt64() int64 {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *trueAny) ToUint() uint {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *trueAny) ToUint32() uint32 {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *trueAny) ToUint64() uint64 {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *trueAny) ToFloat32() float32 {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *trueAny) ToFloat64() float64 {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *trueAny) ToString() string {
|
||||
return "true"
|
||||
}
|
||||
|
||||
func (any *trueAny) WriteTo(stream *Stream) {
|
||||
stream.WriteTrue()
|
||||
}
|
||||
|
||||
func (any *trueAny) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *trueAny) GetInterface() interface{} {
|
||||
return true
|
||||
}
|
||||
|
||||
func (any *trueAny) ValueType() ValueType {
|
||||
return BoolValue
|
||||
}
|
||||
|
||||
func (any *trueAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
type falseAny struct {
|
||||
baseAny
|
||||
}
|
||||
|
||||
func (any *falseAny) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *falseAny) ToBool() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (any *falseAny) ToInt() int {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *falseAny) ToInt32() int32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *falseAny) ToInt64() int64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *falseAny) ToUint() uint {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *falseAny) ToUint32() uint32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *falseAny) ToUint64() uint64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *falseAny) ToFloat32() float32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *falseAny) ToFloat64() float64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *falseAny) ToString() string {
|
||||
return "false"
|
||||
}
|
||||
|
||||
func (any *falseAny) WriteTo(stream *Stream) {
|
||||
stream.WriteFalse()
|
||||
}
|
||||
|
||||
func (any *falseAny) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *falseAny) GetInterface() interface{} {
|
||||
return false
|
||||
}
|
||||
|
||||
func (any *falseAny) ValueType() ValueType {
|
||||
return BoolValue
|
||||
}
|
||||
|
||||
func (any *falseAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
vendor/github.com/json-iterator/go/any_float.go (new file, generated, vendored, 83 lines)
@@ -0,0 +1,83 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type floatAny struct {
|
||||
baseAny
|
||||
val float64
|
||||
}
|
||||
|
||||
func (any *floatAny) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *floatAny) ValueType() ValueType {
|
||||
return NumberValue
|
||||
}
|
||||
|
||||
func (any *floatAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *floatAny) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *floatAny) ToBool() bool {
|
||||
return any.ToFloat64() != 0
|
||||
}
|
||||
|
||||
func (any *floatAny) ToInt() int {
|
||||
return int(any.val)
|
||||
}
|
||||
|
||||
func (any *floatAny) ToInt32() int32 {
|
||||
return int32(any.val)
|
||||
}
|
||||
|
||||
func (any *floatAny) ToInt64() int64 {
|
||||
return int64(any.val)
|
||||
}
|
||||
|
||||
func (any *floatAny) ToUint() uint {
|
||||
if any.val > 0 {
|
||||
return uint(any.val)
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *floatAny) ToUint32() uint32 {
|
||||
if any.val > 0 {
|
||||
return uint32(any.val)
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *floatAny) ToUint64() uint64 {
|
||||
if any.val > 0 {
|
||||
return uint64(any.val)
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *floatAny) ToFloat32() float32 {
|
||||
return float32(any.val)
|
||||
}
|
||||
|
||||
func (any *floatAny) ToFloat64() float64 {
|
||||
return any.val
|
||||
}
|
||||
|
||||
func (any *floatAny) ToString() string {
|
||||
return strconv.FormatFloat(any.val, 'E', -1, 64)
|
||||
}
|
||||
|
||||
func (any *floatAny) WriteTo(stream *Stream) {
|
||||
stream.WriteFloat64(any.val)
|
||||
}
|
||||
|
||||
func (any *floatAny) GetInterface() interface{} {
|
||||
return any.val
|
||||
}
|
vendor/github.com/json-iterator/go/any_int32.go (new file, generated, vendored, 74 lines)
@@ -0,0 +1,74 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type int32Any struct {
|
||||
baseAny
|
||||
val int32
|
||||
}
|
||||
|
||||
func (any *int32Any) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *int32Any) ValueType() ValueType {
|
||||
return NumberValue
|
||||
}
|
||||
|
||||
func (any *int32Any) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *int32Any) ToBool() bool {
|
||||
return any.val != 0
|
||||
}
|
||||
|
||||
func (any *int32Any) ToInt() int {
|
||||
return int(any.val)
|
||||
}
|
||||
|
||||
func (any *int32Any) ToInt32() int32 {
|
||||
return any.val
|
||||
}
|
||||
|
||||
func (any *int32Any) ToInt64() int64 {
|
||||
return int64(any.val)
|
||||
}
|
||||
|
||||
func (any *int32Any) ToUint() uint {
|
||||
return uint(any.val)
|
||||
}
|
||||
|
||||
func (any *int32Any) ToUint32() uint32 {
|
||||
return uint32(any.val)
|
||||
}
|
||||
|
||||
func (any *int32Any) ToUint64() uint64 {
|
||||
return uint64(any.val)
|
||||
}
|
||||
|
||||
func (any *int32Any) ToFloat32() float32 {
|
||||
return float32(any.val)
|
||||
}
|
||||
|
||||
func (any *int32Any) ToFloat64() float64 {
|
||||
return float64(any.val)
|
||||
}
|
||||
|
||||
func (any *int32Any) ToString() string {
|
||||
return strconv.FormatInt(int64(any.val), 10)
|
||||
}
|
||||
|
||||
func (any *int32Any) WriteTo(stream *Stream) {
|
||||
stream.WriteInt32(any.val)
|
||||
}
|
||||
|
||||
func (any *int32Any) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *int32Any) GetInterface() interface{} {
|
||||
return any.val
|
||||
}
|
vendor/github.com/json-iterator/go/any_int64.go (new file, generated, vendored, 74 lines)
@@ -0,0 +1,74 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type int64Any struct {
|
||||
baseAny
|
||||
val int64
|
||||
}
|
||||
|
||||
func (any *int64Any) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *int64Any) ValueType() ValueType {
|
||||
return NumberValue
|
||||
}
|
||||
|
||||
func (any *int64Any) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *int64Any) ToBool() bool {
|
||||
return any.val != 0
|
||||
}
|
||||
|
||||
func (any *int64Any) ToInt() int {
|
||||
return int(any.val)
|
||||
}
|
||||
|
||||
func (any *int64Any) ToInt32() int32 {
|
||||
return int32(any.val)
|
||||
}
|
||||
|
||||
func (any *int64Any) ToInt64() int64 {
|
||||
return any.val
|
||||
}
|
||||
|
||||
func (any *int64Any) ToUint() uint {
|
||||
return uint(any.val)
|
||||
}
|
||||
|
||||
func (any *int64Any) ToUint32() uint32 {
|
||||
return uint32(any.val)
|
||||
}
|
||||
|
||||
func (any *int64Any) ToUint64() uint64 {
|
||||
return uint64(any.val)
|
||||
}
|
||||
|
||||
func (any *int64Any) ToFloat32() float32 {
|
||||
return float32(any.val)
|
||||
}
|
||||
|
||||
func (any *int64Any) ToFloat64() float64 {
|
||||
return float64(any.val)
|
||||
}
|
||||
|
||||
func (any *int64Any) ToString() string {
|
||||
return strconv.FormatInt(any.val, 10)
|
||||
}
|
||||
|
||||
func (any *int64Any) WriteTo(stream *Stream) {
|
||||
stream.WriteInt64(any.val)
|
||||
}
|
||||
|
||||
func (any *int64Any) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *int64Any) GetInterface() interface{} {
|
||||
return any.val
|
||||
}
|
vendor/github.com/json-iterator/go/any_invalid.go (new file, generated, vendored, 82 lines)
@@ -0,0 +1,82 @@
|
|||
package jsoniter
|
||||
|
||||
import "fmt"
|
||||
|
||||
type invalidAny struct {
|
||||
baseAny
|
||||
err error
|
||||
}
|
||||
|
||||
func newInvalidAny(path []interface{}) *invalidAny {
|
||||
return &invalidAny{baseAny{}, fmt.Errorf("%v not found", path)}
|
||||
}
|
||||
|
||||
func (any *invalidAny) LastError() error {
|
||||
return any.err
|
||||
}
|
||||
|
||||
func (any *invalidAny) ValueType() ValueType {
|
||||
return InvalidValue
|
||||
}
|
||||
|
||||
func (any *invalidAny) MustBeValid() Any {
|
||||
panic(any.err)
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToBool() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToInt() int {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToInt32() int32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToInt64() int64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToUint() uint {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToUint32() uint32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToUint64() uint64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToFloat32() float32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToFloat64() float64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToString() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
func (any *invalidAny) WriteTo(stream *Stream) {
|
||||
}
|
||||
|
||||
func (any *invalidAny) Get(path ...interface{}) Any {
|
||||
if any.err == nil {
|
||||
return &invalidAny{baseAny{}, fmt.Errorf("get %v from invalid", path)}
|
||||
}
|
||||
return &invalidAny{baseAny{}, fmt.Errorf("%v, get %v from invalid", any.err, path)}
|
||||
}
|
||||
|
||||
func (any *invalidAny) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *invalidAny) GetInterface() interface{} {
|
||||
return nil
|
||||
}
|
vendor/github.com/json-iterator/go/any_nil.go (new file, generated, vendored, 69 lines)
@@ -0,0 +1,69 @@
package jsoniter

type nilAny struct {
	baseAny
}

func (any *nilAny) LastError() error {
	return nil
}

func (any *nilAny) ValueType() ValueType {
	return NilValue
}

func (any *nilAny) MustBeValid() Any {
	return any
}

func (any *nilAny) ToBool() bool {
	return false
}

func (any *nilAny) ToInt() int {
	return 0
}

func (any *nilAny) ToInt32() int32 {
	return 0
}

func (any *nilAny) ToInt64() int64 {
	return 0
}

func (any *nilAny) ToUint() uint {
	return 0
}

func (any *nilAny) ToUint32() uint32 {
	return 0
}

func (any *nilAny) ToUint64() uint64 {
	return 0
}

func (any *nilAny) ToFloat32() float32 {
	return 0
}

func (any *nilAny) ToFloat64() float64 {
	return 0
}

func (any *nilAny) ToString() string {
	return ""
}

func (any *nilAny) WriteTo(stream *Stream) {
	stream.WriteNil()
}

func (any *nilAny) Parse() *Iterator {
	return nil
}

func (any *nilAny) GetInterface() interface{} {
	return nil
}
vendor/github.com/json-iterator/go/any_number.go (new file, generated, vendored, 123 lines)
@@ -0,0 +1,123 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"io"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
type numberLazyAny struct {
|
||||
baseAny
|
||||
cfg *frozenConfig
|
||||
buf []byte
|
||||
err error
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) ValueType() ValueType {
|
||||
return NumberValue
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) LastError() error {
|
||||
return any.err
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) ToBool() bool {
|
||||
return any.ToFloat64() != 0
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) ToInt() int {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
val := iter.ReadInt()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
any.err = iter.Error
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) ToInt32() int32 {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
val := iter.ReadInt32()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
any.err = iter.Error
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) ToInt64() int64 {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
val := iter.ReadInt64()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
any.err = iter.Error
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) ToUint() uint {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
val := iter.ReadUint()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
any.err = iter.Error
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) ToUint32() uint32 {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
val := iter.ReadUint32()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
any.err = iter.Error
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) ToUint64() uint64 {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
val := iter.ReadUint64()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
any.err = iter.Error
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) ToFloat32() float32 {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
val := iter.ReadFloat32()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
any.err = iter.Error
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) ToFloat64() float64 {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
val := iter.ReadFloat64()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
any.err = iter.Error
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) ToString() string {
|
||||
return *(*string)(unsafe.Pointer(&any.buf))
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) WriteTo(stream *Stream) {
|
||||
stream.Write(any.buf)
|
||||
}
|
||||
|
||||
func (any *numberLazyAny) GetInterface() interface{} {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
return iter.Read()
|
||||
}
|
vendor/github.com/json-iterator/go/any_object.go (new file, generated, vendored, 374 lines)
@@ -0,0 +1,374 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
type objectLazyAny struct {
|
||||
baseAny
|
||||
cfg *frozenConfig
|
||||
buf []byte
|
||||
err error
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) ValueType() ValueType {
|
||||
return ObjectValue
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) LastError() error {
|
||||
return any.err
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) ToBool() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) ToInt() int {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) ToInt32() int32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) ToInt64() int64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) ToUint() uint {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) ToUint32() uint32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) ToUint64() uint64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) ToFloat32() float32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) ToFloat64() float64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) ToString() string {
|
||||
return *(*string)(unsafe.Pointer(&any.buf))
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) ToVal(obj interface{}) {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
iter.ReadVal(obj)
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) Get(path ...interface{}) Any {
|
||||
if len(path) == 0 {
|
||||
return any
|
||||
}
|
||||
switch firstPath := path[0].(type) {
|
||||
case string:
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
valueBytes := locateObjectField(iter, firstPath)
|
||||
if valueBytes == nil {
|
||||
return newInvalidAny(path)
|
||||
}
|
||||
iter.ResetBytes(valueBytes)
|
||||
return locatePath(iter, path[1:])
|
||||
case int32:
|
||||
if '*' == firstPath {
|
||||
mappedAll := map[string]Any{}
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
iter.ReadMapCB(func(iter *Iterator, field string) bool {
|
||||
mapped := locatePath(iter, path[1:])
|
||||
if mapped.ValueType() != InvalidValue {
|
||||
mappedAll[field] = mapped
|
||||
}
|
||||
return true
|
||||
})
|
||||
return wrapMap(mappedAll)
|
||||
}
|
||||
return newInvalidAny(path)
|
||||
default:
|
||||
return newInvalidAny(path)
|
||||
}
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) Keys() []string {
|
||||
keys := []string{}
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
iter.ReadMapCB(func(iter *Iterator, field string) bool {
|
||||
iter.Skip()
|
||||
keys = append(keys, field)
|
||||
return true
|
||||
})
|
||||
return keys
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) Size() int {
|
||||
size := 0
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
iter.ReadObjectCB(func(iter *Iterator, field string) bool {
|
||||
iter.Skip()
|
||||
size++
|
||||
return true
|
||||
})
|
||||
return size
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) WriteTo(stream *Stream) {
|
||||
stream.Write(any.buf)
|
||||
}
|
||||
|
||||
func (any *objectLazyAny) GetInterface() interface{} {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
return iter.Read()
|
||||
}
|
||||
|
||||
type objectAny struct {
|
||||
baseAny
|
||||
err error
|
||||
val reflect.Value
|
||||
}
|
||||
|
||||
func wrapStruct(val interface{}) *objectAny {
|
||||
return &objectAny{baseAny{}, nil, reflect.ValueOf(val)}
|
||||
}
|
||||
|
||||
func (any *objectAny) ValueType() ValueType {
|
||||
return ObjectValue
|
||||
}
|
||||
|
||||
func (any *objectAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *objectAny) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *objectAny) LastError() error {
|
||||
return any.err
|
||||
}
|
||||
|
||||
func (any *objectAny) ToBool() bool {
|
||||
return any.val.NumField() != 0
|
||||
}
|
||||
|
||||
func (any *objectAny) ToInt() int {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectAny) ToInt32() int32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectAny) ToInt64() int64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectAny) ToUint() uint {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectAny) ToUint32() uint32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectAny) ToUint64() uint64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectAny) ToFloat32() float32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectAny) ToFloat64() float64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *objectAny) ToString() string {
|
||||
str, err := MarshalToString(any.val.Interface())
|
||||
any.err = err
|
||||
return str
|
||||
}
|
||||
|
||||
func (any *objectAny) Get(path ...interface{}) Any {
|
||||
if len(path) == 0 {
|
||||
return any
|
||||
}
|
||||
switch firstPath := path[0].(type) {
|
||||
case string:
|
||||
field := any.val.FieldByName(firstPath)
|
||||
if !field.IsValid() {
|
||||
return newInvalidAny(path)
|
||||
}
|
||||
return Wrap(field.Interface())
|
||||
case int32:
|
||||
if '*' == firstPath {
|
||||
mappedAll := map[string]Any{}
|
||||
for i := 0; i < any.val.NumField(); i++ {
|
||||
field := any.val.Field(i)
|
||||
if field.CanInterface() {
|
||||
mapped := Wrap(field.Interface()).Get(path[1:]...)
|
||||
if mapped.ValueType() != InvalidValue {
|
||||
mappedAll[any.val.Type().Field(i).Name] = mapped
|
||||
}
|
||||
}
|
||||
}
|
||||
return wrapMap(mappedAll)
|
||||
}
|
||||
return newInvalidAny(path)
|
||||
default:
|
||||
return newInvalidAny(path)
|
||||
}
|
||||
}
|
||||
|
||||
func (any *objectAny) Keys() []string {
|
||||
keys := make([]string, 0, any.val.NumField())
|
||||
for i := 0; i < any.val.NumField(); i++ {
|
||||
keys = append(keys, any.val.Type().Field(i).Name)
|
||||
}
|
||||
return keys
|
||||
}
|
||||
|
||||
func (any *objectAny) Size() int {
|
||||
return any.val.NumField()
|
||||
}
|
||||
|
||||
func (any *objectAny) WriteTo(stream *Stream) {
|
||||
stream.WriteVal(any.val)
|
||||
}
|
||||
|
||||
func (any *objectAny) GetInterface() interface{} {
|
||||
return any.val.Interface()
|
||||
}
|
||||
|
||||
type mapAny struct {
|
||||
baseAny
|
||||
err error
|
||||
val reflect.Value
|
||||
}
|
||||
|
||||
func wrapMap(val interface{}) *mapAny {
|
||||
return &mapAny{baseAny{}, nil, reflect.ValueOf(val)}
|
||||
}
|
||||
|
||||
func (any *mapAny) ValueType() ValueType {
|
||||
return ObjectValue
|
||||
}
|
||||
|
||||
func (any *mapAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *mapAny) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *mapAny) LastError() error {
|
||||
return any.err
|
||||
}
|
||||
|
||||
func (any *mapAny) ToBool() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (any *mapAny) ToInt() int {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *mapAny) ToInt32() int32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *mapAny) ToInt64() int64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *mapAny) ToUint() uint {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *mapAny) ToUint32() uint32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *mapAny) ToUint64() uint64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *mapAny) ToFloat32() float32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *mapAny) ToFloat64() float64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *mapAny) ToString() string {
|
||||
str, err := MarshalToString(any.val.Interface())
|
||||
any.err = err
|
||||
return str
|
||||
}
|
||||
|
||||
func (any *mapAny) Get(path ...interface{}) Any {
|
||||
if len(path) == 0 {
|
||||
return any
|
||||
}
|
||||
switch firstPath := path[0].(type) {
|
||||
case int32:
|
||||
if '*' == firstPath {
|
||||
mappedAll := map[string]Any{}
|
||||
for _, key := range any.val.MapKeys() {
|
||||
keyAsStr := key.String()
|
||||
element := Wrap(any.val.MapIndex(key).Interface())
|
||||
mapped := element.Get(path[1:]...)
|
||||
if mapped.ValueType() != InvalidValue {
|
||||
mappedAll[keyAsStr] = mapped
|
||||
}
|
||||
}
|
||||
return wrapMap(mappedAll)
|
||||
}
|
||||
return newInvalidAny(path)
|
||||
default:
|
||||
value := any.val.MapIndex(reflect.ValueOf(firstPath))
|
||||
if !value.IsValid() {
|
||||
return newInvalidAny(path)
|
||||
}
|
||||
return Wrap(value.Interface())
|
||||
}
|
||||
}
|
||||
|
||||
func (any *mapAny) Keys() []string {
|
||||
keys := make([]string, 0, any.val.Len())
|
||||
for _, key := range any.val.MapKeys() {
|
||||
keys = append(keys, key.String())
|
||||
}
|
||||
return keys
|
||||
}
|
||||
|
||||
func (any *mapAny) Size() int {
|
||||
return any.val.Len()
|
||||
}
|
||||
|
||||
func (any *mapAny) WriteTo(stream *Stream) {
|
||||
stream.WriteVal(any.val)
|
||||
}
|
||||
|
||||
func (any *mapAny) GetInterface() interface{} {
|
||||
return any.val.Interface()
|
||||
}
|
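objectLazyAny and mapAny give object fields the same wildcard treatment: the '*' branch maps the rest of the path over every field and wraps the results in a map[string]Any. A hedged sketch follows; it assumes the package-level Wrap helper (defined in wrap.go, which is not part of this hunk) passes an existing Any through unchanged.

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	data := []byte(`{"alice":{"age":31},"bob":{"age":27}}`)

	// '*' over an object maps "age" across every field and collects the hits.
	ages := jsoniter.Get(data, '*', "age")

	fmt.Println(ages.Size())             // 2
	fmt.Println(ages.Get("bob").ToInt()) // 27 (assumes Wrap returns an Any as-is)
}
```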
vendor/github.com/json-iterator/go/any_str.go (new file, generated, vendored, 166 lines)
@@ -0,0 +1,166 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type stringAny struct {
|
||||
baseAny
|
||||
val string
|
||||
}
|
||||
|
||||
func (any *stringAny) Get(path ...interface{}) Any {
|
||||
if len(path) == 0 {
|
||||
return any
|
||||
}
|
||||
return &invalidAny{baseAny{}, fmt.Errorf("GetIndex %v from simple value", path)}
|
||||
}
|
||||
|
||||
func (any *stringAny) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *stringAny) ValueType() ValueType {
|
||||
return StringValue
|
||||
}
|
||||
|
||||
func (any *stringAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *stringAny) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *stringAny) ToBool() bool {
|
||||
str := any.ToString()
|
||||
if str == "0" {
|
||||
return false
|
||||
}
|
||||
for _, c := range str {
|
||||
switch c {
|
||||
case ' ', '\n', '\r', '\t':
|
||||
default:
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (any *stringAny) ToInt() int {
|
||||
return int(any.ToInt64())
|
||||
|
||||
}
|
||||
|
||||
func (any *stringAny) ToInt32() int32 {
|
||||
return int32(any.ToInt64())
|
||||
}
|
||||
|
||||
func (any *stringAny) ToInt64() int64 {
|
||||
if any.val == "" {
|
||||
return 0
|
||||
}
|
||||
|
||||
flag := 1
|
||||
startPos := 0
|
||||
if any.val[0] == '+' || any.val[0] == '-' {
|
||||
startPos = 1
|
||||
}
|
||||
|
||||
if any.val[0] == '-' {
|
||||
flag = -1
|
||||
}
|
||||
|
||||
endPos := startPos
|
||||
for i := startPos; i < len(any.val); i++ {
|
||||
if any.val[i] >= '0' && any.val[i] <= '9' {
|
||||
endPos = i + 1
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
parsed, _ := strconv.ParseInt(any.val[startPos:endPos], 10, 64)
|
||||
return int64(flag) * parsed
|
||||
}
|
||||
|
||||
func (any *stringAny) ToUint() uint {
|
||||
return uint(any.ToUint64())
|
||||
}
|
||||
|
||||
func (any *stringAny) ToUint32() uint32 {
|
||||
return uint32(any.ToUint64())
|
||||
}
|
||||
|
||||
func (any *stringAny) ToUint64() uint64 {
|
||||
if any.val == "" {
|
||||
return 0
|
||||
}
|
||||
|
||||
startPos := 0
|
||||
|
||||
if any.val[0] == '-' {
|
||||
return 0
|
||||
}
|
||||
if any.val[0] == '+' {
|
||||
startPos = 1
|
||||
}
|
||||
|
||||
endPos := startPos
|
||||
for i := startPos; i < len(any.val); i++ {
|
||||
if any.val[i] >= '0' && any.val[i] <= '9' {
|
||||
endPos = i + 1
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
parsed, _ := strconv.ParseUint(any.val[startPos:endPos], 10, 64)
|
||||
return parsed
|
||||
}
|
||||
|
||||
func (any *stringAny) ToFloat32() float32 {
|
||||
return float32(any.ToFloat64())
|
||||
}
|
||||
|
||||
func (any *stringAny) ToFloat64() float64 {
|
||||
if len(any.val) == 0 {
|
||||
return 0
|
||||
}
|
||||
|
||||
// first char invalid
|
||||
if any.val[0] != '+' && any.val[0] != '-' && (any.val[0] > '9' || any.val[0] < '0') {
|
||||
return 0
|
||||
}
|
||||
|
||||
// extract valid num expression from string
|
||||
// eg 123true => 123, -12.12xxa => -12.12
|
||||
endPos := 1
|
||||
for i := 1; i < len(any.val); i++ {
|
||||
if any.val[i] == '.' || any.val[i] == 'e' || any.val[i] == 'E' || any.val[i] == '+' || any.val[i] == '-' {
|
||||
endPos = i + 1
|
||||
continue
|
||||
}
|
||||
|
||||
// end position is the first char which is not digit
|
||||
if any.val[i] >= '0' && any.val[i] <= '9' {
|
||||
endPos = i + 1
|
||||
} else {
|
||||
endPos = i
|
||||
break
|
||||
}
|
||||
}
|
||||
parsed, _ := strconv.ParseFloat(any.val[:endPos], 64)
|
||||
return parsed
|
||||
}
|
||||
|
||||
func (any *stringAny) ToString() string {
|
||||
return any.val
|
||||
}
|
||||
|
||||
func (any *stringAny) WriteTo(stream *Stream) {
|
||||
stream.WriteString(any.val)
|
||||
}
|
||||
|
||||
func (any *stringAny) GetInterface() interface{} {
|
||||
return any.val
|
||||
}
|
vendor/github.com/json-iterator/go/any_uint32.go (new file, generated, vendored, 74 lines)
@@ -0,0 +1,74 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type uint32Any struct {
|
||||
baseAny
|
||||
val uint32
|
||||
}
|
||||
|
||||
func (any *uint32Any) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *uint32Any) ValueType() ValueType {
|
||||
return NumberValue
|
||||
}
|
||||
|
||||
func (any *uint32Any) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *uint32Any) ToBool() bool {
|
||||
return any.val != 0
|
||||
}
|
||||
|
||||
func (any *uint32Any) ToInt() int {
|
||||
return int(any.val)
|
||||
}
|
||||
|
||||
func (any *uint32Any) ToInt32() int32 {
|
||||
return int32(any.val)
|
||||
}
|
||||
|
||||
func (any *uint32Any) ToInt64() int64 {
|
||||
return int64(any.val)
|
||||
}
|
||||
|
||||
func (any *uint32Any) ToUint() uint {
|
||||
return uint(any.val)
|
||||
}
|
||||
|
||||
func (any *uint32Any) ToUint32() uint32 {
|
||||
return any.val
|
||||
}
|
||||
|
||||
func (any *uint32Any) ToUint64() uint64 {
|
||||
return uint64(any.val)
|
||||
}
|
||||
|
||||
func (any *uint32Any) ToFloat32() float32 {
|
||||
return float32(any.val)
|
||||
}
|
||||
|
||||
func (any *uint32Any) ToFloat64() float64 {
|
||||
return float64(any.val)
|
||||
}
|
||||
|
||||
func (any *uint32Any) ToString() string {
|
||||
return strconv.FormatInt(int64(any.val), 10)
|
||||
}
|
||||
|
||||
func (any *uint32Any) WriteTo(stream *Stream) {
|
||||
stream.WriteUint32(any.val)
|
||||
}
|
||||
|
||||
func (any *uint32Any) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *uint32Any) GetInterface() interface{} {
|
||||
return any.val
|
||||
}
|
vendor/github.com/json-iterator/go/any_uint64.go (new file, generated, vendored, 74 lines)
@@ -0,0 +1,74 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type uint64Any struct {
|
||||
baseAny
|
||||
val uint64
|
||||
}
|
||||
|
||||
func (any *uint64Any) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *uint64Any) ValueType() ValueType {
|
||||
return NumberValue
|
||||
}
|
||||
|
||||
func (any *uint64Any) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *uint64Any) ToBool() bool {
|
||||
return any.val != 0
|
||||
}
|
||||
|
||||
func (any *uint64Any) ToInt() int {
|
||||
return int(any.val)
|
||||
}
|
||||
|
||||
func (any *uint64Any) ToInt32() int32 {
|
||||
return int32(any.val)
|
||||
}
|
||||
|
||||
func (any *uint64Any) ToInt64() int64 {
|
||||
return int64(any.val)
|
||||
}
|
||||
|
||||
func (any *uint64Any) ToUint() uint {
|
||||
return uint(any.val)
|
||||
}
|
||||
|
||||
func (any *uint64Any) ToUint32() uint32 {
|
||||
return uint32(any.val)
|
||||
}
|
||||
|
||||
func (any *uint64Any) ToUint64() uint64 {
|
||||
return any.val
|
||||
}
|
||||
|
||||
func (any *uint64Any) ToFloat32() float32 {
|
||||
return float32(any.val)
|
||||
}
|
||||
|
||||
func (any *uint64Any) ToFloat64() float64 {
|
||||
return float64(any.val)
|
||||
}
|
||||
|
||||
func (any *uint64Any) ToString() string {
|
||||
return strconv.FormatUint(any.val, 10)
|
||||
}
|
||||
|
||||
func (any *uint64Any) WriteTo(stream *Stream) {
|
||||
stream.WriteUint64(any.val)
|
||||
}
|
||||
|
||||
func (any *uint64Any) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *uint64Any) GetInterface() interface{} {
|
||||
return any.val
|
||||
}
|
vendor/github.com/json-iterator/go/build.sh (new file, generated, vendored, 12 lines)
@@ -0,0 +1,12 @@
#!/bin/bash
set -e
set -x

if [ ! -d /tmp/build-golang/src/github.com/json-iterator ]; then
    mkdir -p /tmp/build-golang/src/github.com/json-iterator
    ln -s $PWD /tmp/build-golang/src/github.com/json-iterator/go
fi
export GOPATH=/tmp/build-golang
go get -u github.com/golang/dep/cmd/dep
cd /tmp/build-golang/src/github.com/json-iterator/go
exec $GOPATH/bin/dep ensure -update
vendor/github.com/json-iterator/go/config.go (new file, generated, vendored, 375 lines)
@@ -0,0 +1,375 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"io"
|
||||
"reflect"
|
||||
"sync"
|
||||
"unsafe"
|
||||
|
||||
"github.com/modern-go/concurrent"
|
||||
"github.com/modern-go/reflect2"
|
||||
)
|
||||
|
||||
// Config customize how the API should behave.
|
||||
// The API is created from Config by Froze.
|
||||
type Config struct {
|
||||
IndentionStep int
|
||||
MarshalFloatWith6Digits bool
|
||||
EscapeHTML bool
|
||||
SortMapKeys bool
|
||||
UseNumber bool
|
||||
DisallowUnknownFields bool
|
||||
TagKey string
|
||||
OnlyTaggedField bool
|
||||
ValidateJsonRawMessage bool
|
||||
ObjectFieldMustBeSimpleString bool
|
||||
CaseSensitive bool
|
||||
}
|
||||
|
||||
// API the public interface of this package.
|
||||
// Primary Marshal and Unmarshal.
|
||||
type API interface {
|
||||
IteratorPool
|
||||
StreamPool
|
||||
MarshalToString(v interface{}) (string, error)
|
||||
Marshal(v interface{}) ([]byte, error)
|
||||
MarshalIndent(v interface{}, prefix, indent string) ([]byte, error)
|
||||
UnmarshalFromString(str string, v interface{}) error
|
||||
Unmarshal(data []byte, v interface{}) error
|
||||
Get(data []byte, path ...interface{}) Any
|
||||
NewEncoder(writer io.Writer) *Encoder
|
||||
NewDecoder(reader io.Reader) *Decoder
|
||||
Valid(data []byte) bool
|
||||
RegisterExtension(extension Extension)
|
||||
DecoderOf(typ reflect2.Type) ValDecoder
|
||||
EncoderOf(typ reflect2.Type) ValEncoder
|
||||
}
|
||||
|
||||
// ConfigDefault the default API
|
||||
var ConfigDefault = Config{
|
||||
EscapeHTML: true,
|
||||
}.Froze()
|
||||
|
||||
// ConfigCompatibleWithStandardLibrary tries to be 100% compatible with standard library behavior
|
||||
var ConfigCompatibleWithStandardLibrary = Config{
|
||||
EscapeHTML: true,
|
||||
SortMapKeys: true,
|
||||
ValidateJsonRawMessage: true,
|
||||
}.Froze()
|
||||
|
||||
// ConfigFastest marshals float with only 6 digits precision
|
||||
var ConfigFastest = Config{
|
||||
EscapeHTML: false,
|
||||
MarshalFloatWith6Digits: true, // will lose precession
|
||||
ObjectFieldMustBeSimpleString: true, // do not unescape object field
|
||||
}.Froze()
|
||||
|
||||
type frozenConfig struct {
|
||||
configBeforeFrozen Config
|
||||
sortMapKeys bool
|
||||
indentionStep int
|
||||
objectFieldMustBeSimpleString bool
|
||||
onlyTaggedField bool
|
||||
disallowUnknownFields bool
|
||||
decoderCache *concurrent.Map
|
||||
encoderCache *concurrent.Map
|
||||
encoderExtension Extension
|
||||
decoderExtension Extension
|
||||
extraExtensions []Extension
|
||||
streamPool *sync.Pool
|
||||
iteratorPool *sync.Pool
|
||||
caseSensitive bool
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) initCache() {
|
||||
cfg.decoderCache = concurrent.NewMap()
|
||||
cfg.encoderCache = concurrent.NewMap()
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) addDecoderToCache(cacheKey uintptr, decoder ValDecoder) {
|
||||
cfg.decoderCache.Store(cacheKey, decoder)
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) addEncoderToCache(cacheKey uintptr, encoder ValEncoder) {
|
||||
cfg.encoderCache.Store(cacheKey, encoder)
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) getDecoderFromCache(cacheKey uintptr) ValDecoder {
|
||||
decoder, found := cfg.decoderCache.Load(cacheKey)
|
||||
if found {
|
||||
return decoder.(ValDecoder)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) getEncoderFromCache(cacheKey uintptr) ValEncoder {
|
||||
encoder, found := cfg.encoderCache.Load(cacheKey)
|
||||
if found {
|
||||
return encoder.(ValEncoder)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
var cfgCache = concurrent.NewMap()
|
||||
|
||||
func getFrozenConfigFromCache(cfg Config) *frozenConfig {
|
||||
obj, found := cfgCache.Load(cfg)
|
||||
if found {
|
||||
return obj.(*frozenConfig)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func addFrozenConfigToCache(cfg Config, frozenConfig *frozenConfig) {
|
||||
cfgCache.Store(cfg, frozenConfig)
|
||||
}
|
||||
|
||||
// Froze forge API from config
|
||||
func (cfg Config) Froze() API {
|
||||
api := &frozenConfig{
|
||||
sortMapKeys: cfg.SortMapKeys,
|
||||
indentionStep: cfg.IndentionStep,
|
||||
objectFieldMustBeSimpleString: cfg.ObjectFieldMustBeSimpleString,
|
||||
onlyTaggedField: cfg.OnlyTaggedField,
|
||||
disallowUnknownFields: cfg.DisallowUnknownFields,
|
||||
caseSensitive: cfg.CaseSensitive,
|
||||
}
|
||||
api.streamPool = &sync.Pool{
|
||||
New: func() interface{} {
|
||||
return NewStream(api, nil, 512)
|
||||
},
|
||||
}
|
||||
api.iteratorPool = &sync.Pool{
|
||||
New: func() interface{} {
|
||||
return NewIterator(api)
|
||||
},
|
||||
}
|
||||
api.initCache()
|
||||
encoderExtension := EncoderExtension{}
|
||||
decoderExtension := DecoderExtension{}
|
||||
if cfg.MarshalFloatWith6Digits {
|
||||
api.marshalFloatWith6Digits(encoderExtension)
|
||||
}
|
||||
if cfg.EscapeHTML {
|
||||
api.escapeHTML(encoderExtension)
|
||||
}
|
||||
if cfg.UseNumber {
|
||||
api.useNumber(decoderExtension)
|
||||
}
|
||||
if cfg.ValidateJsonRawMessage {
|
||||
api.validateJsonRawMessage(encoderExtension)
|
||||
}
|
||||
api.encoderExtension = encoderExtension
|
||||
api.decoderExtension = decoderExtension
|
||||
api.configBeforeFrozen = cfg
|
||||
return api
|
||||
}
|
||||
|
||||
func (cfg Config) frozeWithCacheReuse(extraExtensions []Extension) *frozenConfig {
|
||||
api := getFrozenConfigFromCache(cfg)
|
||||
if api != nil {
|
||||
return api
|
||||
}
|
||||
api = cfg.Froze().(*frozenConfig)
|
||||
for _, extension := range extraExtensions {
|
||||
api.RegisterExtension(extension)
|
||||
}
|
||||
addFrozenConfigToCache(cfg, api)
|
||||
return api
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) validateJsonRawMessage(extension EncoderExtension) {
|
||||
encoder := &funcEncoder{func(ptr unsafe.Pointer, stream *Stream) {
|
||||
rawMessage := *(*json.RawMessage)(ptr)
|
||||
iter := cfg.BorrowIterator([]byte(rawMessage))
|
||||
defer cfg.ReturnIterator(iter)
|
||||
iter.Read()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
stream.WriteRaw("null")
|
||||
} else {
|
||||
stream.WriteRaw(string(rawMessage))
|
||||
}
|
||||
}, func(ptr unsafe.Pointer) bool {
|
||||
return len(*((*json.RawMessage)(ptr))) == 0
|
||||
}}
|
||||
extension[reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem()] = encoder
|
||||
extension[reflect2.TypeOfPtr((*RawMessage)(nil)).Elem()] = encoder
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) useNumber(extension DecoderExtension) {
|
||||
extension[reflect2.TypeOfPtr((*interface{})(nil)).Elem()] = &funcDecoder{func(ptr unsafe.Pointer, iter *Iterator) {
|
||||
exitingValue := *((*interface{})(ptr))
|
||||
if exitingValue != nil && reflect.TypeOf(exitingValue).Kind() == reflect.Ptr {
|
||||
iter.ReadVal(exitingValue)
|
||||
return
|
||||
}
|
||||
if iter.WhatIsNext() == NumberValue {
|
||||
*((*interface{})(ptr)) = json.Number(iter.readNumberAsString())
|
||||
} else {
|
||||
*((*interface{})(ptr)) = iter.Read()
|
||||
}
|
||||
}}
|
||||
}
|
||||
func (cfg *frozenConfig) getTagKey() string {
|
||||
tagKey := cfg.configBeforeFrozen.TagKey
|
||||
if tagKey == "" {
|
||||
return "json"
|
||||
}
|
||||
return tagKey
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) RegisterExtension(extension Extension) {
|
||||
cfg.extraExtensions = append(cfg.extraExtensions, extension)
|
||||
copied := cfg.configBeforeFrozen
|
||||
cfg.configBeforeFrozen = copied
|
||||
}
|
||||
|
||||
type lossyFloat32Encoder struct {
|
||||
}
|
||||
|
||||
func (encoder *lossyFloat32Encoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteFloat32Lossy(*((*float32)(ptr)))
|
||||
}
|
||||
|
||||
func (encoder *lossyFloat32Encoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*float32)(ptr)) == 0
|
||||
}
|
||||
|
||||
type lossyFloat64Encoder struct {
|
||||
}
|
||||
|
||||
func (encoder *lossyFloat64Encoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteFloat64Lossy(*((*float64)(ptr)))
|
||||
}
|
||||
|
||||
func (encoder *lossyFloat64Encoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*float64)(ptr)) == 0
|
||||
}
|
||||
|
||||
// EnableLossyFloatMarshalling keeps 10**(-6) precision
|
||||
// for float variables for better performance.
|
||||
func (cfg *frozenConfig) marshalFloatWith6Digits(extension EncoderExtension) {
|
||||
// for better performance
|
||||
extension[reflect2.TypeOfPtr((*float32)(nil)).Elem()] = &lossyFloat32Encoder{}
|
||||
extension[reflect2.TypeOfPtr((*float64)(nil)).Elem()] = &lossyFloat64Encoder{}
|
||||
}
|
||||
|
||||
type htmlEscapedStringEncoder struct {
|
||||
}
|
||||
|
||||
func (encoder *htmlEscapedStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
str := *((*string)(ptr))
|
||||
stream.WriteStringWithHTMLEscaped(str)
|
||||
}
|
||||
|
||||
func (encoder *htmlEscapedStringEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*string)(ptr)) == ""
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) escapeHTML(encoderExtension EncoderExtension) {
|
||||
encoderExtension[reflect2.TypeOfPtr((*string)(nil)).Elem()] = &htmlEscapedStringEncoder{}
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) cleanDecoders() {
|
||||
typeDecoders = map[string]ValDecoder{}
|
||||
fieldDecoders = map[string]ValDecoder{}
|
||||
*cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig))
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) cleanEncoders() {
|
||||
typeEncoders = map[string]ValEncoder{}
|
||||
fieldEncoders = map[string]ValEncoder{}
|
||||
*cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig))
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) MarshalToString(v interface{}) (string, error) {
|
||||
stream := cfg.BorrowStream(nil)
|
||||
defer cfg.ReturnStream(stream)
|
||||
stream.WriteVal(v)
|
||||
if stream.Error != nil {
|
||||
return "", stream.Error
|
||||
}
|
||||
return string(stream.Buffer()), nil
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) Marshal(v interface{}) ([]byte, error) {
|
||||
stream := cfg.BorrowStream(nil)
|
||||
defer cfg.ReturnStream(stream)
|
||||
stream.WriteVal(v)
|
||||
if stream.Error != nil {
|
||||
return nil, stream.Error
|
||||
}
|
||||
result := stream.Buffer()
|
||||
copied := make([]byte, len(result))
|
||||
copy(copied, result)
|
||||
return copied, nil
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
|
||||
if prefix != "" {
|
||||
panic("prefix is not supported")
|
||||
}
|
||||
for _, r := range indent {
|
||||
if r != ' ' {
|
||||
panic("indent can only be space")
|
||||
}
|
||||
}
|
||||
newCfg := cfg.configBeforeFrozen
|
||||
newCfg.IndentionStep = len(indent)
|
||||
return newCfg.frozeWithCacheReuse(cfg.extraExtensions).Marshal(v)
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) UnmarshalFromString(str string, v interface{}) error {
|
||||
data := []byte(str)
|
||||
iter := cfg.BorrowIterator(data)
|
||||
defer cfg.ReturnIterator(iter)
|
||||
iter.ReadVal(v)
|
||||
c := iter.nextToken()
|
||||
if c == 0 {
|
||||
if iter.Error == io.EOF {
|
||||
return nil
|
||||
}
|
||||
return iter.Error
|
||||
}
|
||||
iter.ReportError("Unmarshal", "there are bytes left after unmarshal")
|
||||
return iter.Error
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) Get(data []byte, path ...interface{}) Any {
|
||||
iter := cfg.BorrowIterator(data)
|
||||
defer cfg.ReturnIterator(iter)
|
||||
return locatePath(iter, path)
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) Unmarshal(data []byte, v interface{}) error {
|
||||
iter := cfg.BorrowIterator(data)
|
||||
defer cfg.ReturnIterator(iter)
|
||||
iter.ReadVal(v)
|
||||
c := iter.nextToken()
|
||||
if c == 0 {
|
||||
if iter.Error == io.EOF {
|
||||
return nil
|
||||
}
|
||||
return iter.Error
|
||||
}
|
||||
iter.ReportError("Unmarshal", "there are bytes left after unmarshal")
|
||||
return iter.Error
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) NewEncoder(writer io.Writer) *Encoder {
|
||||
stream := NewStream(cfg, writer, 512)
|
||||
return &Encoder{stream}
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) NewDecoder(reader io.Reader) *Decoder {
|
||||
iter := Parse(cfg, reader, 512)
|
||||
return &Decoder{iter}
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) Valid(data []byte) bool {
|
||||
iter := cfg.BorrowIterator(data)
|
||||
defer cfg.ReturnIterator(iter)
|
||||
iter.Skip()
|
||||
return iter.Error == nil
|
||||
}
|
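config.go is the entry point most callers touch: a Config is frozen once into an API and then reused, exactly as the ConfigDefault and ConfigCompatibleWithStandardLibrary presets above do. A minimal sketch of a custom frozen config; the field names come from the Config struct in this file, and the printed output is illustrative.

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// Frozen once, reused everywhere; mirrors the preset built as
// ConfigCompatibleWithStandardLibrary above.
var json = jsoniter.Config{
	EscapeHTML:             true,
	SortMapKeys:            true,
	ValidateJsonRawMessage: true,
}.Froze()

func main() {
	out, err := json.MarshalToString(map[string]int{"b": 2, "a": 1})
	if err != nil {
		fmt.Println("marshal failed:", err)
		return
	}
	fmt.Println(out) // {"a":1,"b":2} (SortMapKeys makes the key order deterministic)
}
```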
vendor/github.com/json-iterator/go/fuzzy_mode_convert_table.md (new file, generated, vendored, 7 lines)
@@ -0,0 +1,7 @@
| json type \ dest type | bool | int | uint | float |string|
| --- | --- | --- | --- |--|--|
| number | positive => true <br/> negative => true <br/> zero => false| 23.2 => 23 <br/> -32.1 => -32| 12.1 => 12 <br/> -12.1 => 0|as normal|same as origin|
| string | empty string => false <br/> string "0" => false <br/> other strings => true | "123.32" => 123 <br/> "-123.4" => -123 <br/> "123.23xxxw" => 123 <br/> "abcde12" => 0 <br/> "-32.1" => -32| 13.2 => 13 <br/> -1.1 => 0 |12.1 => 12.1 <br/> -12.3 => -12.3<br/> 12.4xxa => 12.4 <br/> +1.1e2 =>110 |same as origin|
| bool | true => true <br/> false => false| true => 1 <br/> false => 0 | true => 1 <br/> false => 0 |true => 1 <br/>false => 0|true => "true" <br/> false => "false"|
| object | true | 0 | 0 |0|originnal json|
| array | empty array => false <br/> nonempty array => true| [] => 0 <br/> [1,2] => 1 | [] => 0 <br/> [1,2] => 1 |[] => 0<br/>[1,2] => 1|original json|
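The table documents the fuzzy conversions implemented by the any_*.go files above (for example stringAny.ToInt64 and stringAny.ToFloat64 in any_str.go). A small sketch exercising a few of those rows; the expected values in the comments come from the table and from the parsing code shown earlier, assuming the standard jsoniter import.

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	data := []byte(`{"port":"8080xx","enabled":"0","ratio":"+1.1e2abc"}`)

	fmt.Println(jsoniter.Get(data, "port").ToInt())      // 8080  (leading digits only)
	fmt.Println(jsoniter.Get(data, "enabled").ToBool())  // false (string "0" => false)
	fmt.Println(jsoniter.Get(data, "ratio").ToFloat64()) // 110   (per the string/float row)
}
```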
vendor/github.com/json-iterator/go/go.mod (new file, generated, vendored, 11 lines)
@@ -0,0 +1,11 @@
module github.com/json-iterator/go

go 1.12

require (
	github.com/davecgh/go-spew v1.1.1
	github.com/google/gofuzz v1.0.0
	github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421
	github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742
	github.com/stretchr/testify v1.3.0
)
vendor/github.com/json-iterator/go/go.sum (new file, generated, vendored, 14 lines)
@@ -0,0 +1,14 @@
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/google/gofuzz v1.0.0 h1:A8PeW59pxE9IoFRqBp37U+mSNaQoZ46F1f0f863XSXw=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 h1:Esafd1046DLDQ0W1YjYsBW+p8U2u7vzgW2SQVmlNazg=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
vendor/github.com/json-iterator/go/iter.go (new file, generated, vendored, 349 lines)
@@ -0,0 +1,349 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
)
|
||||
|
||||
// ValueType the type for JSON element
|
||||
type ValueType int
|
||||
|
||||
const (
|
||||
// InvalidValue invalid JSON element
|
||||
InvalidValue ValueType = iota
|
||||
// StringValue JSON element "string"
|
||||
StringValue
|
||||
// NumberValue JSON element 100 or 0.10
|
||||
NumberValue
|
||||
// NilValue JSON element null
|
||||
NilValue
|
||||
// BoolValue JSON element true or false
|
||||
BoolValue
|
||||
// ArrayValue JSON element []
|
||||
ArrayValue
|
||||
// ObjectValue JSON element {}
|
||||
ObjectValue
|
||||
)
|
||||
|
||||
var hexDigits []byte
|
||||
var valueTypes []ValueType
|
||||
|
||||
func init() {
|
||||
hexDigits = make([]byte, 256)
|
||||
for i := 0; i < len(hexDigits); i++ {
|
||||
hexDigits[i] = 255
|
||||
}
|
||||
for i := '0'; i <= '9'; i++ {
|
||||
hexDigits[i] = byte(i - '0')
|
||||
}
|
||||
for i := 'a'; i <= 'f'; i++ {
|
||||
hexDigits[i] = byte((i - 'a') + 10)
|
||||
}
|
||||
for i := 'A'; i <= 'F'; i++ {
|
||||
hexDigits[i] = byte((i - 'A') + 10)
|
||||
}
|
||||
valueTypes = make([]ValueType, 256)
|
||||
for i := 0; i < len(valueTypes); i++ {
|
||||
valueTypes[i] = InvalidValue
|
||||
}
|
||||
valueTypes['"'] = StringValue
|
||||
valueTypes['-'] = NumberValue
|
||||
valueTypes['0'] = NumberValue
|
||||
valueTypes['1'] = NumberValue
|
||||
valueTypes['2'] = NumberValue
|
||||
valueTypes['3'] = NumberValue
|
||||
valueTypes['4'] = NumberValue
|
||||
valueTypes['5'] = NumberValue
|
||||
valueTypes['6'] = NumberValue
|
||||
valueTypes['7'] = NumberValue
|
||||
valueTypes['8'] = NumberValue
|
||||
valueTypes['9'] = NumberValue
|
||||
valueTypes['t'] = BoolValue
|
||||
valueTypes['f'] = BoolValue
|
||||
valueTypes['n'] = NilValue
|
||||
valueTypes['['] = ArrayValue
|
||||
valueTypes['{'] = ObjectValue
|
||||
}
|
||||
|
||||
// Iterator is a io.Reader like object, with JSON specific read functions.
|
||||
// Error is not returned as return value, but stored as Error member on this iterator instance.
|
||||
type Iterator struct {
|
||||
cfg *frozenConfig
|
||||
reader io.Reader
|
||||
buf []byte
|
||||
head int
|
||||
tail int
|
||||
depth int
|
||||
captureStartedAt int
|
||||
captured []byte
|
||||
Error error
|
||||
Attachment interface{} // open for customized decoder
|
||||
}
|
||||
|
||||
// NewIterator creates an empty Iterator instance
|
||||
func NewIterator(cfg API) *Iterator {
|
||||
return &Iterator{
|
||||
cfg: cfg.(*frozenConfig),
|
||||
reader: nil,
|
||||
buf: nil,
|
||||
head: 0,
|
||||
tail: 0,
|
||||
depth: 0,
|
||||
}
|
||||
}
|
||||
|
||||
// Parse creates an Iterator instance from io.Reader
|
||||
func Parse(cfg API, reader io.Reader, bufSize int) *Iterator {
|
||||
return &Iterator{
|
||||
cfg: cfg.(*frozenConfig),
|
||||
reader: reader,
|
||||
buf: make([]byte, bufSize),
|
||||
head: 0,
|
||||
tail: 0,
|
||||
depth: 0,
|
||||
}
|
||||
}
|
||||
|
||||
// ParseBytes creates an Iterator instance from byte array
|
||||
func ParseBytes(cfg API, input []byte) *Iterator {
|
||||
return &Iterator{
|
||||
cfg: cfg.(*frozenConfig),
|
||||
reader: nil,
|
||||
buf: input,
|
||||
head: 0,
|
||||
tail: len(input),
|
||||
depth: 0,
|
||||
}
|
||||
}
|
||||
|
||||
// ParseString creates an Iterator instance from string
|
||||
func ParseString(cfg API, input string) *Iterator {
|
||||
return ParseBytes(cfg, []byte(input))
|
||||
}
|
||||
|
||||
// Pool returns a pool can provide more iterator with same configuration
|
||||
func (iter *Iterator) Pool() IteratorPool {
|
||||
return iter.cfg
|
||||
}
|
||||
|
||||
// Reset reuse iterator instance by specifying another reader
|
||||
func (iter *Iterator) Reset(reader io.Reader) *Iterator {
|
||||
iter.reader = reader
|
||||
iter.head = 0
|
||||
iter.tail = 0
|
||||
iter.depth = 0
|
||||
return iter
|
||||
}
|
||||
|
||||
// ResetBytes reuse iterator instance by specifying another byte array as input
|
||||
func (iter *Iterator) ResetBytes(input []byte) *Iterator {
|
||||
iter.reader = nil
|
||||
iter.buf = input
|
||||
iter.head = 0
|
||||
iter.tail = len(input)
|
||||
iter.depth = 0
|
||||
return iter
|
||||
}
|
||||
|
||||
// WhatIsNext gets ValueType of relatively next json element
|
||||
func (iter *Iterator) WhatIsNext() ValueType {
|
||||
valueType := valueTypes[iter.nextToken()]
|
||||
iter.unreadByte()
|
||||
return valueType
|
||||
}
|
||||
|
||||
func (iter *Iterator) skipWhitespacesWithoutLoadMore() bool {
|
||||
for i := iter.head; i < iter.tail; i++ {
|
||||
c := iter.buf[i]
|
||||
switch c {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
continue
|
||||
}
|
||||
iter.head = i
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func (iter *Iterator) isObjectEnd() bool {
|
||||
c := iter.nextToken()
|
||||
if c == ',' {
|
||||
return false
|
||||
}
|
||||
if c == '}' {
|
||||
return true
|
||||
}
|
||||
iter.ReportError("isObjectEnd", "object ended prematurely, unexpected char "+string([]byte{c}))
|
||||
return true
|
||||
}
|
||||
|
||||
func (iter *Iterator) nextToken() byte {
|
||||
// a variation of skip whitespaces, returning the next non-whitespace token
|
||||
for {
|
||||
for i := iter.head; i < iter.tail; i++ {
|
||||
c := iter.buf[i]
|
||||
switch c {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
continue
|
||||
}
|
||||
iter.head = i + 1
|
||||
return c
|
||||
}
|
||||
if !iter.loadMore() {
|
||||
return 0
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ReportError record a error in iterator instance with current position.
|
||||
func (iter *Iterator) ReportError(operation string, msg string) {
|
||||
if iter.Error != nil {
|
||||
if iter.Error != io.EOF {
|
||||
return
|
||||
}
|
||||
}
|
||||
peekStart := iter.head - 10
|
||||
if peekStart < 0 {
|
||||
peekStart = 0
|
||||
}
|
||||
peekEnd := iter.head + 10
|
||||
if peekEnd > iter.tail {
|
||||
peekEnd = iter.tail
|
||||
}
|
||||
parsing := string(iter.buf[peekStart:peekEnd])
|
||||
contextStart := iter.head - 50
|
||||
if contextStart < 0 {
|
||||
contextStart = 0
|
||||
}
|
||||
contextEnd := iter.head + 50
|
||||
if contextEnd > iter.tail {
|
||||
contextEnd = iter.tail
|
||||
}
|
||||
context := string(iter.buf[contextStart:contextEnd])
|
||||
iter.Error = fmt.Errorf("%s: %s, error found in #%v byte of ...|%s|..., bigger context ...|%s|...",
|
||||
operation, msg, iter.head-peekStart, parsing, context)
|
||||
}
|
||||
|
||||
// CurrentBuffer gets current buffer as string for debugging purpose
|
||||
func (iter *Iterator) CurrentBuffer() string {
|
||||
peekStart := iter.head - 10
|
||||
if peekStart < 0 {
|
||||
peekStart = 0
|
||||
}
|
||||
return fmt.Sprintf("parsing #%v byte, around ...|%s|..., whole buffer ...|%s|...", iter.head,
|
||||
string(iter.buf[peekStart:iter.head]), string(iter.buf[0:iter.tail]))
|
||||
}
|
||||
|
||||
func (iter *Iterator) readByte() (ret byte) {
|
||||
if iter.head == iter.tail {
|
||||
if iter.loadMore() {
|
||||
ret = iter.buf[iter.head]
|
||||
iter.head++
|
||||
return ret
|
||||
}
|
||||
return 0
|
||||
}
|
||||
ret = iter.buf[iter.head]
|
||||
iter.head++
|
||||
return ret
|
||||
}
|
||||
|
||||
func (iter *Iterator) loadMore() bool {
|
||||
if iter.reader == nil {
|
||||
if iter.Error == nil {
|
||||
iter.head = iter.tail
|
||||
iter.Error = io.EOF
|
||||
}
|
||||
return false
|
||||
}
|
||||
if iter.captured != nil {
|
||||
iter.captured = append(iter.captured,
|
||||
iter.buf[iter.captureStartedAt:iter.tail]...)
|
||||
iter.captureStartedAt = 0
|
||||
}
|
||||
for {
|
||||
n, err := iter.reader.Read(iter.buf)
|
||||
if n == 0 {
|
||||
if err != nil {
|
||||
if iter.Error == nil {
|
||||
iter.Error = err
|
||||
}
|
||||
return false
|
||||
}
|
||||
} else {
|
||||
iter.head = 0
|
||||
iter.tail = n
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (iter *Iterator) unreadByte() {
|
||||
if iter.Error != nil {
|
||||
return
|
||||
}
|
||||
iter.head--
|
||||
return
|
||||
}
|
||||
|
||||
// Read read the next JSON element as generic interface{}.
|
||||
func (iter *Iterator) Read() interface{} {
|
||||
valueType := iter.WhatIsNext()
|
||||
switch valueType {
|
||||
case StringValue:
|
||||
return iter.ReadString()
|
||||
case NumberValue:
|
||||
if iter.cfg.configBeforeFrozen.UseNumber {
|
||||
return json.Number(iter.readNumberAsString())
|
||||
}
|
||||
return iter.ReadFloat64()
|
||||
case NilValue:
|
||||
iter.skipFourBytes('n', 'u', 'l', 'l')
|
||||
return nil
|
||||
case BoolValue:
|
||||
return iter.ReadBool()
|
||||
case ArrayValue:
|
||||
arr := []interface{}{}
|
||||
iter.ReadArrayCB(func(iter *Iterator) bool {
|
||||
var elem interface{}
|
||||
iter.ReadVal(&elem)
|
||||
arr = append(arr, elem)
|
||||
return true
|
||||
})
|
||||
return arr
|
||||
case ObjectValue:
|
||||
obj := map[string]interface{}{}
|
||||
iter.ReadMapCB(func(Iter *Iterator, field string) bool {
|
||||
var elem interface{}
|
||||
iter.ReadVal(&elem)
|
||||
obj[field] = elem
|
||||
return true
|
||||
})
|
||||
return obj
|
||||
default:
|
||||
iter.ReportError("Read", fmt.Sprintf("unexpected value type: %v", valueType))
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// limit maximum depth of nesting, as allowed by https://tools.ietf.org/html/rfc7159#section-9
|
||||
const maxDepth = 10000
|
||||
|
||||
func (iter *Iterator) incrementDepth() (success bool) {
|
||||
iter.depth++
|
||||
if iter.depth <= maxDepth {
|
||||
return true
|
||||
}
|
||||
iter.ReportError("incrementDepth", "exceeded max depth")
|
||||
return false
|
||||
}
|
||||
|
||||
func (iter *Iterator) decrementDepth() (success bool) {
|
||||
iter.depth--
|
||||
if iter.depth >= 0 {
|
||||
return true
|
||||
}
|
||||
iter.ReportError("decrementDepth", "unexpected negative nesting")
|
||||
return false
|
||||
}
|
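For context, a minimal sketch of how downstream code might drive this token-level Iterator API (ParseString, WhatIsNext, ReadObject and friends from the vendored iter.go above). It is not part of the commit; feed-to-muc itself only pulls jsoniter in transitively. ConfigDefault comes from config.go, which is not shown in this excerpt, and the JSON payload is made up for illustration.

package main

import (
	"fmt"
	"io"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Build an iterator over an in-memory document and walk its top-level object.
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"title":"hello","unread":3}`)
	for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
		switch iter.WhatIsNext() {
		case jsoniter.StringValue:
			fmt.Println(field, "=", iter.ReadString())
		case jsoniter.NumberValue:
			fmt.Println(field, "=", iter.ReadInt())
		default:
			iter.Skip() // ignore values we do not care about
		}
	}
	// Errors are stored on the iterator rather than returned; io.EOF just marks the end.
	if iter.Error != nil && iter.Error != io.EOF {
		fmt.Println("parse error:", iter.Error)
	}
}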
64  vendor/github.com/json-iterator/go/iter_array.go  generated  vendored  Normal file
@@ -0,0 +1,64 @@
package jsoniter

// ReadArray read array element, tells if the array has more element to read.
func (iter *Iterator) ReadArray() (ret bool) {
	c := iter.nextToken()
	switch c {
	case 'n':
		iter.skipThreeBytes('u', 'l', 'l')
		return false // null
	case '[':
		c = iter.nextToken()
		if c != ']' {
			iter.unreadByte()
			return true
		}
		return false
	case ']':
		return false
	case ',':
		return true
	default:
		iter.ReportError("ReadArray", "expect [ or , or ] or n, but found "+string([]byte{c}))
		return
	}
}

// ReadArrayCB read array with callback
func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
	c := iter.nextToken()
	if c == '[' {
		if !iter.incrementDepth() {
			return false
		}
		c = iter.nextToken()
		if c != ']' {
			iter.unreadByte()
			if !callback(iter) {
				iter.decrementDepth()
				return false
			}
			c = iter.nextToken()
			for c == ',' {
				if !callback(iter) {
					iter.decrementDepth()
					return false
				}
				c = iter.nextToken()
			}
			if c != ']' {
				iter.ReportError("ReadArrayCB", "expect ] in the end, but found "+string([]byte{c}))
				iter.decrementDepth()
				return false
			}
			return iter.decrementDepth()
		}
		return iter.decrementDepth()
	}
	if c == 'n' {
		iter.skipThreeBytes('u', 'l', 'l')
		return true // null
	}
	iter.ReportError("ReadArrayCB", "expect [ or n, but found "+string([]byte{c}))
	return false
}
339  vendor/github.com/json-iterator/go/iter_float.go  generated  vendored  Normal file
@@ -0,0 +1,339 @@
package jsoniter

import (
	"encoding/json"
	"io"
	"math/big"
	"strconv"
	"strings"
	"unsafe"
)

var floatDigits []int8

const invalidCharForNumber = int8(-1)
const endOfNumber = int8(-2)
const dotInNumber = int8(-3)

func init() {
	floatDigits = make([]int8, 256)
	for i := 0; i < len(floatDigits); i++ {
		floatDigits[i] = invalidCharForNumber
	}
	for i := int8('0'); i <= int8('9'); i++ {
		floatDigits[i] = i - int8('0')
	}
	floatDigits[','] = endOfNumber
	floatDigits[']'] = endOfNumber
	floatDigits['}'] = endOfNumber
	floatDigits[' '] = endOfNumber
	floatDigits['\t'] = endOfNumber
	floatDigits['\n'] = endOfNumber
	floatDigits['.'] = dotInNumber
}

// ReadBigFloat read big.Float
func (iter *Iterator) ReadBigFloat() (ret *big.Float) {
	str := iter.readNumberAsString()
	if iter.Error != nil && iter.Error != io.EOF {
		return nil
	}
	prec := 64
	if len(str) > prec {
		prec = len(str)
	}
	val, _, err := big.ParseFloat(str, 10, uint(prec), big.ToZero)
	if err != nil {
		iter.Error = err
		return nil
	}
	return val
}

// ReadBigInt read big.Int
func (iter *Iterator) ReadBigInt() (ret *big.Int) {
	str := iter.readNumberAsString()
	if iter.Error != nil && iter.Error != io.EOF {
		return nil
	}
	ret = big.NewInt(0)
	var success bool
	ret, success = ret.SetString(str, 10)
	if !success {
		iter.ReportError("ReadBigInt", "invalid big int")
		return nil
	}
	return ret
}

//ReadFloat32 read float32
func (iter *Iterator) ReadFloat32() (ret float32) {
	c := iter.nextToken()
	if c == '-' {
		return -iter.readPositiveFloat32()
	}
	iter.unreadByte()
	return iter.readPositiveFloat32()
}

func (iter *Iterator) readPositiveFloat32() (ret float32) {
	i := iter.head
	// first char
	if i == iter.tail {
		return iter.readFloat32SlowPath()
	}
	c := iter.buf[i]
	i++
	ind := floatDigits[c]
	switch ind {
	case invalidCharForNumber:
		return iter.readFloat32SlowPath()
	case endOfNumber:
		iter.ReportError("readFloat32", "empty number")
		return
	case dotInNumber:
		iter.ReportError("readFloat32", "leading dot is invalid")
		return
	case 0:
		if i == iter.tail {
			return iter.readFloat32SlowPath()
		}
		c = iter.buf[i]
		switch c {
		case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
			iter.ReportError("readFloat32", "leading zero is invalid")
			return
		}
	}
	value := uint64(ind)
	// chars before dot
non_decimal_loop:
	for ; i < iter.tail; i++ {
		c = iter.buf[i]
		ind := floatDigits[c]
		switch ind {
		case invalidCharForNumber:
			return iter.readFloat32SlowPath()
		case endOfNumber:
			iter.head = i
			return float32(value)
		case dotInNumber:
			break non_decimal_loop
		}
		if value > uint64SafeToMultiple10 {
			return iter.readFloat32SlowPath()
		}
		value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind;
	}
	// chars after dot
	if c == '.' {
		i++
		decimalPlaces := 0
		if i == iter.tail {
			return iter.readFloat32SlowPath()
		}
		for ; i < iter.tail; i++ {
			c = iter.buf[i]
			ind := floatDigits[c]
			switch ind {
			case endOfNumber:
				if decimalPlaces > 0 && decimalPlaces < len(pow10) {
					iter.head = i
					return float32(float64(value) / float64(pow10[decimalPlaces]))
				}
				// too many decimal places
				return iter.readFloat32SlowPath()
			case invalidCharForNumber, dotInNumber:
				return iter.readFloat32SlowPath()
			}
			decimalPlaces++
			if value > uint64SafeToMultiple10 {
				return iter.readFloat32SlowPath()
			}
			value = (value << 3) + (value << 1) + uint64(ind)
		}
	}
	return iter.readFloat32SlowPath()
}

func (iter *Iterator) readNumberAsString() (ret string) {
	strBuf := [16]byte{}
	str := strBuf[0:0]
load_loop:
	for {
		for i := iter.head; i < iter.tail; i++ {
			c := iter.buf[i]
			switch c {
			case '+', '-', '.', 'e', 'E', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
				str = append(str, c)
				continue
			default:
				iter.head = i
				break load_loop
			}
		}
		if !iter.loadMore() {
			break
		}
	}
	if iter.Error != nil && iter.Error != io.EOF {
		return
	}
	if len(str) == 0 {
		iter.ReportError("readNumberAsString", "invalid number")
	}
	return *(*string)(unsafe.Pointer(&str))
}

func (iter *Iterator) readFloat32SlowPath() (ret float32) {
	str := iter.readNumberAsString()
	if iter.Error != nil && iter.Error != io.EOF {
		return
	}
	errMsg := validateFloat(str)
	if errMsg != "" {
		iter.ReportError("readFloat32SlowPath", errMsg)
		return
	}
	val, err := strconv.ParseFloat(str, 32)
	if err != nil {
		iter.Error = err
		return
	}
	return float32(val)
}

// ReadFloat64 read float64
func (iter *Iterator) ReadFloat64() (ret float64) {
	c := iter.nextToken()
	if c == '-' {
		return -iter.readPositiveFloat64()
	}
	iter.unreadByte()
	return iter.readPositiveFloat64()
}

func (iter *Iterator) readPositiveFloat64() (ret float64) {
	i := iter.head
	// first char
	if i == iter.tail {
		return iter.readFloat64SlowPath()
	}
	c := iter.buf[i]
	i++
	ind := floatDigits[c]
	switch ind {
	case invalidCharForNumber:
		return iter.readFloat64SlowPath()
	case endOfNumber:
		iter.ReportError("readFloat64", "empty number")
		return
	case dotInNumber:
		iter.ReportError("readFloat64", "leading dot is invalid")
		return
	case 0:
		if i == iter.tail {
			return iter.readFloat64SlowPath()
		}
		c = iter.buf[i]
		switch c {
		case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
			iter.ReportError("readFloat64", "leading zero is invalid")
			return
		}
	}
	value := uint64(ind)
	// chars before dot
non_decimal_loop:
	for ; i < iter.tail; i++ {
		c = iter.buf[i]
		ind := floatDigits[c]
		switch ind {
		case invalidCharForNumber:
			return iter.readFloat64SlowPath()
		case endOfNumber:
			iter.head = i
			return float64(value)
		case dotInNumber:
			break non_decimal_loop
		}
		if value > uint64SafeToMultiple10 {
			return iter.readFloat64SlowPath()
		}
		value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind;
	}
	// chars after dot
	if c == '.' {
		i++
		decimalPlaces := 0
		if i == iter.tail {
			return iter.readFloat64SlowPath()
		}
		for ; i < iter.tail; i++ {
			c = iter.buf[i]
			ind := floatDigits[c]
			switch ind {
			case endOfNumber:
				if decimalPlaces > 0 && decimalPlaces < len(pow10) {
					iter.head = i
					return float64(value) / float64(pow10[decimalPlaces])
				}
				// too many decimal places
				return iter.readFloat64SlowPath()
			case invalidCharForNumber, dotInNumber:
				return iter.readFloat64SlowPath()
			}
			decimalPlaces++
			if value > uint64SafeToMultiple10 {
				return iter.readFloat64SlowPath()
			}
			value = (value << 3) + (value << 1) + uint64(ind)
		}
	}
	return iter.readFloat64SlowPath()
}

func (iter *Iterator) readFloat64SlowPath() (ret float64) {
	str := iter.readNumberAsString()
	if iter.Error != nil && iter.Error != io.EOF {
		return
	}
	errMsg := validateFloat(str)
	if errMsg != "" {
		iter.ReportError("readFloat64SlowPath", errMsg)
		return
	}
	val, err := strconv.ParseFloat(str, 64)
	if err != nil {
		iter.Error = err
		return
	}
	return val
}

func validateFloat(str string) string {
	// strconv.ParseFloat is not validating `1.` or `1.e1`
	if len(str) == 0 {
		return "empty number"
	}
	if str[0] == '-' {
		return "-- is not valid"
	}
	dotPos := strings.IndexByte(str, '.')
	if dotPos != -1 {
		if dotPos == len(str)-1 {
			return "dot can not be last character"
		}
		switch str[dotPos+1] {
		case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
		default:
			return "missing digit after dot"
		}
	}
	return ""
}

// ReadNumber read json.Number
func (iter *Iterator) ReadNumber() (ret json.Number) {
	return json.Number(iter.readNumberAsString())
}
345  vendor/github.com/json-iterator/go/iter_int.go  generated  vendored  Normal file
@@ -0,0 +1,345 @@
package jsoniter

import (
	"math"
	"strconv"
)

var intDigits []int8

const uint32SafeToMultiply10 = uint32(0xffffffff)/10 - 1
const uint64SafeToMultiple10 = uint64(0xffffffffffffffff)/10 - 1

func init() {
	intDigits = make([]int8, 256)
	for i := 0; i < len(intDigits); i++ {
		intDigits[i] = invalidCharForNumber
	}
	for i := int8('0'); i <= int8('9'); i++ {
		intDigits[i] = i - int8('0')
	}
}

// ReadUint read uint
func (iter *Iterator) ReadUint() uint {
	if strconv.IntSize == 32 {
		return uint(iter.ReadUint32())
	}
	return uint(iter.ReadUint64())
}

// ReadInt read int
func (iter *Iterator) ReadInt() int {
	if strconv.IntSize == 32 {
		return int(iter.ReadInt32())
	}
	return int(iter.ReadInt64())
}

// ReadInt8 read int8
func (iter *Iterator) ReadInt8() (ret int8) {
	c := iter.nextToken()
	if c == '-' {
		val := iter.readUint32(iter.readByte())
		if val > math.MaxInt8+1 {
			iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10))
			return
		}
		return -int8(val)
	}
	val := iter.readUint32(c)
	if val > math.MaxInt8 {
		iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10))
		return
	}
	return int8(val)
}

// ReadUint8 read uint8
func (iter *Iterator) ReadUint8() (ret uint8) {
	val := iter.readUint32(iter.nextToken())
	if val > math.MaxUint8 {
		iter.ReportError("ReadUint8", "overflow: "+strconv.FormatInt(int64(val), 10))
		return
	}
	return uint8(val)
}

// ReadInt16 read int16
func (iter *Iterator) ReadInt16() (ret int16) {
	c := iter.nextToken()
	if c == '-' {
		val := iter.readUint32(iter.readByte())
		if val > math.MaxInt16+1 {
			iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10))
			return
		}
		return -int16(val)
	}
	val := iter.readUint32(c)
	if val > math.MaxInt16 {
		iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10))
		return
	}
	return int16(val)
}

// ReadUint16 read uint16
func (iter *Iterator) ReadUint16() (ret uint16) {
	val := iter.readUint32(iter.nextToken())
	if val > math.MaxUint16 {
		iter.ReportError("ReadUint16", "overflow: "+strconv.FormatInt(int64(val), 10))
		return
	}
	return uint16(val)
}

// ReadInt32 read int32
func (iter *Iterator) ReadInt32() (ret int32) {
	c := iter.nextToken()
	if c == '-' {
		val := iter.readUint32(iter.readByte())
		if val > math.MaxInt32+1 {
			iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10))
			return
		}
		return -int32(val)
	}
	val := iter.readUint32(c)
	if val > math.MaxInt32 {
		iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10))
		return
	}
	return int32(val)
}

// ReadUint32 read uint32
func (iter *Iterator) ReadUint32() (ret uint32) {
	return iter.readUint32(iter.nextToken())
}

func (iter *Iterator) readUint32(c byte) (ret uint32) {
	ind := intDigits[c]
	if ind == 0 {
		iter.assertInteger()
		return 0 // single zero
	}
	if ind == invalidCharForNumber {
		iter.ReportError("readUint32", "unexpected character: "+string([]byte{byte(ind)}))
		return
	}
	value := uint32(ind)
	if iter.tail-iter.head > 10 {
		i := iter.head
		ind2 := intDigits[iter.buf[i]]
		if ind2 == invalidCharForNumber {
			iter.head = i
			iter.assertInteger()
			return value
		}
		i++
		ind3 := intDigits[iter.buf[i]]
		if ind3 == invalidCharForNumber {
			iter.head = i
			iter.assertInteger()
			return value*10 + uint32(ind2)
		}
		//iter.head = i + 1
		//value = value * 100 + uint32(ind2) * 10 + uint32(ind3)
		i++
		ind4 := intDigits[iter.buf[i]]
		if ind4 == invalidCharForNumber {
			iter.head = i
			iter.assertInteger()
			return value*100 + uint32(ind2)*10 + uint32(ind3)
		}
		i++
		ind5 := intDigits[iter.buf[i]]
		if ind5 == invalidCharForNumber {
			iter.head = i
			iter.assertInteger()
			return value*1000 + uint32(ind2)*100 + uint32(ind3)*10 + uint32(ind4)
		}
		i++
		ind6 := intDigits[iter.buf[i]]
		if ind6 == invalidCharForNumber {
			iter.head = i
			iter.assertInteger()
			return value*10000 + uint32(ind2)*1000 + uint32(ind3)*100 + uint32(ind4)*10 + uint32(ind5)
		}
		i++
		ind7 := intDigits[iter.buf[i]]
		if ind7 == invalidCharForNumber {
			iter.head = i
			iter.assertInteger()
			return value*100000 + uint32(ind2)*10000 + uint32(ind3)*1000 + uint32(ind4)*100 + uint32(ind5)*10 + uint32(ind6)
		}
		i++
		ind8 := intDigits[iter.buf[i]]
		if ind8 == invalidCharForNumber {
			iter.head = i
			iter.assertInteger()
			return value*1000000 + uint32(ind2)*100000 + uint32(ind3)*10000 + uint32(ind4)*1000 + uint32(ind5)*100 + uint32(ind6)*10 + uint32(ind7)
		}
		i++
		ind9 := intDigits[iter.buf[i]]
		value = value*10000000 + uint32(ind2)*1000000 + uint32(ind3)*100000 + uint32(ind4)*10000 + uint32(ind5)*1000 + uint32(ind6)*100 + uint32(ind7)*10 + uint32(ind8)
		iter.head = i
		if ind9 == invalidCharForNumber {
			iter.assertInteger()
			return value
		}
	}
	for {
		for i := iter.head; i < iter.tail; i++ {
			ind = intDigits[iter.buf[i]]
			if ind == invalidCharForNumber {
				iter.head = i
				iter.assertInteger()
				return value
			}
			if value > uint32SafeToMultiply10 {
				value2 := (value << 3) + (value << 1) + uint32(ind)
				if value2 < value {
					iter.ReportError("readUint32", "overflow")
					return
				}
				value = value2
				continue
			}
			value = (value << 3) + (value << 1) + uint32(ind)
		}
		if !iter.loadMore() {
			iter.assertInteger()
			return value
		}
	}
}

// ReadInt64 read int64
func (iter *Iterator) ReadInt64() (ret int64) {
	c := iter.nextToken()
	if c == '-' {
		val := iter.readUint64(iter.readByte())
		if val > math.MaxInt64+1 {
			iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10))
			return
		}
		return -int64(val)
	}
	val := iter.readUint64(c)
	if val > math.MaxInt64 {
		iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10))
		return
	}
	return int64(val)
}

// ReadUint64 read uint64
func (iter *Iterator) ReadUint64() uint64 {
	return iter.readUint64(iter.nextToken())
}

func (iter *Iterator) readUint64(c byte) (ret uint64) {
	ind := intDigits[c]
	if ind == 0 {
		iter.assertInteger()
		return 0 // single zero
	}
	if ind == invalidCharForNumber {
		iter.ReportError("readUint64", "unexpected character: "+string([]byte{byte(ind)}))
		return
	}
	value := uint64(ind)
	if iter.tail-iter.head > 10 {
		i := iter.head
		ind2 := intDigits[iter.buf[i]]
		if ind2 == invalidCharForNumber {
			iter.head = i
			iter.assertInteger()
			return value
		}
		i++
		ind3 := intDigits[iter.buf[i]]
		if ind3 == invalidCharForNumber {
			iter.head = i
			iter.assertInteger()
			return value*10 + uint64(ind2)
		}
		//iter.head = i + 1
		//value = value * 100 + uint32(ind2) * 10 + uint32(ind3)
		i++
		ind4 := intDigits[iter.buf[i]]
		if ind4 == invalidCharForNumber {
			iter.head = i
			iter.assertInteger()
			return value*100 + uint64(ind2)*10 + uint64(ind3)
		}
		i++
		ind5 := intDigits[iter.buf[i]]
		if ind5 == invalidCharForNumber {
			iter.head = i
			iter.assertInteger()
			return value*1000 + uint64(ind2)*100 + uint64(ind3)*10 + uint64(ind4)
		}
		i++
		ind6 := intDigits[iter.buf[i]]
		if ind6 == invalidCharForNumber {
			iter.head = i
			iter.assertInteger()
			return value*10000 + uint64(ind2)*1000 + uint64(ind3)*100 + uint64(ind4)*10 + uint64(ind5)
		}
		i++
		ind7 := intDigits[iter.buf[i]]
		if ind7 == invalidCharForNumber {
			iter.head = i
			iter.assertInteger()
			return value*100000 + uint64(ind2)*10000 + uint64(ind3)*1000 + uint64(ind4)*100 + uint64(ind5)*10 + uint64(ind6)
		}
		i++
		ind8 := intDigits[iter.buf[i]]
		if ind8 == invalidCharForNumber {
			iter.head = i
			iter.assertInteger()
			return value*1000000 + uint64(ind2)*100000 + uint64(ind3)*10000 + uint64(ind4)*1000 + uint64(ind5)*100 + uint64(ind6)*10 + uint64(ind7)
		}
		i++
		ind9 := intDigits[iter.buf[i]]
		value = value*10000000 + uint64(ind2)*1000000 + uint64(ind3)*100000 + uint64(ind4)*10000 + uint64(ind5)*1000 + uint64(ind6)*100 + uint64(ind7)*10 + uint64(ind8)
		iter.head = i
		if ind9 == invalidCharForNumber {
			iter.assertInteger()
			return value
		}
	}
	for {
		for i := iter.head; i < iter.tail; i++ {
			ind = intDigits[iter.buf[i]]
			if ind == invalidCharForNumber {
				iter.head = i
				iter.assertInteger()
				return value
			}
			if value > uint64SafeToMultiple10 {
				value2 := (value << 3) + (value << 1) + uint64(ind)
				if value2 < value {
					iter.ReportError("readUint64", "overflow")
					return
				}
				value = value2
				continue
			}
			value = (value << 3) + (value << 1) + uint64(ind)
		}
		if !iter.loadMore() {
			iter.assertInteger()
			return value
		}
	}
}

func (iter *Iterator) assertInteger() {
	if iter.head < len(iter.buf) && iter.buf[iter.head] == '.' {
		iter.ReportError("assertInteger", "can not decode float as int")
	}
}
267  vendor/github.com/json-iterator/go/iter_object.go  generated  vendored  Normal file
@@ -0,0 +1,267 @@
package jsoniter

import (
	"fmt"
	"strings"
)

// ReadObject read one field from object.
// If object ended, returns empty string.
// Otherwise, returns the field name.
func (iter *Iterator) ReadObject() (ret string) {
	c := iter.nextToken()
	switch c {
	case 'n':
		iter.skipThreeBytes('u', 'l', 'l')
		return "" // null
	case '{':
		c = iter.nextToken()
		if c == '"' {
			iter.unreadByte()
			field := iter.ReadString()
			c = iter.nextToken()
			if c != ':' {
				iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
			}
			return field
		}
		if c == '}' {
			return "" // end of object
		}
		iter.ReportError("ReadObject", `expect " after {, but found `+string([]byte{c}))
		return
	case ',':
		field := iter.ReadString()
		c = iter.nextToken()
		if c != ':' {
			iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
		}
		return field
	case '}':
		return "" // end of object
	default:
		iter.ReportError("ReadObject", fmt.Sprintf(`expect { or , or } or n, but found %s`, string([]byte{c})))
		return
	}
}

// CaseInsensitive
func (iter *Iterator) readFieldHash() int64 {
	hash := int64(0x811c9dc5)
	c := iter.nextToken()
	if c != '"' {
		iter.ReportError("readFieldHash", `expect ", but found `+string([]byte{c}))
		return 0
	}
	for {
		for i := iter.head; i < iter.tail; i++ {
			// require ascii string and no escape
			b := iter.buf[i]
			if b == '\\' {
				iter.head = i
				for _, b := range iter.readStringSlowPath() {
					if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive {
						b += 'a' - 'A'
					}
					hash ^= int64(b)
					hash *= 0x1000193
				}
				c = iter.nextToken()
				if c != ':' {
					iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
					return 0
				}
				return hash
			}
			if b == '"' {
				iter.head = i + 1
				c = iter.nextToken()
				if c != ':' {
					iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
					return 0
				}
				return hash
			}
			if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive {
				b += 'a' - 'A'
			}
			hash ^= int64(b)
			hash *= 0x1000193
		}
		if !iter.loadMore() {
			iter.ReportError("readFieldHash", `incomplete field name`)
			return 0
		}
	}
}

func calcHash(str string, caseSensitive bool) int64 {
	if !caseSensitive {
		str = strings.ToLower(str)
	}
	hash := int64(0x811c9dc5)
	for _, b := range []byte(str) {
		hash ^= int64(b)
		hash *= 0x1000193
	}
	return int64(hash)
}

// ReadObjectCB read object with callback, the key is ascii only and field name not copied
func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
	c := iter.nextToken()
	var field string
	if c == '{' {
		if !iter.incrementDepth() {
			return false
		}
		c = iter.nextToken()
		if c == '"' {
			iter.unreadByte()
			field = iter.ReadString()
			c = iter.nextToken()
			if c != ':' {
				iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
			}
			if !callback(iter, field) {
				iter.decrementDepth()
				return false
			}
			c = iter.nextToken()
			for c == ',' {
				field = iter.ReadString()
				c = iter.nextToken()
				if c != ':' {
					iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
				}
				if !callback(iter, field) {
					iter.decrementDepth()
					return false
				}
				c = iter.nextToken()
			}
			if c != '}' {
				iter.ReportError("ReadObjectCB", `object not ended with }`)
				iter.decrementDepth()
				return false
			}
			return iter.decrementDepth()
		}
		if c == '}' {
			return iter.decrementDepth()
		}
		iter.ReportError("ReadObjectCB", `expect " after {, but found `+string([]byte{c}))
		iter.decrementDepth()
		return false
	}
	if c == 'n' {
		iter.skipThreeBytes('u', 'l', 'l')
		return true // null
	}
	iter.ReportError("ReadObjectCB", `expect { or n, but found `+string([]byte{c}))
	return false
}

// ReadMapCB read map with callback, the key can be any string
func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
	c := iter.nextToken()
	if c == '{' {
		if !iter.incrementDepth() {
			return false
		}
		c = iter.nextToken()
		if c == '"' {
			iter.unreadByte()
			field := iter.ReadString()
			if iter.nextToken() != ':' {
				iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
				iter.decrementDepth()
				return false
			}
			if !callback(iter, field) {
				iter.decrementDepth()
				return false
			}
			c = iter.nextToken()
			for c == ',' {
				field = iter.ReadString()
				if iter.nextToken() != ':' {
					iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
					iter.decrementDepth()
					return false
				}
				if !callback(iter, field) {
					iter.decrementDepth()
					return false
				}
				c = iter.nextToken()
			}
			if c != '}' {
				iter.ReportError("ReadMapCB", `object not ended with }`)
				iter.decrementDepth()
				return false
			}
			return iter.decrementDepth()
		}
		if c == '}' {
			return iter.decrementDepth()
		}
		iter.ReportError("ReadMapCB", `expect " after {, but found `+string([]byte{c}))
		iter.decrementDepth()
		return false
	}
	if c == 'n' {
		iter.skipThreeBytes('u', 'l', 'l')
		return true // null
	}
	iter.ReportError("ReadMapCB", `expect { or n, but found `+string([]byte{c}))
	return false
}

func (iter *Iterator) readObjectStart() bool {
	c := iter.nextToken()
	if c == '{' {
		c = iter.nextToken()
		if c == '}' {
			return false
		}
		iter.unreadByte()
		return true
	} else if c == 'n' {
		iter.skipThreeBytes('u', 'l', 'l')
		return false
	}
	iter.ReportError("readObjectStart", "expect { or n, but found "+string([]byte{c}))
	return false
}

func (iter *Iterator) readObjectFieldAsBytes() (ret []byte) {
	str := iter.ReadStringAsSlice()
	if iter.skipWhitespacesWithoutLoadMore() {
		if ret == nil {
			ret = make([]byte, len(str))
			copy(ret, str)
		}
		if !iter.loadMore() {
			return
		}
	}
	if iter.buf[iter.head] != ':' {
		iter.ReportError("readObjectFieldAsBytes", "expect : after object field, but found "+string([]byte{iter.buf[iter.head]}))
		return
	}
	iter.head++
	if iter.skipWhitespacesWithoutLoadMore() {
		if ret == nil {
			ret = make([]byte, len(str))
			copy(ret, str)
		}
		if !iter.loadMore() {
			return
		}
	}
	if ret == nil {
		return str
	}
	return ret
}
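As an aside, a minimal sketch of the callback-style object walk that ReadMapCB (above) provides; it is illustrative only and not part of the diff. The JSON payload and key names are made up, and ConfigDefault comes from config.go, which this excerpt does not include.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	data := []byte(`{"feed":"debian-news","items":2}`)
	iter := jsoniter.ParseBytes(jsoniter.ConfigDefault, data)

	// ReadMapCB hands every key to the callback; returning false stops the walk.
	iter.ReadMapCB(func(it *jsoniter.Iterator, key string) bool {
		fmt.Println("key:", key)
		it.Skip() // discard the value, we only care about the keys here
		return true
	})
}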
130  vendor/github.com/json-iterator/go/iter_skip.go  generated  vendored  Normal file
@@ -0,0 +1,130 @@
package jsoniter

import "fmt"

// ReadNil reads a json object as nil and
// returns whether it's a nil or not
func (iter *Iterator) ReadNil() (ret bool) {
	c := iter.nextToken()
	if c == 'n' {
		iter.skipThreeBytes('u', 'l', 'l') // null
		return true
	}
	iter.unreadByte()
	return false
}

// ReadBool reads a json object as BoolValue
func (iter *Iterator) ReadBool() (ret bool) {
	c := iter.nextToken()
	if c == 't' {
		iter.skipThreeBytes('r', 'u', 'e')
		return true
	}
	if c == 'f' {
		iter.skipFourBytes('a', 'l', 's', 'e')
		return false
	}
	iter.ReportError("ReadBool", "expect t or f, but found "+string([]byte{c}))
	return
}

// SkipAndReturnBytes skip next JSON element, and return its content as []byte.
// The []byte can be kept, it is a copy of data.
func (iter *Iterator) SkipAndReturnBytes() []byte {
	iter.startCapture(iter.head)
	iter.Skip()
	return iter.stopCapture()
}

// SkipAndAppendBytes skips next JSON element and appends its content to
// buffer, returning the result.
func (iter *Iterator) SkipAndAppendBytes(buf []byte) []byte {
	iter.startCaptureTo(buf, iter.head)
	iter.Skip()
	return iter.stopCapture()
}

func (iter *Iterator) startCaptureTo(buf []byte, captureStartedAt int) {
	if iter.captured != nil {
		panic("already in capture mode")
	}
	iter.captureStartedAt = captureStartedAt
	iter.captured = buf
}

func (iter *Iterator) startCapture(captureStartedAt int) {
	iter.startCaptureTo(make([]byte, 0, 32), captureStartedAt)
}

func (iter *Iterator) stopCapture() []byte {
	if iter.captured == nil {
		panic("not in capture mode")
	}
	captured := iter.captured
	remaining := iter.buf[iter.captureStartedAt:iter.head]
	iter.captureStartedAt = -1
	iter.captured = nil
	return append(captured, remaining...)
}

// Skip skips a json object and positions to relatively the next json object
func (iter *Iterator) Skip() {
	c := iter.nextToken()
	switch c {
	case '"':
		iter.skipString()
	case 'n':
		iter.skipThreeBytes('u', 'l', 'l') // null
	case 't':
		iter.skipThreeBytes('r', 'u', 'e') // true
	case 'f':
		iter.skipFourBytes('a', 'l', 's', 'e') // false
	case '0':
		iter.unreadByte()
		iter.ReadFloat32()
	case '-', '1', '2', '3', '4', '5', '6', '7', '8', '9':
		iter.skipNumber()
	case '[':
		iter.skipArray()
	case '{':
		iter.skipObject()
	default:
		iter.ReportError("Skip", fmt.Sprintf("do not know how to skip: %v", c))
		return
	}
}

func (iter *Iterator) skipFourBytes(b1, b2, b3, b4 byte) {
	if iter.readByte() != b1 {
		iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
		return
	}
	if iter.readByte() != b2 {
		iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
		return
	}
	if iter.readByte() != b3 {
		iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
		return
	}
	if iter.readByte() != b4 {
		iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
		return
	}
}

func (iter *Iterator) skipThreeBytes(b1, b2, b3 byte) {
	if iter.readByte() != b1 {
		iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3})))
		return
	}
	if iter.readByte() != b2 {
		iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3})))
		return
	}
	if iter.readByte() != b3 {
		iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3})))
		return
	}
}
163  vendor/github.com/json-iterator/go/iter_skip_sloppy.go  generated  vendored  Normal file
@@ -0,0 +1,163 @@
//+build jsoniter_sloppy

package jsoniter

// sloppy but faster implementation, do not validate the input json

func (iter *Iterator) skipNumber() {
	for {
		for i := iter.head; i < iter.tail; i++ {
			c := iter.buf[i]
			switch c {
			case ' ', '\n', '\r', '\t', ',', '}', ']':
				iter.head = i
				return
			}
		}
		if !iter.loadMore() {
			return
		}
	}
}

func (iter *Iterator) skipArray() {
	level := 1
	if !iter.incrementDepth() {
		return
	}
	for {
		for i := iter.head; i < iter.tail; i++ {
			switch iter.buf[i] {
			case '"': // If inside string, skip it
				iter.head = i + 1
				iter.skipString()
				i = iter.head - 1 // it will be i++ soon
			case '[': // If open symbol, increase level
				level++
				if !iter.incrementDepth() {
					return
				}
			case ']': // If close symbol, increase level
				level--
				if !iter.decrementDepth() {
					return
				}

				// If we have returned to the original level, we're done
				if level == 0 {
					iter.head = i + 1
					return
				}
			}
		}
		if !iter.loadMore() {
			iter.ReportError("skipObject", "incomplete array")
			return
		}
	}
}

func (iter *Iterator) skipObject() {
	level := 1
	if !iter.incrementDepth() {
		return
	}

	for {
		for i := iter.head; i < iter.tail; i++ {
			switch iter.buf[i] {
			case '"': // If inside string, skip it
				iter.head = i + 1
				iter.skipString()
				i = iter.head - 1 // it will be i++ soon
			case '{': // If open symbol, increase level
				level++
				if !iter.incrementDepth() {
					return
				}
			case '}': // If close symbol, increase level
				level--
				if !iter.decrementDepth() {
					return
				}

				// If we have returned to the original level, we're done
				if level == 0 {
					iter.head = i + 1
					return
				}
			}
		}
		if !iter.loadMore() {
			iter.ReportError("skipObject", "incomplete object")
			return
		}
	}
}

func (iter *Iterator) skipString() {
	for {
		end, escaped := iter.findStringEnd()
		if end == -1 {
			if !iter.loadMore() {
				iter.ReportError("skipString", "incomplete string")
				return
			}
			if escaped {
				iter.head = 1 // skip the first char as last char read is \
			}
		} else {
			iter.head = end
			return
		}
	}
}

// adapted from: https://github.com/buger/jsonparser/blob/master/parser.go
// Tries to find the end of string
// Support if string contains escaped quote symbols.
func (iter *Iterator) findStringEnd() (int, bool) {
	escaped := false
	for i := iter.head; i < iter.tail; i++ {
		c := iter.buf[i]
		if c == '"' {
			if !escaped {
				return i + 1, false
			}
			j := i - 1
			for {
				if j < iter.head || iter.buf[j] != '\\' {
					// even number of backslashes
					// either end of buffer, or " found
					return i + 1, true
				}
				j--
				if j < iter.head || iter.buf[j] != '\\' {
					// odd number of backslashes
					// it is \" or \\\"
					break
				}
				j--
			}
		} else if c == '\\' {
			escaped = true
		}
	}
	j := iter.tail - 1
	for {
		if j < iter.head || iter.buf[j] != '\\' {
			// even number of backslashes
			// either end of buffer, or " found
			return -1, false // do not end with \
		}
		j--
		if j < iter.head || iter.buf[j] != '\\' {
			// odd number of backslashes
			// it is \" or \\\"
			break
		}
		j--

	}
	return -1, true // end with \
}
99  vendor/github.com/json-iterator/go/iter_skip_strict.go  generated  vendored  Normal file
@@ -0,0 +1,99 @@
//+build !jsoniter_sloppy

package jsoniter

import (
	"fmt"
	"io"
)

func (iter *Iterator) skipNumber() {
	if !iter.trySkipNumber() {
		iter.unreadByte()
		if iter.Error != nil && iter.Error != io.EOF {
			return
		}
		iter.ReadFloat64()
		if iter.Error != nil && iter.Error != io.EOF {
			iter.Error = nil
			iter.ReadBigFloat()
		}
	}
}

func (iter *Iterator) trySkipNumber() bool {
	dotFound := false
	for i := iter.head; i < iter.tail; i++ {
		c := iter.buf[i]
		switch c {
		case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
		case '.':
			if dotFound {
				iter.ReportError("validateNumber", `more than one dot found in number`)
				return true // already failed
			}
			if i+1 == iter.tail {
				return false
			}
			c = iter.buf[i+1]
			switch c {
			case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
			default:
				iter.ReportError("validateNumber", `missing digit after dot`)
				return true // already failed
			}
			dotFound = true
		default:
			switch c {
			case ',', ']', '}', ' ', '\t', '\n', '\r':
				if iter.head == i {
					return false // if - without following digits
				}
				iter.head = i
				return true // must be valid
			}
			return false // may be invalid
		}
	}
	return false
}

func (iter *Iterator) skipString() {
	if !iter.trySkipString() {
		iter.unreadByte()
		iter.ReadString()
	}
}

func (iter *Iterator) trySkipString() bool {
	for i := iter.head; i < iter.tail; i++ {
		c := iter.buf[i]
		if c == '"' {
			iter.head = i + 1
			return true // valid
		} else if c == '\\' {
			return false
		} else if c < ' ' {
			iter.ReportError("trySkipString",
				fmt.Sprintf(`invalid control character found: %d`, c))
			return true // already failed
		}
	}
	return false
}

func (iter *Iterator) skipObject() {
	iter.unreadByte()
	iter.ReadObjectCB(func(iter *Iterator, field string) bool {
		iter.Skip()
		return true
	})
}

func (iter *Iterator) skipArray() {
	iter.unreadByte()
	iter.ReadArrayCB(func(iter *Iterator) bool {
		iter.Skip()
		return true
	})
}
215  vendor/github.com/json-iterator/go/iter_str.go  generated  vendored  Normal file
@@ -0,0 +1,215 @@
package jsoniter

import (
	"fmt"
	"unicode/utf16"
)

// ReadString read string from iterator
func (iter *Iterator) ReadString() (ret string) {
	c := iter.nextToken()
	if c == '"' {
		for i := iter.head; i < iter.tail; i++ {
			c := iter.buf[i]
			if c == '"' {
				ret = string(iter.buf[iter.head:i])
				iter.head = i + 1
				return ret
			} else if c == '\\' {
				break
			} else if c < ' ' {
				iter.ReportError("ReadString",
					fmt.Sprintf(`invalid control character found: %d`, c))
				return
			}
		}
		return iter.readStringSlowPath()
	} else if c == 'n' {
		iter.skipThreeBytes('u', 'l', 'l')
		return ""
	}
	iter.ReportError("ReadString", `expects " or n, but found `+string([]byte{c}))
	return
}

func (iter *Iterator) readStringSlowPath() (ret string) {
	var str []byte
	var c byte
	for iter.Error == nil {
		c = iter.readByte()
		if c == '"' {
			return string(str)
		}
		if c == '\\' {
			c = iter.readByte()
			str = iter.readEscapedChar(c, str)
		} else {
			str = append(str, c)
		}
	}
	iter.ReportError("readStringSlowPath", "unexpected end of input")
	return
}

func (iter *Iterator) readEscapedChar(c byte, str []byte) []byte {
	switch c {
	case 'u':
		r := iter.readU4()
		if utf16.IsSurrogate(r) {
			c = iter.readByte()
			if iter.Error != nil {
				return nil
			}
			if c != '\\' {
				iter.unreadByte()
				str = appendRune(str, r)
				return str
			}
			c = iter.readByte()
			if iter.Error != nil {
				return nil
			}
			if c != 'u' {
				str = appendRune(str, r)
				return iter.readEscapedChar(c, str)
			}
			r2 := iter.readU4()
			if iter.Error != nil {
				return nil
			}
			combined := utf16.DecodeRune(r, r2)
			if combined == '\uFFFD' {
				str = appendRune(str, r)
				str = appendRune(str, r2)
			} else {
				str = appendRune(str, combined)
			}
		} else {
			str = appendRune(str, r)
		}
	case '"':
		str = append(str, '"')
	case '\\':
		str = append(str, '\\')
	case '/':
		str = append(str, '/')
	case 'b':
		str = append(str, '\b')
	case 'f':
		str = append(str, '\f')
	case 'n':
		str = append(str, '\n')
	case 'r':
		str = append(str, '\r')
	case 't':
		str = append(str, '\t')
	default:
		iter.ReportError("readEscapedChar",
			`invalid escape char after \`)
		return nil
	}
	return str
}

// ReadStringAsSlice read string from iterator without copying into string form.
// The []byte can not be kept, as it will change after next iterator call.
func (iter *Iterator) ReadStringAsSlice() (ret []byte) {
	c := iter.nextToken()
	if c == '"' {
		for i := iter.head; i < iter.tail; i++ {
			// require ascii string and no escape
			// for: field name, base64, number
			if iter.buf[i] == '"' {
				// fast path: reuse the underlying buffer
				ret = iter.buf[iter.head:i]
				iter.head = i + 1
				return ret
			}
		}
		readLen := iter.tail - iter.head
		copied := make([]byte, readLen, readLen*2)
		copy(copied, iter.buf[iter.head:iter.tail])
		iter.head = iter.tail
		for iter.Error == nil {
			c := iter.readByte()
			if c == '"' {
				return copied
			}
			copied = append(copied, c)
		}
		return copied
	}
	iter.ReportError("ReadStringAsSlice", `expects " or n, but found `+string([]byte{c}))
	return
}

func (iter *Iterator) readU4() (ret rune) {
	for i := 0; i < 4; i++ {
		c := iter.readByte()
		if iter.Error != nil {
			return
		}
		if c >= '0' && c <= '9' {
			ret = ret*16 + rune(c-'0')
		} else if c >= 'a' && c <= 'f' {
			ret = ret*16 + rune(c-'a'+10)
		} else if c >= 'A' && c <= 'F' {
			ret = ret*16 + rune(c-'A'+10)
		} else {
			iter.ReportError("readU4", "expects 0~9 or a~f, but found "+string([]byte{c}))
			return
		}
	}
	return ret
}

const (
	t1 = 0x00 // 0000 0000
	tx = 0x80 // 1000 0000
	t2 = 0xC0 // 1100 0000
	t3 = 0xE0 // 1110 0000
	t4 = 0xF0 // 1111 0000
	t5 = 0xF8 // 1111 1000

	maskx = 0x3F // 0011 1111
	mask2 = 0x1F // 0001 1111
	mask3 = 0x0F // 0000 1111
	mask4 = 0x07 // 0000 0111

	rune1Max = 1<<7 - 1
	rune2Max = 1<<11 - 1
	rune3Max = 1<<16 - 1

	surrogateMin = 0xD800
	surrogateMax = 0xDFFF

	maxRune   = '\U0010FFFF' // Maximum valid Unicode code point.
	runeError = '\uFFFD'     // the "error" Rune or "Unicode replacement character"
)

func appendRune(p []byte, r rune) []byte {
	// Negative values are erroneous. Making it unsigned addresses the problem.
	switch i := uint32(r); {
	case i <= rune1Max:
		p = append(p, byte(r))
		return p
	case i <= rune2Max:
		p = append(p, t2|byte(r>>6))
		p = append(p, tx|byte(r)&maskx)
		return p
	case i > maxRune, surrogateMin <= i && i <= surrogateMax:
		r = runeError
		fallthrough
	case i <= rune3Max:
		p = append(p, t3|byte(r>>12))
		p = append(p, tx|byte(r>>6)&maskx)
		p = append(p, tx|byte(r)&maskx)
		return p
	default:
		p = append(p, t4|byte(r>>18))
		p = append(p, tx|byte(r>>12)&maskx)
		p = append(p, tx|byte(r>>6)&maskx)
		p = append(p, tx|byte(r)&maskx)
		return p
	}
}
18  vendor/github.com/json-iterator/go/jsoniter.go  generated  vendored  Normal file
@@ -0,0 +1,18 @@
// Package jsoniter implements encoding and decoding of JSON as defined in
// RFC 4627 and provides interfaces with identical syntax of standard lib encoding/json.
// Converting from encoding/json to jsoniter is no more than replacing the package with jsoniter
// and variable type declarations (if any).
// jsoniter interfaces gives 100% compatibility with code using standard lib.
//
// "JSON and Go"
// (https://golang.org/doc/articles/json_and_go.html)
// gives a description of how Marshal/Unmarshal operate
// between arbitrary or predefined json objects and bytes,
// and it applies to jsoniter.Marshal/Unmarshal as well.
//
// Besides, jsoniter.Iterator provides a different set of interfaces
// iterating given bytes/string/reader
// and yielding parsed elements one by one.
// This set of interfaces reads input as required and gives
// better performance.
package jsoniter
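The package doc above advertises drop-in compatibility with encoding/json; a minimal sketch of that usage pattern follows. It is not part of the commit, the Entry struct is made up for illustration, and ConfigCompatibleWithStandardLibrary is defined in config.go, which is outside this excerpt.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// Entry is a stand-in struct for this sketch.
type Entry struct {
	Title string `json:"title"`
	Link  string `json:"link"`
}

func main() {
	// Swap encoding/json for a jsoniter API value with the same behaviour.
	var json = jsoniter.ConfigCompatibleWithStandardLibrary

	out, err := json.Marshal(Entry{Title: "hello", Link: "https://example.org"})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))

	var e Entry
	if err := json.Unmarshal(out, &e); err != nil {
		panic(err)
	}
	fmt.Println(e.Title, e.Link)
}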
42  vendor/github.com/json-iterator/go/pool.go  generated  vendored  Normal file
@@ -0,0 +1,42 @@
package jsoniter

import (
	"io"
)

// IteratorPool a thread safe pool of iterators with same configuration
type IteratorPool interface {
	BorrowIterator(data []byte) *Iterator
	ReturnIterator(iter *Iterator)
}

// StreamPool a thread safe pool of streams with same configuration
type StreamPool interface {
	BorrowStream(writer io.Writer) *Stream
	ReturnStream(stream *Stream)
}

func (cfg *frozenConfig) BorrowStream(writer io.Writer) *Stream {
	stream := cfg.streamPool.Get().(*Stream)
	stream.Reset(writer)
	return stream
}

func (cfg *frozenConfig) ReturnStream(stream *Stream) {
	stream.out = nil
	stream.Error = nil
	stream.Attachment = nil
	cfg.streamPool.Put(stream)
}

func (cfg *frozenConfig) BorrowIterator(data []byte) *Iterator {
	iter := cfg.iteratorPool.Get().(*Iterator)
	iter.ResetBytes(data)
	return iter
}

func (cfg *frozenConfig) ReturnIterator(iter *Iterator) {
	iter.Error = nil
	iter.Attachment = nil
	cfg.iteratorPool.Put(iter)
}
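A short sketch of the borrow/return pattern the pool above enables, so repeated parses reuse buffers instead of allocating fresh iterators. Again illustrative only: the payload is invented, and exposing BorrowIterator/ReturnIterator through ConfigDefault relies on the API definition in config.go, which is not shown here.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	data := []byte(`[1, 2, 3]`)

	// Borrow an iterator from the shared pool and hand it back when done.
	iter := jsoniter.ConfigDefault.BorrowIterator(data)
	defer jsoniter.ConfigDefault.ReturnIterator(iter)

	sum := 0
	for iter.ReadArray() {
		sum += iter.ReadInt()
	}
	fmt.Println("sum:", sum)
}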
337 vendor/github.com/json-iterator/go/reflect.go generated vendored Normal file
@ -0,0 +1,337 @@
package jsoniter
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"unsafe"
|
||||
|
||||
"github.com/modern-go/reflect2"
|
||||
)
|
||||
|
||||
// ValDecoder is an internal type registered to cache as needed.
|
||||
// Don't confuse jsoniter.ValDecoder with json.Decoder.
|
||||
// For json.Decoder's adapter, refer to jsoniter.AdapterDecoder(todo link).
|
||||
//
|
||||
// Reflection on type to create decoders, which is then cached
|
||||
// Reflection on value is avoided as we can, as the reflect.Value itself will allocate, with following exceptions
|
||||
// 1. create instance of new value, for example *int will need a int to be allocated
|
||||
// 2. append to slice, if the existing cap is not enough, allocate will be done using Reflect.New
|
||||
// 3. assignment to map, both key and value will be reflect.Value
|
||||
// For a simple struct binding, it will be reflect.Value free and allocation free
|
||||
type ValDecoder interface {
|
||||
Decode(ptr unsafe.Pointer, iter *Iterator)
|
||||
}
|
||||
|
||||
// ValEncoder is an internal type registered to cache as needed.
|
||||
// Don't confuse jsoniter.ValEncoder with json.Encoder.
|
||||
// For json.Encoder's adapter, refer to jsoniter.AdapterEncoder(todo godoc link).
|
||||
type ValEncoder interface {
|
||||
IsEmpty(ptr unsafe.Pointer) bool
|
||||
Encode(ptr unsafe.Pointer, stream *Stream)
|
||||
}
|
||||
|
||||
type checkIsEmpty interface {
|
||||
IsEmpty(ptr unsafe.Pointer) bool
|
||||
}
|
||||
|
||||
type ctx struct {
|
||||
*frozenConfig
|
||||
prefix string
|
||||
encoders map[reflect2.Type]ValEncoder
|
||||
decoders map[reflect2.Type]ValDecoder
|
||||
}
|
||||
|
||||
func (b *ctx) caseSensitive() bool {
|
||||
if b.frozenConfig == nil {
|
||||
// default is case-insensitive
|
||||
return false
|
||||
}
|
||||
return b.frozenConfig.caseSensitive
|
||||
}
|
||||
|
||||
func (b *ctx) append(prefix string) *ctx {
|
||||
return &ctx{
|
||||
frozenConfig: b.frozenConfig,
|
||||
prefix: b.prefix + " " + prefix,
|
||||
encoders: b.encoders,
|
||||
decoders: b.decoders,
|
||||
}
|
||||
}
|
||||
|
||||
// ReadVal copy the underlying JSON into go interface, same as json.Unmarshal
|
||||
func (iter *Iterator) ReadVal(obj interface{}) {
|
||||
depth := iter.depth
|
||||
cacheKey := reflect2.RTypeOf(obj)
|
||||
decoder := iter.cfg.getDecoderFromCache(cacheKey)
|
||||
if decoder == nil {
|
||||
typ := reflect2.TypeOf(obj)
|
||||
if typ.Kind() != reflect.Ptr {
|
||||
iter.ReportError("ReadVal", "can only unmarshal into pointer")
|
||||
return
|
||||
}
|
||||
decoder = iter.cfg.DecoderOf(typ)
|
||||
}
|
||||
ptr := reflect2.PtrOf(obj)
|
||||
if ptr == nil {
|
||||
iter.ReportError("ReadVal", "can not read into nil pointer")
|
||||
return
|
||||
}
|
||||
decoder.Decode(ptr, iter)
|
||||
if iter.depth != depth {
|
||||
iter.ReportError("ReadVal", "unexpected mismatched nesting")
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// WriteVal copy the go interface into underlying JSON, same as json.Marshal
|
||||
func (stream *Stream) WriteVal(val interface{}) {
|
||||
if nil == val {
|
||||
stream.WriteNil()
|
||||
return
|
||||
}
|
||||
cacheKey := reflect2.RTypeOf(val)
|
||||
encoder := stream.cfg.getEncoderFromCache(cacheKey)
|
||||
if encoder == nil {
|
||||
typ := reflect2.TypeOf(val)
|
||||
encoder = stream.cfg.EncoderOf(typ)
|
||||
}
|
||||
encoder.Encode(reflect2.PtrOf(val), stream)
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) DecoderOf(typ reflect2.Type) ValDecoder {
|
||||
cacheKey := typ.RType()
|
||||
decoder := cfg.getDecoderFromCache(cacheKey)
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
ctx := &ctx{
|
||||
frozenConfig: cfg,
|
||||
prefix: "",
|
||||
decoders: map[reflect2.Type]ValDecoder{},
|
||||
encoders: map[reflect2.Type]ValEncoder{},
|
||||
}
|
||||
ptrType := typ.(*reflect2.UnsafePtrType)
|
||||
decoder = decoderOfType(ctx, ptrType.Elem())
|
||||
cfg.addDecoderToCache(cacheKey, decoder)
|
||||
return decoder
|
||||
}
|
||||
|
||||
func decoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
decoder := getTypeDecoderFromExtension(ctx, typ)
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
decoder = createDecoderOfType(ctx, typ)
|
||||
for _, extension := range extensions {
|
||||
decoder = extension.DecorateDecoder(typ, decoder)
|
||||
}
|
||||
decoder = ctx.decoderExtension.DecorateDecoder(typ, decoder)
|
||||
for _, extension := range ctx.extraExtensions {
|
||||
decoder = extension.DecorateDecoder(typ, decoder)
|
||||
}
|
||||
return decoder
|
||||
}
|
||||
|
||||
func createDecoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
decoder := ctx.decoders[typ]
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
placeholder := &placeholderDecoder{}
|
||||
ctx.decoders[typ] = placeholder
|
||||
decoder = _createDecoderOfType(ctx, typ)
|
||||
placeholder.decoder = decoder
|
||||
return decoder
|
||||
}
|
||||
|
||||
func _createDecoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
decoder := createDecoderOfJsonRawMessage(ctx, typ)
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
decoder = createDecoderOfJsonNumber(ctx, typ)
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
decoder = createDecoderOfMarshaler(ctx, typ)
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
decoder = createDecoderOfAny(ctx, typ)
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
decoder = createDecoderOfNative(ctx, typ)
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
switch typ.Kind() {
|
||||
case reflect.Interface:
|
||||
ifaceType, isIFace := typ.(*reflect2.UnsafeIFaceType)
|
||||
if isIFace {
|
||||
return &ifaceDecoder{valType: ifaceType}
|
||||
}
|
||||
return &efaceDecoder{}
|
||||
case reflect.Struct:
|
||||
return decoderOfStruct(ctx, typ)
|
||||
case reflect.Array:
|
||||
return decoderOfArray(ctx, typ)
|
||||
case reflect.Slice:
|
||||
return decoderOfSlice(ctx, typ)
|
||||
case reflect.Map:
|
||||
return decoderOfMap(ctx, typ)
|
||||
case reflect.Ptr:
|
||||
return decoderOfOptional(ctx, typ)
|
||||
default:
|
||||
return &lazyErrorDecoder{err: fmt.Errorf("%s%s is unsupported type", ctx.prefix, typ.String())}
|
||||
}
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) EncoderOf(typ reflect2.Type) ValEncoder {
|
||||
cacheKey := typ.RType()
|
||||
encoder := cfg.getEncoderFromCache(cacheKey)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
ctx := &ctx{
|
||||
frozenConfig: cfg,
|
||||
prefix: "",
|
||||
decoders: map[reflect2.Type]ValDecoder{},
|
||||
encoders: map[reflect2.Type]ValEncoder{},
|
||||
}
|
||||
encoder = encoderOfType(ctx, typ)
|
||||
if typ.LikePtr() {
|
||||
encoder = &onePtrEncoder{encoder}
|
||||
}
|
||||
cfg.addEncoderToCache(cacheKey, encoder)
|
||||
return encoder
|
||||
}
|
||||
|
||||
type onePtrEncoder struct {
|
||||
encoder ValEncoder
|
||||
}
|
||||
|
||||
func (encoder *onePtrEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return encoder.encoder.IsEmpty(unsafe.Pointer(&ptr))
|
||||
}
|
||||
|
||||
func (encoder *onePtrEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
encoder.encoder.Encode(unsafe.Pointer(&ptr), stream)
|
||||
}
|
||||
|
||||
func encoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
encoder := getTypeEncoderFromExtension(ctx, typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
encoder = createEncoderOfType(ctx, typ)
|
||||
for _, extension := range extensions {
|
||||
encoder = extension.DecorateEncoder(typ, encoder)
|
||||
}
|
||||
encoder = ctx.encoderExtension.DecorateEncoder(typ, encoder)
|
||||
for _, extension := range ctx.extraExtensions {
|
||||
encoder = extension.DecorateEncoder(typ, encoder)
|
||||
}
|
||||
return encoder
|
||||
}
|
||||
|
||||
func createEncoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
encoder := ctx.encoders[typ]
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
placeholder := &placeholderEncoder{}
|
||||
ctx.encoders[typ] = placeholder
|
||||
encoder = _createEncoderOfType(ctx, typ)
|
||||
placeholder.encoder = encoder
|
||||
return encoder
|
||||
}
|
||||
func _createEncoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
encoder := createEncoderOfJsonRawMessage(ctx, typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
encoder = createEncoderOfJsonNumber(ctx, typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
encoder = createEncoderOfMarshaler(ctx, typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
encoder = createEncoderOfAny(ctx, typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
encoder = createEncoderOfNative(ctx, typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
kind := typ.Kind()
|
||||
switch kind {
|
||||
case reflect.Interface:
|
||||
return &dynamicEncoder{typ}
|
||||
case reflect.Struct:
|
||||
return encoderOfStruct(ctx, typ)
|
||||
case reflect.Array:
|
||||
return encoderOfArray(ctx, typ)
|
||||
case reflect.Slice:
|
||||
return encoderOfSlice(ctx, typ)
|
||||
case reflect.Map:
|
||||
return encoderOfMap(ctx, typ)
|
||||
case reflect.Ptr:
|
||||
return encoderOfOptional(ctx, typ)
|
||||
default:
|
||||
return &lazyErrorEncoder{err: fmt.Errorf("%s%s is unsupported type", ctx.prefix, typ.String())}
|
||||
}
|
||||
}
|
||||
|
||||
type lazyErrorDecoder struct {
|
||||
err error
|
||||
}
|
||||
|
||||
func (decoder *lazyErrorDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if iter.WhatIsNext() != NilValue {
|
||||
if iter.Error == nil {
|
||||
iter.Error = decoder.err
|
||||
}
|
||||
} else {
|
||||
iter.Skip()
|
||||
}
|
||||
}
|
||||
|
||||
type lazyErrorEncoder struct {
|
||||
err error
|
||||
}
|
||||
|
||||
func (encoder *lazyErrorEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
if ptr == nil {
|
||||
stream.WriteNil()
|
||||
} else if stream.Error == nil {
|
||||
stream.Error = encoder.err
|
||||
}
|
||||
}
|
||||
|
||||
func (encoder *lazyErrorEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
type placeholderDecoder struct {
|
||||
decoder ValDecoder
|
||||
}
|
||||
|
||||
func (decoder *placeholderDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
decoder.decoder.Decode(ptr, iter)
|
||||
}
|
||||
|
||||
type placeholderEncoder struct {
|
||||
encoder ValEncoder
|
||||
}
|
||||
|
||||
func (encoder *placeholderEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
encoder.encoder.Encode(ptr, stream)
|
||||
}
|
||||
|
||||
func (encoder *placeholderEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return encoder.encoder.IsEmpty(ptr)
|
||||
}
|
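ReadVal and WriteVal above are the reflection entry points that Marshal and Unmarshal funnel into. A small sketch of calling them directly on an Iterator and a Stream; ParseString and NewStream are the upstream constructors, and the item type is invented for the example:

package main

import (
	"fmt"
	"io"
	"os"

	jsoniter "github.com/json-iterator/go"
)

// item is an invented type for the example.
type item struct {
	Title string `json:"title"`
}

func main() {
	// Decode through an Iterator; this is the ReadVal path shown above.
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"title":"news"}`)
	var it item
	iter.ReadVal(&it)
	if iter.Error != nil && iter.Error != io.EOF {
		panic(iter.Error)
	}
	fmt.Println(it.Title) // news

	// Encode through a Stream; this is the WriteVal path shown above.
	stream := jsoniter.NewStream(jsoniter.ConfigDefault, os.Stdout, 64)
	stream.WriteVal(it)
	stream.Flush() // {"title":"news"}
}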
104 vendor/github.com/json-iterator/go/reflect_array.go generated vendored Normal file
@ -0,0 +1,104 @@
package jsoniter

import (
	"fmt"
	"github.com/modern-go/reflect2"
	"io"
	"unsafe"
)

func decoderOfArray(ctx *ctx, typ reflect2.Type) ValDecoder {
	arrayType := typ.(*reflect2.UnsafeArrayType)
	decoder := decoderOfType(ctx.append("[arrayElem]"), arrayType.Elem())
	return &arrayDecoder{arrayType, decoder}
}

func encoderOfArray(ctx *ctx, typ reflect2.Type) ValEncoder {
	arrayType := typ.(*reflect2.UnsafeArrayType)
	if arrayType.Len() == 0 {
		return emptyArrayEncoder{}
	}
	encoder := encoderOfType(ctx.append("[arrayElem]"), arrayType.Elem())
	return &arrayEncoder{arrayType, encoder}
}

type emptyArrayEncoder struct{}

func (encoder emptyArrayEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
	stream.WriteEmptyArray()
}

func (encoder emptyArrayEncoder) IsEmpty(ptr unsafe.Pointer) bool {
	return true
}

type arrayEncoder struct {
	arrayType   *reflect2.UnsafeArrayType
	elemEncoder ValEncoder
}

func (encoder *arrayEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
	stream.WriteArrayStart()
	elemPtr := unsafe.Pointer(ptr)
	encoder.elemEncoder.Encode(elemPtr, stream)
	for i := 1; i < encoder.arrayType.Len(); i++ {
		stream.WriteMore()
		elemPtr = encoder.arrayType.UnsafeGetIndex(ptr, i)
		encoder.elemEncoder.Encode(elemPtr, stream)
	}
	stream.WriteArrayEnd()
	if stream.Error != nil && stream.Error != io.EOF {
		stream.Error = fmt.Errorf("%v: %s", encoder.arrayType, stream.Error.Error())
	}
}

func (encoder *arrayEncoder) IsEmpty(ptr unsafe.Pointer) bool {
	return false
}

type arrayDecoder struct {
	arrayType   *reflect2.UnsafeArrayType
	elemDecoder ValDecoder
}

func (decoder *arrayDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	decoder.doDecode(ptr, iter)
	if iter.Error != nil && iter.Error != io.EOF {
		iter.Error = fmt.Errorf("%v: %s", decoder.arrayType, iter.Error.Error())
	}
}

func (decoder *arrayDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) {
	c := iter.nextToken()
	arrayType := decoder.arrayType
	if c == 'n' {
		iter.skipThreeBytes('u', 'l', 'l')
		return
	}
	if c != '[' {
		iter.ReportError("decode array", "expect [ or n, but found "+string([]byte{c}))
		return
	}
	c = iter.nextToken()
	if c == ']' {
		return
	}
	iter.unreadByte()
	elemPtr := arrayType.UnsafeGetIndex(ptr, 0)
	decoder.elemDecoder.Decode(elemPtr, iter)
	length := 1
	for c = iter.nextToken(); c == ','; c = iter.nextToken() {
		if length >= arrayType.Len() {
			iter.Skip()
			continue
		}
		idx := length
		length += 1
		elemPtr = arrayType.UnsafeGetIndex(ptr, idx)
		decoder.elemDecoder.Decode(elemPtr, iter)
	}
	if c != ']' {
		iter.ReportError("decode array", "expect ], but found "+string([]byte{c}))
		return
	}
}
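The loop in doDecode fills at most arrayType.Len() elements and skips anything beyond that, mirroring encoding/json. A quick sketch of that behaviour through the package-level Unmarshal:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Extra elements past the fixed array length are skipped, not an error.
	var a [2]int
	err := jsoniter.Unmarshal([]byte(`[1, 2, 3, 4]`), &a)
	fmt.Println(a, err) // [1 2] <nil>
}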
70 vendor/github.com/json-iterator/go/reflect_dynamic.go generated vendored Normal file
@ -0,0 +1,70 @@
package jsoniter

import (
	"github.com/modern-go/reflect2"
	"reflect"
	"unsafe"
)

type dynamicEncoder struct {
	valType reflect2.Type
}

func (encoder *dynamicEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
	obj := encoder.valType.UnsafeIndirect(ptr)
	stream.WriteVal(obj)
}

func (encoder *dynamicEncoder) IsEmpty(ptr unsafe.Pointer) bool {
	return encoder.valType.UnsafeIndirect(ptr) == nil
}

type efaceDecoder struct {
}

func (decoder *efaceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	pObj := (*interface{})(ptr)
	obj := *pObj
	if obj == nil {
		*pObj = iter.Read()
		return
	}
	typ := reflect2.TypeOf(obj)
	if typ.Kind() != reflect.Ptr {
		*pObj = iter.Read()
		return
	}
	ptrType := typ.(*reflect2.UnsafePtrType)
	ptrElemType := ptrType.Elem()
	if iter.WhatIsNext() == NilValue {
		if ptrElemType.Kind() != reflect.Ptr {
			iter.skipFourBytes('n', 'u', 'l', 'l')
			*pObj = nil
			return
		}
	}
	if reflect2.IsNil(obj) {
		obj := ptrElemType.New()
		iter.ReadVal(obj)
		*pObj = obj
		return
	}
	iter.ReadVal(obj)
}

type ifaceDecoder struct {
	valType *reflect2.UnsafeIFaceType
}

func (decoder *ifaceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if iter.ReadNil() {
		decoder.valType.UnsafeSet(ptr, decoder.valType.UnsafeNew())
		return
	}
	obj := decoder.valType.UnsafeIndirect(ptr)
	if reflect2.IsNil(obj) {
		iter.ReportError("decode non empty interface", "can not unmarshal into nil")
		return
	}
	iter.ReadVal(obj)
}
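efaceDecoder is what runs when the target is a plain interface{}: a nil interface simply takes whatever iter.Read produces. A short sketch of that generic decode path using the package-level Unmarshal with its default configuration:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// A nil interface{} target is filled generically: objects become
	// map[string]interface{} and numbers become float64 by default.
	var v interface{}
	if err := jsoniter.Unmarshal([]byte(`{"n": 1, "ok": true}`), &v); err != nil {
		panic(err)
	}
	fmt.Printf("%T\n", v) // map[string]interface {}
}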
483 vendor/github.com/json-iterator/go/reflect_extension.go generated vendored Normal file
@ -0,0 +1,483 @@
package jsoniter
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/modern-go/reflect2"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strings"
|
||||
"unicode"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
var typeDecoders = map[string]ValDecoder{}
|
||||
var fieldDecoders = map[string]ValDecoder{}
|
||||
var typeEncoders = map[string]ValEncoder{}
|
||||
var fieldEncoders = map[string]ValEncoder{}
|
||||
var extensions = []Extension{}
|
||||
|
||||
// StructDescriptor describe how should we encode/decode the struct
|
||||
type StructDescriptor struct {
|
||||
Type reflect2.Type
|
||||
Fields []*Binding
|
||||
}
|
||||
|
||||
// GetField get one field from the descriptor by its name.
|
||||
// Can not use map here to keep field orders.
|
||||
func (structDescriptor *StructDescriptor) GetField(fieldName string) *Binding {
|
||||
for _, binding := range structDescriptor.Fields {
|
||||
if binding.Field.Name() == fieldName {
|
||||
return binding
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Binding describe how should we encode/decode the struct field
|
||||
type Binding struct {
|
||||
levels []int
|
||||
Field reflect2.StructField
|
||||
FromNames []string
|
||||
ToNames []string
|
||||
Encoder ValEncoder
|
||||
Decoder ValDecoder
|
||||
}
|
||||
|
||||
// Extension the one for all SPI. Customize encoding/decoding by specifying alternate encoder/decoder.
|
||||
// Can also rename fields by UpdateStructDescriptor.
|
||||
type Extension interface {
|
||||
UpdateStructDescriptor(structDescriptor *StructDescriptor)
|
||||
CreateMapKeyDecoder(typ reflect2.Type) ValDecoder
|
||||
CreateMapKeyEncoder(typ reflect2.Type) ValEncoder
|
||||
CreateDecoder(typ reflect2.Type) ValDecoder
|
||||
CreateEncoder(typ reflect2.Type) ValEncoder
|
||||
DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder
|
||||
DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder
|
||||
}
|
||||
|
||||
// DummyExtension embed this type get dummy implementation for all methods of Extension
|
||||
type DummyExtension struct {
|
||||
}
|
||||
|
||||
// UpdateStructDescriptor No-op
|
||||
func (extension *DummyExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
|
||||
}
|
||||
|
||||
// CreateMapKeyDecoder No-op
|
||||
func (extension *DummyExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
// CreateMapKeyEncoder No-op
|
||||
func (extension *DummyExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
// CreateDecoder No-op
|
||||
func (extension *DummyExtension) CreateDecoder(typ reflect2.Type) ValDecoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
// CreateEncoder No-op
|
||||
func (extension *DummyExtension) CreateEncoder(typ reflect2.Type) ValEncoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
// DecorateDecoder No-op
|
||||
func (extension *DummyExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder {
|
||||
return decoder
|
||||
}
|
||||
|
||||
// DecorateEncoder No-op
|
||||
func (extension *DummyExtension) DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder {
|
||||
return encoder
|
||||
}
|
||||
|
||||
type EncoderExtension map[reflect2.Type]ValEncoder
|
||||
|
||||
// UpdateStructDescriptor No-op
|
||||
func (extension EncoderExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
|
||||
}
|
||||
|
||||
// CreateDecoder No-op
|
||||
func (extension EncoderExtension) CreateDecoder(typ reflect2.Type) ValDecoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
// CreateEncoder get encoder from map
|
||||
func (extension EncoderExtension) CreateEncoder(typ reflect2.Type) ValEncoder {
|
||||
return extension[typ]
|
||||
}
|
||||
|
||||
// CreateMapKeyDecoder No-op
|
||||
func (extension EncoderExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
// CreateMapKeyEncoder No-op
|
||||
func (extension EncoderExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
// DecorateDecoder No-op
|
||||
func (extension EncoderExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder {
|
||||
return decoder
|
||||
}
|
||||
|
||||
// DecorateEncoder No-op
|
||||
func (extension EncoderExtension) DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder {
|
||||
return encoder
|
||||
}
|
||||
|
||||
type DecoderExtension map[reflect2.Type]ValDecoder
|
||||
|
||||
// UpdateStructDescriptor No-op
|
||||
func (extension DecoderExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
|
||||
}
|
||||
|
||||
// CreateMapKeyDecoder No-op
|
||||
func (extension DecoderExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
// CreateMapKeyEncoder No-op
|
||||
func (extension DecoderExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
// CreateDecoder get decoder from map
|
||||
func (extension DecoderExtension) CreateDecoder(typ reflect2.Type) ValDecoder {
|
||||
return extension[typ]
|
||||
}
|
||||
|
||||
// CreateEncoder No-op
|
||||
func (extension DecoderExtension) CreateEncoder(typ reflect2.Type) ValEncoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
// DecorateDecoder No-op
|
||||
func (extension DecoderExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder {
|
||||
return decoder
|
||||
}
|
||||
|
||||
// DecorateEncoder No-op
|
||||
func (extension DecoderExtension) DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder {
|
||||
return encoder
|
||||
}
|
||||
|
||||
type funcDecoder struct {
|
||||
fun DecoderFunc
|
||||
}
|
||||
|
||||
func (decoder *funcDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
decoder.fun(ptr, iter)
|
||||
}
|
||||
|
||||
type funcEncoder struct {
|
||||
fun EncoderFunc
|
||||
isEmptyFunc func(ptr unsafe.Pointer) bool
|
||||
}
|
||||
|
||||
func (encoder *funcEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
encoder.fun(ptr, stream)
|
||||
}
|
||||
|
||||
func (encoder *funcEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
if encoder.isEmptyFunc == nil {
|
||||
return false
|
||||
}
|
||||
return encoder.isEmptyFunc(ptr)
|
||||
}
|
||||
|
||||
// DecoderFunc the function form of TypeDecoder
|
||||
type DecoderFunc func(ptr unsafe.Pointer, iter *Iterator)
|
||||
|
||||
// EncoderFunc the function form of TypeEncoder
|
||||
type EncoderFunc func(ptr unsafe.Pointer, stream *Stream)
|
||||
|
||||
// RegisterTypeDecoderFunc register TypeDecoder for a type with function
|
||||
func RegisterTypeDecoderFunc(typ string, fun DecoderFunc) {
|
||||
typeDecoders[typ] = &funcDecoder{fun}
|
||||
}
|
||||
|
||||
// RegisterTypeDecoder register TypeDecoder for a typ
|
||||
func RegisterTypeDecoder(typ string, decoder ValDecoder) {
|
||||
typeDecoders[typ] = decoder
|
||||
}
|
||||
|
||||
// RegisterFieldDecoderFunc register TypeDecoder for a struct field with function
|
||||
func RegisterFieldDecoderFunc(typ string, field string, fun DecoderFunc) {
|
||||
RegisterFieldDecoder(typ, field, &funcDecoder{fun})
|
||||
}
|
||||
|
||||
// RegisterFieldDecoder register TypeDecoder for a struct field
|
||||
func RegisterFieldDecoder(typ string, field string, decoder ValDecoder) {
|
||||
fieldDecoders[fmt.Sprintf("%s/%s", typ, field)] = decoder
|
||||
}
|
||||
|
||||
// RegisterTypeEncoderFunc register TypeEncoder for a type with encode/isEmpty function
|
||||
func RegisterTypeEncoderFunc(typ string, fun EncoderFunc, isEmptyFunc func(unsafe.Pointer) bool) {
|
||||
typeEncoders[typ] = &funcEncoder{fun, isEmptyFunc}
|
||||
}
|
||||
|
||||
// RegisterTypeEncoder register TypeEncoder for a type
|
||||
func RegisterTypeEncoder(typ string, encoder ValEncoder) {
|
||||
typeEncoders[typ] = encoder
|
||||
}
|
||||
|
||||
// RegisterFieldEncoderFunc register TypeEncoder for a struct field with encode/isEmpty function
|
||||
func RegisterFieldEncoderFunc(typ string, field string, fun EncoderFunc, isEmptyFunc func(unsafe.Pointer) bool) {
|
||||
RegisterFieldEncoder(typ, field, &funcEncoder{fun, isEmptyFunc})
|
||||
}
|
||||
|
||||
// RegisterFieldEncoder register TypeEncoder for a struct field
|
||||
func RegisterFieldEncoder(typ string, field string, encoder ValEncoder) {
|
||||
fieldEncoders[fmt.Sprintf("%s/%s", typ, field)] = encoder
|
||||
}
|
||||
|
||||
// RegisterExtension register extension
|
||||
func RegisterExtension(extension Extension) {
|
||||
extensions = append(extensions, extension)
|
||||
}
|
||||
|
||||
func getTypeDecoderFromExtension(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
decoder := _getTypeDecoderFromExtension(ctx, typ)
|
||||
if decoder != nil {
|
||||
for _, extension := range extensions {
|
||||
decoder = extension.DecorateDecoder(typ, decoder)
|
||||
}
|
||||
decoder = ctx.decoderExtension.DecorateDecoder(typ, decoder)
|
||||
for _, extension := range ctx.extraExtensions {
|
||||
decoder = extension.DecorateDecoder(typ, decoder)
|
||||
}
|
||||
}
|
||||
return decoder
|
||||
}
|
||||
func _getTypeDecoderFromExtension(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
for _, extension := range extensions {
|
||||
decoder := extension.CreateDecoder(typ)
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
}
|
||||
decoder := ctx.decoderExtension.CreateDecoder(typ)
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
for _, extension := range ctx.extraExtensions {
|
||||
decoder := extension.CreateDecoder(typ)
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
}
|
||||
typeName := typ.String()
|
||||
decoder = typeDecoders[typeName]
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
if typ.Kind() == reflect.Ptr {
|
||||
ptrType := typ.(*reflect2.UnsafePtrType)
|
||||
decoder := typeDecoders[ptrType.Elem().String()]
|
||||
if decoder != nil {
|
||||
return &OptionalDecoder{ptrType.Elem(), decoder}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func getTypeEncoderFromExtension(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
encoder := _getTypeEncoderFromExtension(ctx, typ)
|
||||
if encoder != nil {
|
||||
for _, extension := range extensions {
|
||||
encoder = extension.DecorateEncoder(typ, encoder)
|
||||
}
|
||||
encoder = ctx.encoderExtension.DecorateEncoder(typ, encoder)
|
||||
for _, extension := range ctx.extraExtensions {
|
||||
encoder = extension.DecorateEncoder(typ, encoder)
|
||||
}
|
||||
}
|
||||
return encoder
|
||||
}
|
||||
|
||||
func _getTypeEncoderFromExtension(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
for _, extension := range extensions {
|
||||
encoder := extension.CreateEncoder(typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
}
|
||||
encoder := ctx.encoderExtension.CreateEncoder(typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
for _, extension := range ctx.extraExtensions {
|
||||
encoder := extension.CreateEncoder(typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
}
|
||||
typeName := typ.String()
|
||||
encoder = typeEncoders[typeName]
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
if typ.Kind() == reflect.Ptr {
|
||||
typePtr := typ.(*reflect2.UnsafePtrType)
|
||||
encoder := typeEncoders[typePtr.Elem().String()]
|
||||
if encoder != nil {
|
||||
return &OptionalEncoder{encoder}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func describeStruct(ctx *ctx, typ reflect2.Type) *StructDescriptor {
|
||||
structType := typ.(*reflect2.UnsafeStructType)
|
||||
embeddedBindings := []*Binding{}
|
||||
bindings := []*Binding{}
|
||||
for i := 0; i < structType.NumField(); i++ {
|
||||
field := structType.Field(i)
|
||||
tag, hastag := field.Tag().Lookup(ctx.getTagKey())
|
||||
if ctx.onlyTaggedField && !hastag && !field.Anonymous() {
|
||||
continue
|
||||
}
|
||||
if tag == "-" || field.Name() == "_" {
|
||||
continue
|
||||
}
|
||||
tagParts := strings.Split(tag, ",")
|
||||
if field.Anonymous() && (tag == "" || tagParts[0] == "") {
|
||||
if field.Type().Kind() == reflect.Struct {
|
||||
structDescriptor := describeStruct(ctx, field.Type())
|
||||
for _, binding := range structDescriptor.Fields {
|
||||
binding.levels = append([]int{i}, binding.levels...)
|
||||
omitempty := binding.Encoder.(*structFieldEncoder).omitempty
|
||||
binding.Encoder = &structFieldEncoder{field, binding.Encoder, omitempty}
|
||||
binding.Decoder = &structFieldDecoder{field, binding.Decoder}
|
||||
embeddedBindings = append(embeddedBindings, binding)
|
||||
}
|
||||
continue
|
||||
} else if field.Type().Kind() == reflect.Ptr {
|
||||
ptrType := field.Type().(*reflect2.UnsafePtrType)
|
||||
if ptrType.Elem().Kind() == reflect.Struct {
|
||||
structDescriptor := describeStruct(ctx, ptrType.Elem())
|
||||
for _, binding := range structDescriptor.Fields {
|
||||
binding.levels = append([]int{i}, binding.levels...)
|
||||
omitempty := binding.Encoder.(*structFieldEncoder).omitempty
|
||||
binding.Encoder = &dereferenceEncoder{binding.Encoder}
|
||||
binding.Encoder = &structFieldEncoder{field, binding.Encoder, omitempty}
|
||||
binding.Decoder = &dereferenceDecoder{ptrType.Elem(), binding.Decoder}
|
||||
binding.Decoder = &structFieldDecoder{field, binding.Decoder}
|
||||
embeddedBindings = append(embeddedBindings, binding)
|
||||
}
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
fieldNames := calcFieldNames(field.Name(), tagParts[0], tag)
|
||||
fieldCacheKey := fmt.Sprintf("%s/%s", typ.String(), field.Name())
|
||||
decoder := fieldDecoders[fieldCacheKey]
|
||||
if decoder == nil {
|
||||
decoder = decoderOfType(ctx.append(field.Name()), field.Type())
|
||||
}
|
||||
encoder := fieldEncoders[fieldCacheKey]
|
||||
if encoder == nil {
|
||||
encoder = encoderOfType(ctx.append(field.Name()), field.Type())
|
||||
}
|
||||
binding := &Binding{
|
||||
Field: field,
|
||||
FromNames: fieldNames,
|
||||
ToNames: fieldNames,
|
||||
Decoder: decoder,
|
||||
Encoder: encoder,
|
||||
}
|
||||
binding.levels = []int{i}
|
||||
bindings = append(bindings, binding)
|
||||
}
|
||||
return createStructDescriptor(ctx, typ, bindings, embeddedBindings)
|
||||
}
|
||||
func createStructDescriptor(ctx *ctx, typ reflect2.Type, bindings []*Binding, embeddedBindings []*Binding) *StructDescriptor {
|
||||
structDescriptor := &StructDescriptor{
|
||||
Type: typ,
|
||||
Fields: bindings,
|
||||
}
|
||||
for _, extension := range extensions {
|
||||
extension.UpdateStructDescriptor(structDescriptor)
|
||||
}
|
||||
ctx.encoderExtension.UpdateStructDescriptor(structDescriptor)
|
||||
ctx.decoderExtension.UpdateStructDescriptor(structDescriptor)
|
||||
for _, extension := range ctx.extraExtensions {
|
||||
extension.UpdateStructDescriptor(structDescriptor)
|
||||
}
|
||||
processTags(structDescriptor, ctx.frozenConfig)
|
||||
// merge normal & embedded bindings & sort with original order
|
||||
allBindings := sortableBindings(append(embeddedBindings, structDescriptor.Fields...))
|
||||
sort.Sort(allBindings)
|
||||
structDescriptor.Fields = allBindings
|
||||
return structDescriptor
|
||||
}
|
||||
|
||||
type sortableBindings []*Binding
|
||||
|
||||
func (bindings sortableBindings) Len() int {
|
||||
return len(bindings)
|
||||
}
|
||||
|
||||
func (bindings sortableBindings) Less(i, j int) bool {
|
||||
left := bindings[i].levels
|
||||
right := bindings[j].levels
|
||||
k := 0
|
||||
for {
|
||||
if left[k] < right[k] {
|
||||
return true
|
||||
} else if left[k] > right[k] {
|
||||
return false
|
||||
}
|
||||
k++
|
||||
}
|
||||
}
|
||||
|
||||
func (bindings sortableBindings) Swap(i, j int) {
|
||||
bindings[i], bindings[j] = bindings[j], bindings[i]
|
||||
}
|
||||
|
||||
func processTags(structDescriptor *StructDescriptor, cfg *frozenConfig) {
|
||||
for _, binding := range structDescriptor.Fields {
|
||||
shouldOmitEmpty := false
|
||||
tagParts := strings.Split(binding.Field.Tag().Get(cfg.getTagKey()), ",")
|
||||
for _, tagPart := range tagParts[1:] {
|
||||
if tagPart == "omitempty" {
|
||||
shouldOmitEmpty = true
|
||||
} else if tagPart == "string" {
|
||||
if binding.Field.Type().Kind() == reflect.String {
|
||||
binding.Decoder = &stringModeStringDecoder{binding.Decoder, cfg}
|
||||
binding.Encoder = &stringModeStringEncoder{binding.Encoder, cfg}
|
||||
} else {
|
||||
binding.Decoder = &stringModeNumberDecoder{binding.Decoder}
|
||||
binding.Encoder = &stringModeNumberEncoder{binding.Encoder}
|
||||
}
|
||||
}
|
||||
}
|
||||
binding.Decoder = &structFieldDecoder{binding.Field, binding.Decoder}
|
||||
binding.Encoder = &structFieldEncoder{binding.Field, binding.Encoder, shouldOmitEmpty}
|
||||
}
|
||||
}
|
||||
|
||||
func calcFieldNames(originalFieldName string, tagProvidedFieldName string, wholeTag string) []string {
|
||||
// ignore?
|
||||
if wholeTag == "-" {
|
||||
return []string{}
|
||||
}
|
||||
// rename?
|
||||
var fieldNames []string
|
||||
if tagProvidedFieldName == "" {
|
||||
fieldNames = []string{originalFieldName}
|
||||
} else {
|
||||
fieldNames = []string{tagProvidedFieldName}
|
||||
}
|
||||
// private?
|
||||
isNotExported := unicode.IsLower(rune(originalFieldName[0])) || originalFieldName[0] == '_'
|
||||
if isNotExported {
|
||||
fieldNames = []string{}
|
||||
}
|
||||
return fieldNames
|
||||
}
|
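The Extension interface, DummyExtension, and RegisterExtension above form the customization SPI. A hedged sketch of one common use, renaming every struct field through UpdateStructDescriptor; the extension type is invented for the example:

package main

import (
	"fmt"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

// upperFieldExtension is an invented example extension. DummyExtension
// supplies no-op implementations for the rest of the Extension interface.
type upperFieldExtension struct {
	jsoniter.DummyExtension
}

// UpdateStructDescriptor rewrites the bindings produced by describeStruct so
// every field is (un)marshalled under its upper-cased name.
func (e *upperFieldExtension) UpdateStructDescriptor(sd *jsoniter.StructDescriptor) {
	for _, binding := range sd.Fields {
		upper := strings.ToUpper(binding.Field.Name())
		binding.ToNames = []string{upper}
		binding.FromNames = []string{upper}
	}
}

func main() {
	jsoniter.RegisterExtension(&upperFieldExtension{})
	out, _ := jsoniter.Marshal(struct{ Title string }{Title: "hello"})
	fmt.Println(string(out)) // {"TITLE":"hello"}
}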
112 vendor/github.com/json-iterator/go/reflect_json_number.go generated vendored Normal file
@ -0,0 +1,112 @@
package jsoniter

import (
	"encoding/json"
	"github.com/modern-go/reflect2"
	"strconv"
	"unsafe"
)

type Number string

// String returns the literal text of the number.
func (n Number) String() string { return string(n) }

// Float64 returns the number as a float64.
func (n Number) Float64() (float64, error) {
	return strconv.ParseFloat(string(n), 64)
}

// Int64 returns the number as an int64.
func (n Number) Int64() (int64, error) {
	return strconv.ParseInt(string(n), 10, 64)
}

func CastJsonNumber(val interface{}) (string, bool) {
	switch typedVal := val.(type) {
	case json.Number:
		return string(typedVal), true
	case Number:
		return string(typedVal), true
	}
	return "", false
}

var jsonNumberType = reflect2.TypeOfPtr((*json.Number)(nil)).Elem()
var jsoniterNumberType = reflect2.TypeOfPtr((*Number)(nil)).Elem()

func createDecoderOfJsonNumber(ctx *ctx, typ reflect2.Type) ValDecoder {
	if typ.AssignableTo(jsonNumberType) {
		return &jsonNumberCodec{}
	}
	if typ.AssignableTo(jsoniterNumberType) {
		return &jsoniterNumberCodec{}
	}
	return nil
}

func createEncoderOfJsonNumber(ctx *ctx, typ reflect2.Type) ValEncoder {
	if typ.AssignableTo(jsonNumberType) {
		return &jsonNumberCodec{}
	}
	if typ.AssignableTo(jsoniterNumberType) {
		return &jsoniterNumberCodec{}
	}
	return nil
}

type jsonNumberCodec struct {
}

func (codec *jsonNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
	switch iter.WhatIsNext() {
	case StringValue:
		*((*json.Number)(ptr)) = json.Number(iter.ReadString())
	case NilValue:
		iter.skipFourBytes('n', 'u', 'l', 'l')
		*((*json.Number)(ptr)) = ""
	default:
		*((*json.Number)(ptr)) = json.Number([]byte(iter.readNumberAsString()))
	}
}

func (codec *jsonNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
	number := *((*json.Number)(ptr))
	if len(number) == 0 {
		stream.writeByte('0')
	} else {
		stream.WriteRaw(string(number))
	}
}

func (codec *jsonNumberCodec) IsEmpty(ptr unsafe.Pointer) bool {
	return len(*((*json.Number)(ptr))) == 0
}

type jsoniterNumberCodec struct {
}

func (codec *jsoniterNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
	switch iter.WhatIsNext() {
	case StringValue:
		*((*Number)(ptr)) = Number(iter.ReadString())
	case NilValue:
		iter.skipFourBytes('n', 'u', 'l', 'l')
		*((*Number)(ptr)) = ""
	default:
		*((*Number)(ptr)) = Number([]byte(iter.readNumberAsString()))
	}
}

func (codec *jsoniterNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
	number := *((*Number)(ptr))
	if len(number) == 0 {
		stream.writeByte('0')
	} else {
		stream.WriteRaw(string(number))
	}
}

func (codec *jsoniterNumberCodec) IsEmpty(ptr unsafe.Pointer) bool {
	return len(*((*Number)(ptr))) == 0
}
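These codecs only kick in when a json.Number or jsoniter.Number appears in the target, which is typically arranged through the UseNumber option on jsoniter.Config upstream; the exact option name and decode shape below are assumptions, since that code is outside this hunk:

package main

import (
	"encoding/json"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// With UseNumber, untyped numbers decode to json.Number instead of
	// float64, so large integers keep their exact text.
	api := jsoniter.Config{UseNumber: true}.Froze()

	var v map[string]interface{}
	if err := api.Unmarshal([]byte(`{"id": 9007199254740993}`), &v); err != nil {
		panic(err)
	}
	id := v["id"].(json.Number)
	fmt.Println(id.String()) // 9007199254740993
}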
60 vendor/github.com/json-iterator/go/reflect_json_raw_message.go generated vendored Normal file
@ -0,0 +1,60 @@
package jsoniter

import (
	"encoding/json"
	"github.com/modern-go/reflect2"
	"unsafe"
)

var jsonRawMessageType = reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem()
var jsoniterRawMessageType = reflect2.TypeOfPtr((*RawMessage)(nil)).Elem()

func createEncoderOfJsonRawMessage(ctx *ctx, typ reflect2.Type) ValEncoder {
	if typ == jsonRawMessageType {
		return &jsonRawMessageCodec{}
	}
	if typ == jsoniterRawMessageType {
		return &jsoniterRawMessageCodec{}
	}
	return nil
}

func createDecoderOfJsonRawMessage(ctx *ctx, typ reflect2.Type) ValDecoder {
	if typ == jsonRawMessageType {
		return &jsonRawMessageCodec{}
	}
	if typ == jsoniterRawMessageType {
		return &jsoniterRawMessageCodec{}
	}
	return nil
}

type jsonRawMessageCodec struct {
}

func (codec *jsonRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
	*((*json.RawMessage)(ptr)) = json.RawMessage(iter.SkipAndReturnBytes())
}

func (codec *jsonRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
	stream.WriteRaw(string(*((*json.RawMessage)(ptr))))
}

func (codec *jsonRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool {
	return len(*((*json.RawMessage)(ptr))) == 0
}

type jsoniterRawMessageCodec struct {
}

func (codec *jsoniterRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
	*((*RawMessage)(ptr)) = RawMessage(iter.SkipAndReturnBytes())
}

func (codec *jsoniterRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
	stream.WriteRaw(string(*((*RawMessage)(ptr))))
}

func (codec *jsoniterRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool {
	return len(*((*RawMessage)(ptr))) == 0
}
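The raw-message codecs capture a JSON subtree verbatim via SkipAndReturnBytes, which is handy for deferring part of a decode. A small sketch using jsoniter.RawMessage; the envelope type is invented for the example:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// envelope is an invented type: Kind is decoded now, Payload is kept raw.
type envelope struct {
	Kind    string              `json:"kind"`
	Payload jsoniter.RawMessage `json:"payload"`
}

func main() {
	var e envelope
	if err := jsoniter.Unmarshal([]byte(`{"kind":"entry","payload":{"title":"hi"}}`), &e); err != nil {
		panic(err)
	}
	fmt.Println(e.Kind, string(e.Payload)) // entry {"title":"hi"}
}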
346 vendor/github.com/json-iterator/go/reflect_map.go generated vendored Normal file
@ -0,0 +1,346 @@
package jsoniter
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/modern-go/reflect2"
|
||||
"io"
|
||||
"reflect"
|
||||
"sort"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
func decoderOfMap(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
mapType := typ.(*reflect2.UnsafeMapType)
|
||||
keyDecoder := decoderOfMapKey(ctx.append("[mapKey]"), mapType.Key())
|
||||
elemDecoder := decoderOfType(ctx.append("[mapElem]"), mapType.Elem())
|
||||
return &mapDecoder{
|
||||
mapType: mapType,
|
||||
keyType: mapType.Key(),
|
||||
elemType: mapType.Elem(),
|
||||
keyDecoder: keyDecoder,
|
||||
elemDecoder: elemDecoder,
|
||||
}
|
||||
}
|
||||
|
||||
func encoderOfMap(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
mapType := typ.(*reflect2.UnsafeMapType)
|
||||
if ctx.sortMapKeys {
|
||||
return &sortKeysMapEncoder{
|
||||
mapType: mapType,
|
||||
keyEncoder: encoderOfMapKey(ctx.append("[mapKey]"), mapType.Key()),
|
||||
elemEncoder: encoderOfType(ctx.append("[mapElem]"), mapType.Elem()),
|
||||
}
|
||||
}
|
||||
return &mapEncoder{
|
||||
mapType: mapType,
|
||||
keyEncoder: encoderOfMapKey(ctx.append("[mapKey]"), mapType.Key()),
|
||||
elemEncoder: encoderOfType(ctx.append("[mapElem]"), mapType.Elem()),
|
||||
}
|
||||
}
|
||||
|
||||
func decoderOfMapKey(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
decoder := ctx.decoderExtension.CreateMapKeyDecoder(typ)
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
for _, extension := range ctx.extraExtensions {
|
||||
decoder := extension.CreateMapKeyDecoder(typ)
|
||||
if decoder != nil {
|
||||
return decoder
|
||||
}
|
||||
}
|
||||
|
||||
ptrType := reflect2.PtrTo(typ)
|
||||
if ptrType.Implements(unmarshalerType) {
|
||||
return &referenceDecoder{
|
||||
&unmarshalerDecoder{
|
||||
valType: ptrType,
|
||||
},
|
||||
}
|
||||
}
|
||||
if typ.Implements(unmarshalerType) {
|
||||
return &unmarshalerDecoder{
|
||||
valType: typ,
|
||||
}
|
||||
}
|
||||
if ptrType.Implements(textUnmarshalerType) {
|
||||
return &referenceDecoder{
|
||||
&textUnmarshalerDecoder{
|
||||
valType: ptrType,
|
||||
},
|
||||
}
|
||||
}
|
||||
if typ.Implements(textUnmarshalerType) {
|
||||
return &textUnmarshalerDecoder{
|
||||
valType: typ,
|
||||
}
|
||||
}
|
||||
|
||||
switch typ.Kind() {
|
||||
case reflect.String:
|
||||
return decoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String))
|
||||
case reflect.Bool,
|
||||
reflect.Uint8, reflect.Int8,
|
||||
reflect.Uint16, reflect.Int16,
|
||||
reflect.Uint32, reflect.Int32,
|
||||
reflect.Uint64, reflect.Int64,
|
||||
reflect.Uint, reflect.Int,
|
||||
reflect.Float32, reflect.Float64,
|
||||
reflect.Uintptr:
|
||||
typ = reflect2.DefaultTypeOfKind(typ.Kind())
|
||||
return &numericMapKeyDecoder{decoderOfType(ctx, typ)}
|
||||
default:
|
||||
return &lazyErrorDecoder{err: fmt.Errorf("unsupported map key type: %v", typ)}
|
||||
}
|
||||
}
|
||||
|
||||
func encoderOfMapKey(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
encoder := ctx.encoderExtension.CreateMapKeyEncoder(typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
for _, extension := range ctx.extraExtensions {
|
||||
encoder := extension.CreateMapKeyEncoder(typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
}
|
||||
|
||||
if typ == textMarshalerType {
|
||||
return &directTextMarshalerEncoder{
|
||||
stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
|
||||
}
|
||||
}
|
||||
if typ.Implements(textMarshalerType) {
|
||||
return &textMarshalerEncoder{
|
||||
valType: typ,
|
||||
stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
|
||||
}
|
||||
}
|
||||
|
||||
switch typ.Kind() {
|
||||
case reflect.String:
|
||||
return encoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String))
|
||||
case reflect.Bool,
|
||||
reflect.Uint8, reflect.Int8,
|
||||
reflect.Uint16, reflect.Int16,
|
||||
reflect.Uint32, reflect.Int32,
|
||||
reflect.Uint64, reflect.Int64,
|
||||
reflect.Uint, reflect.Int,
|
||||
reflect.Float32, reflect.Float64,
|
||||
reflect.Uintptr:
|
||||
typ = reflect2.DefaultTypeOfKind(typ.Kind())
|
||||
return &numericMapKeyEncoder{encoderOfType(ctx, typ)}
|
||||
default:
|
||||
if typ.Kind() == reflect.Interface {
|
||||
return &dynamicMapKeyEncoder{ctx, typ}
|
||||
}
|
||||
return &lazyErrorEncoder{err: fmt.Errorf("unsupported map key type: %v", typ)}
|
||||
}
|
||||
}
|
||||
|
||||
type mapDecoder struct {
|
||||
mapType *reflect2.UnsafeMapType
|
||||
keyType reflect2.Type
|
||||
elemType reflect2.Type
|
||||
keyDecoder ValDecoder
|
||||
elemDecoder ValDecoder
|
||||
}
|
||||
|
||||
func (decoder *mapDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
mapType := decoder.mapType
|
||||
c := iter.nextToken()
|
||||
if c == 'n' {
|
||||
iter.skipThreeBytes('u', 'l', 'l')
|
||||
*(*unsafe.Pointer)(ptr) = nil
|
||||
mapType.UnsafeSet(ptr, mapType.UnsafeNew())
|
||||
return
|
||||
}
|
||||
if mapType.UnsafeIsNil(ptr) {
|
||||
mapType.UnsafeSet(ptr, mapType.UnsafeMakeMap(0))
|
||||
}
|
||||
if c != '{' {
|
||||
iter.ReportError("ReadMapCB", `expect { or n, but found `+string([]byte{c}))
|
||||
return
|
||||
}
|
||||
c = iter.nextToken()
|
||||
if c == '}' {
|
||||
return
|
||||
}
|
||||
iter.unreadByte()
|
||||
key := decoder.keyType.UnsafeNew()
|
||||
decoder.keyDecoder.Decode(key, iter)
|
||||
c = iter.nextToken()
|
||||
if c != ':' {
|
||||
iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
|
||||
return
|
||||
}
|
||||
elem := decoder.elemType.UnsafeNew()
|
||||
decoder.elemDecoder.Decode(elem, iter)
|
||||
decoder.mapType.UnsafeSetIndex(ptr, key, elem)
|
||||
for c = iter.nextToken(); c == ','; c = iter.nextToken() {
|
||||
key := decoder.keyType.UnsafeNew()
|
||||
decoder.keyDecoder.Decode(key, iter)
|
||||
c = iter.nextToken()
|
||||
if c != ':' {
|
||||
iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
|
||||
return
|
||||
}
|
||||
elem := decoder.elemType.UnsafeNew()
|
||||
decoder.elemDecoder.Decode(elem, iter)
|
||||
decoder.mapType.UnsafeSetIndex(ptr, key, elem)
|
||||
}
|
||||
if c != '}' {
|
||||
iter.ReportError("ReadMapCB", `expect }, but found `+string([]byte{c}))
|
||||
}
|
||||
}
|
||||
|
||||
type numericMapKeyDecoder struct {
|
||||
decoder ValDecoder
|
||||
}
|
||||
|
||||
func (decoder *numericMapKeyDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
c := iter.nextToken()
|
||||
if c != '"' {
|
||||
iter.ReportError("ReadMapCB", `expect ", but found `+string([]byte{c}))
|
||||
return
|
||||
}
|
||||
decoder.decoder.Decode(ptr, iter)
|
||||
c = iter.nextToken()
|
||||
if c != '"' {
|
||||
iter.ReportError("ReadMapCB", `expect ", but found `+string([]byte{c}))
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
type numericMapKeyEncoder struct {
|
||||
encoder ValEncoder
|
||||
}
|
||||
|
||||
func (encoder *numericMapKeyEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.writeByte('"')
|
||||
encoder.encoder.Encode(ptr, stream)
|
||||
stream.writeByte('"')
|
||||
}
|
||||
|
||||
func (encoder *numericMapKeyEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
type dynamicMapKeyEncoder struct {
|
||||
ctx *ctx
|
||||
valType reflect2.Type
|
||||
}
|
||||
|
||||
func (encoder *dynamicMapKeyEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
obj := encoder.valType.UnsafeIndirect(ptr)
|
||||
encoderOfMapKey(encoder.ctx, reflect2.TypeOf(obj)).Encode(reflect2.PtrOf(obj), stream)
|
||||
}
|
||||
|
||||
func (encoder *dynamicMapKeyEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
obj := encoder.valType.UnsafeIndirect(ptr)
|
||||
return encoderOfMapKey(encoder.ctx, reflect2.TypeOf(obj)).IsEmpty(reflect2.PtrOf(obj))
|
||||
}
|
||||
|
||||
type mapEncoder struct {
|
||||
mapType *reflect2.UnsafeMapType
|
||||
keyEncoder ValEncoder
|
||||
elemEncoder ValEncoder
|
||||
}
|
||||
|
||||
func (encoder *mapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
if *(*unsafe.Pointer)(ptr) == nil {
|
||||
stream.WriteNil()
|
||||
return
|
||||
}
|
||||
stream.WriteObjectStart()
|
||||
iter := encoder.mapType.UnsafeIterate(ptr)
|
||||
for i := 0; iter.HasNext(); i++ {
|
||||
if i != 0 {
|
||||
stream.WriteMore()
|
||||
}
|
||||
key, elem := iter.UnsafeNext()
|
||||
encoder.keyEncoder.Encode(key, stream)
|
||||
if stream.indention > 0 {
|
||||
stream.writeTwoBytes(byte(':'), byte(' '))
|
||||
} else {
|
||||
stream.writeByte(':')
|
||||
}
|
||||
encoder.elemEncoder.Encode(elem, stream)
|
||||
}
|
||||
stream.WriteObjectEnd()
|
||||
}
|
||||
|
||||
func (encoder *mapEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
iter := encoder.mapType.UnsafeIterate(ptr)
|
||||
return !iter.HasNext()
|
||||
}
|
||||
|
||||
type sortKeysMapEncoder struct {
|
||||
mapType *reflect2.UnsafeMapType
|
||||
keyEncoder ValEncoder
|
||||
elemEncoder ValEncoder
|
||||
}
|
||||
|
||||
func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
if *(*unsafe.Pointer)(ptr) == nil {
|
||||
stream.WriteNil()
|
||||
return
|
||||
}
|
||||
stream.WriteObjectStart()
|
||||
mapIter := encoder.mapType.UnsafeIterate(ptr)
|
||||
subStream := stream.cfg.BorrowStream(nil)
|
||||
subStream.Attachment = stream.Attachment
|
||||
subIter := stream.cfg.BorrowIterator(nil)
|
||||
keyValues := encodedKeyValues{}
|
||||
for mapIter.HasNext() {
|
||||
key, elem := mapIter.UnsafeNext()
|
||||
subStreamIndex := subStream.Buffered()
|
||||
encoder.keyEncoder.Encode(key, subStream)
|
||||
if subStream.Error != nil && subStream.Error != io.EOF && stream.Error == nil {
|
||||
stream.Error = subStream.Error
|
||||
}
|
||||
encodedKey := subStream.Buffer()[subStreamIndex:]
|
||||
subIter.ResetBytes(encodedKey)
|
||||
decodedKey := subIter.ReadString()
|
||||
if stream.indention > 0 {
|
||||
subStream.writeTwoBytes(byte(':'), byte(' '))
|
||||
} else {
|
||||
subStream.writeByte(':')
|
||||
}
|
||||
encoder.elemEncoder.Encode(elem, subStream)
|
||||
keyValues = append(keyValues, encodedKV{
|
||||
key: decodedKey,
|
||||
keyValue: subStream.Buffer()[subStreamIndex:],
|
||||
})
|
||||
}
|
||||
sort.Sort(keyValues)
|
||||
for i, keyValue := range keyValues {
|
||||
if i != 0 {
|
||||
stream.WriteMore()
|
||||
}
|
||||
stream.Write(keyValue.keyValue)
|
||||
}
|
||||
if subStream.Error != nil && stream.Error == nil {
|
||||
stream.Error = subStream.Error
|
||||
}
|
||||
stream.WriteObjectEnd()
|
||||
stream.cfg.ReturnStream(subStream)
|
||||
stream.cfg.ReturnIterator(subIter)
|
||||
}
|
||||
|
||||
func (encoder *sortKeysMapEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
iter := encoder.mapType.UnsafeIterate(ptr)
|
||||
return !iter.HasNext()
|
||||
}
|
||||
|
||||
type encodedKeyValues []encodedKV
|
||||
|
||||
type encodedKV struct {
|
||||
key string
|
||||
keyValue []byte
|
||||
}
|
||||
|
||||
func (sv encodedKeyValues) Len() int { return len(sv) }
|
||||
func (sv encodedKeyValues) Swap(i, j int) { sv[i], sv[j] = sv[j], sv[i] }
|
||||
func (sv encodedKeyValues) Less(i, j int) bool { return sv[i].key < sv[j].key }
|
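sortKeysMapEncoder is chosen instead of mapEncoder when the frozen configuration has sortMapKeys set, giving deterministic output. A sketch through the public Config; SortMapKeys is the corresponding upstream option, named here as an assumption since its definition lies outside this hunk:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Sorted map keys make the encoded form stable across runs.
	api := jsoniter.Config{SortMapKeys: true}.Froze()
	out, _ := api.Marshal(map[string]int{"b": 2, "a": 1, "c": 3})
	fmt.Println(string(out)) // {"a":1,"b":2,"c":3}
}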
225 vendor/github.com/json-iterator/go/reflect_marshaler.go generated vendored Normal file
@ -0,0 +1,225 @@
package jsoniter
|
||||
|
||||
import (
|
||||
"encoding"
|
||||
"encoding/json"
|
||||
"unsafe"
|
||||
|
||||
"github.com/modern-go/reflect2"
|
||||
)
|
||||
|
||||
var marshalerType = reflect2.TypeOfPtr((*json.Marshaler)(nil)).Elem()
|
||||
var unmarshalerType = reflect2.TypeOfPtr((*json.Unmarshaler)(nil)).Elem()
|
||||
var textMarshalerType = reflect2.TypeOfPtr((*encoding.TextMarshaler)(nil)).Elem()
|
||||
var textUnmarshalerType = reflect2.TypeOfPtr((*encoding.TextUnmarshaler)(nil)).Elem()
|
||||
|
||||
func createDecoderOfMarshaler(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
ptrType := reflect2.PtrTo(typ)
|
||||
if ptrType.Implements(unmarshalerType) {
|
||||
return &referenceDecoder{
|
||||
&unmarshalerDecoder{ptrType},
|
||||
}
|
||||
}
|
||||
if ptrType.Implements(textUnmarshalerType) {
|
||||
return &referenceDecoder{
|
||||
&textUnmarshalerDecoder{ptrType},
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func createEncoderOfMarshaler(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
if typ == marshalerType {
|
||||
checkIsEmpty := createCheckIsEmpty(ctx, typ)
|
||||
var encoder ValEncoder = &directMarshalerEncoder{
|
||||
checkIsEmpty: checkIsEmpty,
|
||||
}
|
||||
return encoder
|
||||
}
|
||||
if typ.Implements(marshalerType) {
|
||||
checkIsEmpty := createCheckIsEmpty(ctx, typ)
|
||||
var encoder ValEncoder = &marshalerEncoder{
|
||||
valType: typ,
|
||||
checkIsEmpty: checkIsEmpty,
|
||||
}
|
||||
return encoder
|
||||
}
|
||||
ptrType := reflect2.PtrTo(typ)
|
||||
if ctx.prefix != "" && ptrType.Implements(marshalerType) {
|
||||
checkIsEmpty := createCheckIsEmpty(ctx, ptrType)
|
||||
var encoder ValEncoder = &marshalerEncoder{
|
||||
valType: ptrType,
|
||||
checkIsEmpty: checkIsEmpty,
|
||||
}
|
||||
return &referenceEncoder{encoder}
|
||||
}
|
||||
if typ == textMarshalerType {
|
||||
checkIsEmpty := createCheckIsEmpty(ctx, typ)
|
||||
var encoder ValEncoder = &directTextMarshalerEncoder{
|
||||
checkIsEmpty: checkIsEmpty,
|
||||
stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
|
||||
}
|
||||
return encoder
|
||||
}
|
||||
if typ.Implements(textMarshalerType) {
|
||||
checkIsEmpty := createCheckIsEmpty(ctx, typ)
|
||||
var encoder ValEncoder = &textMarshalerEncoder{
|
||||
valType: typ,
|
||||
stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
|
||||
checkIsEmpty: checkIsEmpty,
|
||||
}
|
||||
return encoder
|
||||
}
|
||||
// if prefix is empty, the type is the root type
|
||||
if ctx.prefix != "" && ptrType.Implements(textMarshalerType) {
|
||||
checkIsEmpty := createCheckIsEmpty(ctx, ptrType)
|
||||
var encoder ValEncoder = &textMarshalerEncoder{
|
||||
valType: ptrType,
|
||||
stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
|
||||
checkIsEmpty: checkIsEmpty,
|
||||
}
|
||||
return &referenceEncoder{encoder}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type marshalerEncoder struct {
|
||||
checkIsEmpty checkIsEmpty
|
||||
valType reflect2.Type
|
||||
}
|
||||
|
||||
func (encoder *marshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
obj := encoder.valType.UnsafeIndirect(ptr)
|
||||
if encoder.valType.IsNullable() && reflect2.IsNil(obj) {
|
||||
stream.WriteNil()
|
||||
return
|
||||
}
|
||||
marshaler := obj.(json.Marshaler)
|
||||
bytes, err := marshaler.MarshalJSON()
|
||||
if err != nil {
|
||||
stream.Error = err
|
||||
} else {
|
||||
// html escape was already done by jsoniter
|
||||
// but the extra '\n' should be trimed
|
||||
l := len(bytes)
|
||||
if l > 0 && bytes[l-1] == '\n' {
|
||||
bytes = bytes[:l-1]
|
||||
}
|
||||
stream.Write(bytes)
|
||||
}
|
||||
}
|
||||
|
||||
func (encoder *marshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return encoder.checkIsEmpty.IsEmpty(ptr)
|
||||
}
|
||||
|
||||
type directMarshalerEncoder struct {
|
||||
checkIsEmpty checkIsEmpty
|
||||
}
|
||||
|
||||
func (encoder *directMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
marshaler := *(*json.Marshaler)(ptr)
|
||||
if marshaler == nil {
|
||||
stream.WriteNil()
|
||||
return
|
||||
}
|
||||
bytes, err := marshaler.MarshalJSON()
|
||||
if err != nil {
|
||||
stream.Error = err
|
||||
} else {
|
||||
stream.Write(bytes)
|
||||
}
|
||||
}
|
||||
|
||||
func (encoder *directMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return encoder.checkIsEmpty.IsEmpty(ptr)
|
||||
}
|
||||
|
||||
type textMarshalerEncoder struct {
|
||||
valType reflect2.Type
|
||||
stringEncoder ValEncoder
|
||||
checkIsEmpty checkIsEmpty
|
||||
}
|
||||
|
||||
func (encoder *textMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
obj := encoder.valType.UnsafeIndirect(ptr)
|
||||
if encoder.valType.IsNullable() && reflect2.IsNil(obj) {
|
||||
stream.WriteNil()
|
||||
return
|
||||
}
|
||||
marshaler := (obj).(encoding.TextMarshaler)
|
||||
bytes, err := marshaler.MarshalText()
|
||||
if err != nil {
|
||||
stream.Error = err
|
||||
} else {
|
||||
str := string(bytes)
|
||||
encoder.stringEncoder.Encode(unsafe.Pointer(&str), stream)
|
||||
}
|
||||
}
|
||||
|
||||
func (encoder *textMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return encoder.checkIsEmpty.IsEmpty(ptr)
|
||||
}
|
||||
|
||||
type directTextMarshalerEncoder struct {
|
||||
stringEncoder ValEncoder
|
||||
checkIsEmpty checkIsEmpty
|
||||
}
|
||||
|
||||
func (encoder *directTextMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
marshaler := *(*encoding.TextMarshaler)(ptr)
|
||||
if marshaler == nil {
|
||||
stream.WriteNil()
|
||||
return
|
||||
}
|
||||
bytes, err := marshaler.MarshalText()
|
||||
if err != nil {
|
||||
stream.Error = err
|
||||
} else {
|
||||
str := string(bytes)
|
||||
encoder.stringEncoder.Encode(unsafe.Pointer(&str), stream)
|
||||
}
|
||||
}
|
||||
|
||||
func (encoder *directTextMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return encoder.checkIsEmpty.IsEmpty(ptr)
|
||||
}
|
||||
|
||||
type unmarshalerDecoder struct {
|
||||
valType reflect2.Type
|
||||
}
|
||||
|
||||
func (decoder *unmarshalerDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
valType := decoder.valType
|
||||
obj := valType.UnsafeIndirect(ptr)
|
||||
unmarshaler := obj.(json.Unmarshaler)
|
||||
iter.nextToken()
|
||||
iter.unreadByte() // skip spaces
|
||||
bytes := iter.SkipAndReturnBytes()
|
||||
err := unmarshaler.UnmarshalJSON(bytes)
|
||||
if err != nil {
|
||||
iter.ReportError("unmarshalerDecoder", err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
type textUnmarshalerDecoder struct {
|
||||
valType reflect2.Type
|
||||
}
|
||||
|
||||
func (decoder *textUnmarshalerDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
valType := decoder.valType
|
||||
obj := valType.UnsafeIndirect(ptr)
|
||||
if reflect2.IsNil(obj) {
|
||||
ptrType := valType.(*reflect2.UnsafePtrType)
|
||||
elemType := ptrType.Elem()
|
||||
elem := elemType.UnsafeNew()
|
||||
ptrType.UnsafeSet(ptr, unsafe.Pointer(&elem))
|
||||
obj = valType.UnsafeIndirect(ptr)
|
||||
}
|
||||
unmarshaler := (obj).(encoding.TextUnmarshaler)
|
||||
str := iter.ReadString()
|
||||
err := unmarshaler.UnmarshalText([]byte(str))
|
||||
if err != nil {
|
||||
iter.ReportError("textUnmarshalerDecoder", err.Error())
|
||||
}
|
||||
}
|
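The marshaler hooks above are easiest to see from the calling side. A minimal sketch, assuming the public jsoniter.Marshal entry point and a hypothetical Temperature type: any value implementing encoding.TextMarshaler is routed through textMarshalerEncoder and emitted as a JSON string.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// Temperature is a hypothetical type used only for illustration.
type Temperature float64

// MarshalText makes Temperature an encoding.TextMarshaler.
func (t Temperature) MarshalText() ([]byte, error) {
	return []byte(fmt.Sprintf("%.1fC", float64(t))), nil
}

func main() {
	out, err := jsoniter.Marshal(struct {
		Current Temperature `json:"current"`
	}{Current: 21.5})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // {"current":"21.5C"}
}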
453 vendor/github.com/json-iterator/go/reflect_native.go (generated, vendored, new file)
|
@ -0,0 +1,453 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"unsafe"
|
||||
|
||||
"github.com/modern-go/reflect2"
|
||||
)
|
||||
|
||||
const ptrSize = 32 << uintptr(^uintptr(0)>>63)
|
||||
|
||||
func createEncoderOfNative(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
if typ.Kind() == reflect.Slice && typ.(reflect2.SliceType).Elem().Kind() == reflect.Uint8 {
|
||||
sliceDecoder := decoderOfSlice(ctx, typ)
|
||||
return &base64Codec{sliceDecoder: sliceDecoder}
|
||||
}
|
||||
typeName := typ.String()
|
||||
kind := typ.Kind()
|
||||
switch kind {
|
||||
case reflect.String:
|
||||
if typeName != "string" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*string)(nil)).Elem())
|
||||
}
|
||||
return &stringCodec{}
|
||||
case reflect.Int:
|
||||
if typeName != "int" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*int)(nil)).Elem())
|
||||
}
|
||||
if strconv.IntSize == 32 {
|
||||
return &int32Codec{}
|
||||
}
|
||||
return &int64Codec{}
|
||||
case reflect.Int8:
|
||||
if typeName != "int8" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*int8)(nil)).Elem())
|
||||
}
|
||||
return &int8Codec{}
|
||||
case reflect.Int16:
|
||||
if typeName != "int16" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*int16)(nil)).Elem())
|
||||
}
|
||||
return &int16Codec{}
|
||||
case reflect.Int32:
|
||||
if typeName != "int32" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*int32)(nil)).Elem())
|
||||
}
|
||||
return &int32Codec{}
|
||||
case reflect.Int64:
|
||||
if typeName != "int64" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*int64)(nil)).Elem())
|
||||
}
|
||||
return &int64Codec{}
|
||||
case reflect.Uint:
|
||||
if typeName != "uint" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*uint)(nil)).Elem())
|
||||
}
|
||||
if strconv.IntSize == 32 {
|
||||
return &uint32Codec{}
|
||||
}
|
||||
return &uint64Codec{}
|
||||
case reflect.Uint8:
|
||||
if typeName != "uint8" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*uint8)(nil)).Elem())
|
||||
}
|
||||
return &uint8Codec{}
|
||||
case reflect.Uint16:
|
||||
if typeName != "uint16" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*uint16)(nil)).Elem())
|
||||
}
|
||||
return &uint16Codec{}
|
||||
case reflect.Uint32:
|
||||
if typeName != "uint32" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*uint32)(nil)).Elem())
|
||||
}
|
||||
return &uint32Codec{}
|
||||
case reflect.Uintptr:
|
||||
if typeName != "uintptr" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*uintptr)(nil)).Elem())
|
||||
}
|
||||
if ptrSize == 32 {
|
||||
return &uint32Codec{}
|
||||
}
|
||||
return &uint64Codec{}
|
||||
case reflect.Uint64:
|
||||
if typeName != "uint64" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*uint64)(nil)).Elem())
|
||||
}
|
||||
return &uint64Codec{}
|
||||
case reflect.Float32:
|
||||
if typeName != "float32" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*float32)(nil)).Elem())
|
||||
}
|
||||
return &float32Codec{}
|
||||
case reflect.Float64:
|
||||
if typeName != "float64" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*float64)(nil)).Elem())
|
||||
}
|
||||
return &float64Codec{}
|
||||
case reflect.Bool:
|
||||
if typeName != "bool" {
|
||||
return encoderOfType(ctx, reflect2.TypeOfPtr((*bool)(nil)).Elem())
|
||||
}
|
||||
return &boolCodec{}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func createDecoderOfNative(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
if typ.Kind() == reflect.Slice && typ.(reflect2.SliceType).Elem().Kind() == reflect.Uint8 {
|
||||
sliceDecoder := decoderOfSlice(ctx, typ)
|
||||
return &base64Codec{sliceDecoder: sliceDecoder}
|
||||
}
|
||||
typeName := typ.String()
|
||||
switch typ.Kind() {
|
||||
case reflect.String:
|
||||
if typeName != "string" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*string)(nil)).Elem())
|
||||
}
|
||||
return &stringCodec{}
|
||||
case reflect.Int:
|
||||
if typeName != "int" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*int)(nil)).Elem())
|
||||
}
|
||||
if strconv.IntSize == 32 {
|
||||
return &int32Codec{}
|
||||
}
|
||||
return &int64Codec{}
|
||||
case reflect.Int8:
|
||||
if typeName != "int8" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*int8)(nil)).Elem())
|
||||
}
|
||||
return &int8Codec{}
|
||||
case reflect.Int16:
|
||||
if typeName != "int16" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*int16)(nil)).Elem())
|
||||
}
|
||||
return &int16Codec{}
|
||||
case reflect.Int32:
|
||||
if typeName != "int32" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*int32)(nil)).Elem())
|
||||
}
|
||||
return &int32Codec{}
|
||||
case reflect.Int64:
|
||||
if typeName != "int64" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*int64)(nil)).Elem())
|
||||
}
|
||||
return &int64Codec{}
|
||||
case reflect.Uint:
|
||||
if typeName != "uint" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*uint)(nil)).Elem())
|
||||
}
|
||||
if strconv.IntSize == 32 {
|
||||
return &uint32Codec{}
|
||||
}
|
||||
return &uint64Codec{}
|
||||
case reflect.Uint8:
|
||||
if typeName != "uint8" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*uint8)(nil)).Elem())
|
||||
}
|
||||
return &uint8Codec{}
|
||||
case reflect.Uint16:
|
||||
if typeName != "uint16" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*uint16)(nil)).Elem())
|
||||
}
|
||||
return &uint16Codec{}
|
||||
case reflect.Uint32:
|
||||
if typeName != "uint32" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*uint32)(nil)).Elem())
|
||||
}
|
||||
return &uint32Codec{}
|
||||
case reflect.Uintptr:
|
||||
if typeName != "uintptr" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*uintptr)(nil)).Elem())
|
||||
}
|
||||
if ptrSize == 32 {
|
||||
return &uint32Codec{}
|
||||
}
|
||||
return &uint64Codec{}
|
||||
case reflect.Uint64:
|
||||
if typeName != "uint64" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*uint64)(nil)).Elem())
|
||||
}
|
||||
return &uint64Codec{}
|
||||
case reflect.Float32:
|
||||
if typeName != "float32" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*float32)(nil)).Elem())
|
||||
}
|
||||
return &float32Codec{}
|
||||
case reflect.Float64:
|
||||
if typeName != "float64" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*float64)(nil)).Elem())
|
||||
}
|
||||
return &float64Codec{}
|
||||
case reflect.Bool:
|
||||
if typeName != "bool" {
|
||||
return decoderOfType(ctx, reflect2.TypeOfPtr((*bool)(nil)).Elem())
|
||||
}
|
||||
return &boolCodec{}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type stringCodec struct {
|
||||
}
|
||||
|
||||
func (codec *stringCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
*((*string)(ptr)) = iter.ReadString()
|
||||
}
|
||||
|
||||
func (codec *stringCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
str := *((*string)(ptr))
|
||||
stream.WriteString(str)
|
||||
}
|
||||
|
||||
func (codec *stringCodec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*string)(ptr)) == ""
|
||||
}
|
||||
|
||||
type int8Codec struct {
|
||||
}
|
||||
|
||||
func (codec *int8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if !iter.ReadNil() {
|
||||
*((*int8)(ptr)) = iter.ReadInt8()
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *int8Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteInt8(*((*int8)(ptr)))
|
||||
}
|
||||
|
||||
func (codec *int8Codec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*int8)(ptr)) == 0
|
||||
}
|
||||
|
||||
type int16Codec struct {
|
||||
}
|
||||
|
||||
func (codec *int16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if !iter.ReadNil() {
|
||||
*((*int16)(ptr)) = iter.ReadInt16()
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *int16Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteInt16(*((*int16)(ptr)))
|
||||
}
|
||||
|
||||
func (codec *int16Codec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*int16)(ptr)) == 0
|
||||
}
|
||||
|
||||
type int32Codec struct {
|
||||
}
|
||||
|
||||
func (codec *int32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if !iter.ReadNil() {
|
||||
*((*int32)(ptr)) = iter.ReadInt32()
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *int32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteInt32(*((*int32)(ptr)))
|
||||
}
|
||||
|
||||
func (codec *int32Codec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*int32)(ptr)) == 0
|
||||
}
|
||||
|
||||
type int64Codec struct {
|
||||
}
|
||||
|
||||
func (codec *int64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if !iter.ReadNil() {
|
||||
*((*int64)(ptr)) = iter.ReadInt64()
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *int64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteInt64(*((*int64)(ptr)))
|
||||
}
|
||||
|
||||
func (codec *int64Codec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*int64)(ptr)) == 0
|
||||
}
|
||||
|
||||
type uint8Codec struct {
|
||||
}
|
||||
|
||||
func (codec *uint8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if !iter.ReadNil() {
|
||||
*((*uint8)(ptr)) = iter.ReadUint8()
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *uint8Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteUint8(*((*uint8)(ptr)))
|
||||
}
|
||||
|
||||
func (codec *uint8Codec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*uint8)(ptr)) == 0
|
||||
}
|
||||
|
||||
type uint16Codec struct {
|
||||
}
|
||||
|
||||
func (codec *uint16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if !iter.ReadNil() {
|
||||
*((*uint16)(ptr)) = iter.ReadUint16()
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *uint16Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteUint16(*((*uint16)(ptr)))
|
||||
}
|
||||
|
||||
func (codec *uint16Codec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*uint16)(ptr)) == 0
|
||||
}
|
||||
|
||||
type uint32Codec struct {
|
||||
}
|
||||
|
||||
func (codec *uint32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if !iter.ReadNil() {
|
||||
*((*uint32)(ptr)) = iter.ReadUint32()
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *uint32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteUint32(*((*uint32)(ptr)))
|
||||
}
|
||||
|
||||
func (codec *uint32Codec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*uint32)(ptr)) == 0
|
||||
}
|
||||
|
||||
type uint64Codec struct {
|
||||
}
|
||||
|
||||
func (codec *uint64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if !iter.ReadNil() {
|
||||
*((*uint64)(ptr)) = iter.ReadUint64()
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *uint64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteUint64(*((*uint64)(ptr)))
|
||||
}
|
||||
|
||||
func (codec *uint64Codec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*uint64)(ptr)) == 0
|
||||
}
|
||||
|
||||
type float32Codec struct {
|
||||
}
|
||||
|
||||
func (codec *float32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if !iter.ReadNil() {
|
||||
*((*float32)(ptr)) = iter.ReadFloat32()
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *float32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteFloat32(*((*float32)(ptr)))
|
||||
}
|
||||
|
||||
func (codec *float32Codec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*float32)(ptr)) == 0
|
||||
}
|
||||
|
||||
type float64Codec struct {
|
||||
}
|
||||
|
||||
func (codec *float64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if !iter.ReadNil() {
|
||||
*((*float64)(ptr)) = iter.ReadFloat64()
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *float64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteFloat64(*((*float64)(ptr)))
|
||||
}
|
||||
|
||||
func (codec *float64Codec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*float64)(ptr)) == 0
|
||||
}
|
||||
|
||||
type boolCodec struct {
|
||||
}
|
||||
|
||||
func (codec *boolCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if !iter.ReadNil() {
|
||||
*((*bool)(ptr)) = iter.ReadBool()
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *boolCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteBool(*((*bool)(ptr)))
|
||||
}
|
||||
|
||||
func (codec *boolCodec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return !(*((*bool)(ptr)))
|
||||
}
|
||||
|
||||
type base64Codec struct {
|
||||
sliceType *reflect2.UnsafeSliceType
|
||||
sliceDecoder ValDecoder
|
||||
}
|
||||
|
||||
func (codec *base64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if iter.ReadNil() {
|
||||
codec.sliceType.UnsafeSetNil(ptr)
|
||||
return
|
||||
}
|
||||
switch iter.WhatIsNext() {
|
||||
case StringValue:
|
||||
src := iter.ReadString()
|
||||
dst, err := base64.StdEncoding.DecodeString(src)
|
||||
if err != nil {
|
||||
iter.ReportError("decode base64", err.Error())
|
||||
} else {
|
||||
codec.sliceType.UnsafeSet(ptr, unsafe.Pointer(&dst))
|
||||
}
|
||||
case ArrayValue:
|
||||
codec.sliceDecoder.Decode(ptr, iter)
|
||||
default:
|
||||
iter.ReportError("base64Codec", "invalid input")
|
||||
}
|
||||
}
|
||||
|
||||
func (codec *base64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
if codec.sliceType.UnsafeIsNil(ptr) {
|
||||
stream.WriteNil()
|
||||
return
|
||||
}
|
||||
src := *((*[]byte)(ptr))
|
||||
encoding := base64.StdEncoding
|
||||
stream.writeByte('"')
|
||||
if len(src) != 0 {
|
||||
size := encoding.EncodedLen(len(src))
|
||||
buf := make([]byte, size)
|
||||
encoding.Encode(buf, src)
|
||||
stream.buf = append(stream.buf, buf...)
|
||||
}
|
||||
stream.writeByte('"')
|
||||
}
|
||||
|
||||
func (codec *base64Codec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return len(*((*[]byte)(ptr))) == 0
|
||||
}
|
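A small sketch of what base64Codec means for callers, assuming the public jsoniter.Marshal/Unmarshal entry points and a hypothetical Payload type: a []byte field is written as a base64 string, and the decoder also accepts a plain JSON array of byte values (the ArrayValue branch above).

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// Payload is a hypothetical type used only for illustration.
type Payload struct {
	Data []byte `json:"data"`
}

func main() {
	out, _ := jsoniter.Marshal(Payload{Data: []byte("feed-to-muc")})
	fmt.Println(string(out)) // {"data":"ZmVlZC10by1tdWM="}

	var p Payload
	// ArrayValue branch: a JSON array of byte values also decodes into []byte.
	_ = jsoniter.Unmarshal([]byte(`{"data":[104,105]}`), &p)
	fmt.Println(string(p.Data)) // hi
}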
129 vendor/github.com/json-iterator/go/reflect_optional.go (generated, vendored, new file)
|
@ -0,0 +1,129 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"github.com/modern-go/reflect2"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
func decoderOfOptional(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
ptrType := typ.(*reflect2.UnsafePtrType)
|
||||
elemType := ptrType.Elem()
|
||||
decoder := decoderOfType(ctx, elemType)
|
||||
return &OptionalDecoder{elemType, decoder}
|
||||
}
|
||||
|
||||
func encoderOfOptional(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
ptrType := typ.(*reflect2.UnsafePtrType)
|
||||
elemType := ptrType.Elem()
|
||||
elemEncoder := encoderOfType(ctx, elemType)
|
||||
encoder := &OptionalEncoder{elemEncoder}
|
||||
return encoder
|
||||
}
|
||||
|
||||
type OptionalDecoder struct {
|
||||
ValueType reflect2.Type
|
||||
ValueDecoder ValDecoder
|
||||
}
|
||||
|
||||
func (decoder *OptionalDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if iter.ReadNil() {
|
||||
*((*unsafe.Pointer)(ptr)) = nil
|
||||
} else {
|
||||
if *((*unsafe.Pointer)(ptr)) == nil {
|
||||
// pointer is nil, we have to allocate memory to hold the value
|
||||
newPtr := decoder.ValueType.UnsafeNew()
|
||||
decoder.ValueDecoder.Decode(newPtr, iter)
|
||||
*((*unsafe.Pointer)(ptr)) = newPtr
|
||||
} else {
|
||||
//reuse existing instance
|
||||
decoder.ValueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type dereferenceDecoder struct {
|
||||
// used only to dereference a pointer
|
||||
valueType reflect2.Type
|
||||
valueDecoder ValDecoder
|
||||
}
|
||||
|
||||
func (decoder *dereferenceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if *((*unsafe.Pointer)(ptr)) == nil {
|
||||
// pointer is nil, we have to allocate memory to hold the value
|
||||
newPtr := decoder.valueType.UnsafeNew()
|
||||
decoder.valueDecoder.Decode(newPtr, iter)
|
||||
*((*unsafe.Pointer)(ptr)) = newPtr
|
||||
} else {
|
||||
//reuse existing instance
|
||||
decoder.valueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter)
|
||||
}
|
||||
}
|
||||
|
||||
type OptionalEncoder struct {
|
||||
ValueEncoder ValEncoder
|
||||
}
|
||||
|
||||
func (encoder *OptionalEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
if *((*unsafe.Pointer)(ptr)) == nil {
|
||||
stream.WriteNil()
|
||||
} else {
|
||||
encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
|
||||
}
|
||||
}
|
||||
|
||||
func (encoder *OptionalEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return *((*unsafe.Pointer)(ptr)) == nil
|
||||
}
|
||||
|
||||
type dereferenceEncoder struct {
|
||||
ValueEncoder ValEncoder
|
||||
}
|
||||
|
||||
func (encoder *dereferenceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
if *((*unsafe.Pointer)(ptr)) == nil {
|
||||
stream.WriteNil()
|
||||
} else {
|
||||
encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
|
||||
}
|
||||
}
|
||||
|
||||
func (encoder *dereferenceEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
dePtr := *((*unsafe.Pointer)(ptr))
|
||||
if dePtr == nil {
|
||||
return true
|
||||
}
|
||||
return encoder.ValueEncoder.IsEmpty(dePtr)
|
||||
}
|
||||
|
||||
func (encoder *dereferenceEncoder) IsEmbeddedPtrNil(ptr unsafe.Pointer) bool {
|
||||
deReferenced := *((*unsafe.Pointer)(ptr))
|
||||
if deReferenced == nil {
|
||||
return true
|
||||
}
|
||||
isEmbeddedPtrNil, converted := encoder.ValueEncoder.(IsEmbeddedPtrNil)
|
||||
if !converted {
|
||||
return false
|
||||
}
|
||||
fieldPtr := unsafe.Pointer(deReferenced)
|
||||
return isEmbeddedPtrNil.IsEmbeddedPtrNil(fieldPtr)
|
||||
}
|
||||
|
||||
type referenceEncoder struct {
|
||||
encoder ValEncoder
|
||||
}
|
||||
|
||||
func (encoder *referenceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
encoder.encoder.Encode(unsafe.Pointer(&ptr), stream)
|
||||
}
|
||||
|
||||
func (encoder *referenceEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return encoder.encoder.IsEmpty(unsafe.Pointer(&ptr))
|
||||
}
|
||||
|
||||
type referenceDecoder struct {
|
||||
decoder ValDecoder
|
||||
}
|
||||
|
||||
func (decoder *referenceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
decoder.decoder.Decode(unsafe.Pointer(&ptr), iter)
|
||||
}
|
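A minimal sketch of the pointer handling above through the public API, with a hypothetical Item type: a nil pointer encodes as null, and decoding into a nil pointer allocates the value first, as in OptionalDecoder.Decode.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// Item is a hypothetical type used only for illustration.
type Item struct {
	Updated *string `json:"updated"`
}

func main() {
	out, _ := jsoniter.Marshal(Item{}) // nil pointer -> null
	fmt.Println(string(out))           // {"updated":null}

	var it Item
	// The decoder allocates storage for the nil *string before filling it.
	_ = jsoniter.Unmarshal([]byte(`{"updated":"2020-10-24"}`), &it)
	fmt.Println(*it.Updated) // 2020-10-24
}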
99 vendor/github.com/json-iterator/go/reflect_slice.go (generated, vendored, new file)
|
@ -0,0 +1,99 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/modern-go/reflect2"
|
||||
"io"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
func decoderOfSlice(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
sliceType := typ.(*reflect2.UnsafeSliceType)
|
||||
decoder := decoderOfType(ctx.append("[sliceElem]"), sliceType.Elem())
|
||||
return &sliceDecoder{sliceType, decoder}
|
||||
}
|
||||
|
||||
func encoderOfSlice(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
sliceType := typ.(*reflect2.UnsafeSliceType)
|
||||
encoder := encoderOfType(ctx.append("[sliceElem]"), sliceType.Elem())
|
||||
return &sliceEncoder{sliceType, encoder}
|
||||
}
|
||||
|
||||
type sliceEncoder struct {
|
||||
sliceType *reflect2.UnsafeSliceType
|
||||
elemEncoder ValEncoder
|
||||
}
|
||||
|
||||
func (encoder *sliceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
if encoder.sliceType.UnsafeIsNil(ptr) {
|
||||
stream.WriteNil()
|
||||
return
|
||||
}
|
||||
length := encoder.sliceType.UnsafeLengthOf(ptr)
|
||||
if length == 0 {
|
||||
stream.WriteEmptyArray()
|
||||
return
|
||||
}
|
||||
stream.WriteArrayStart()
|
||||
encoder.elemEncoder.Encode(encoder.sliceType.UnsafeGetIndex(ptr, 0), stream)
|
||||
for i := 1; i < length; i++ {
|
||||
stream.WriteMore()
|
||||
elemPtr := encoder.sliceType.UnsafeGetIndex(ptr, i)
|
||||
encoder.elemEncoder.Encode(elemPtr, stream)
|
||||
}
|
||||
stream.WriteArrayEnd()
|
||||
if stream.Error != nil && stream.Error != io.EOF {
|
||||
stream.Error = fmt.Errorf("%v: %s", encoder.sliceType, stream.Error.Error())
|
||||
}
|
||||
}
|
||||
|
||||
func (encoder *sliceEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return encoder.sliceType.UnsafeLengthOf(ptr) == 0
|
||||
}
|
||||
|
||||
type sliceDecoder struct {
|
||||
sliceType *reflect2.UnsafeSliceType
|
||||
elemDecoder ValDecoder
|
||||
}
|
||||
|
||||
func (decoder *sliceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
decoder.doDecode(ptr, iter)
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
iter.Error = fmt.Errorf("%v: %s", decoder.sliceType, iter.Error.Error())
|
||||
}
|
||||
}
|
||||
|
||||
func (decoder *sliceDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
c := iter.nextToken()
|
||||
sliceType := decoder.sliceType
|
||||
if c == 'n' {
|
||||
iter.skipThreeBytes('u', 'l', 'l')
|
||||
sliceType.UnsafeSetNil(ptr)
|
||||
return
|
||||
}
|
||||
if c != '[' {
|
||||
iter.ReportError("decode slice", "expect [ or n, but found "+string([]byte{c}))
|
||||
return
|
||||
}
|
||||
c = iter.nextToken()
|
||||
if c == ']' {
|
||||
sliceType.UnsafeSet(ptr, sliceType.UnsafeMakeSlice(0, 0))
|
||||
return
|
||||
}
|
||||
iter.unreadByte()
|
||||
sliceType.UnsafeGrow(ptr, 1)
|
||||
elemPtr := sliceType.UnsafeGetIndex(ptr, 0)
|
||||
decoder.elemDecoder.Decode(elemPtr, iter)
|
||||
length := 1
|
||||
for c = iter.nextToken(); c == ','; c = iter.nextToken() {
|
||||
idx := length
|
||||
length += 1
|
||||
sliceType.UnsafeGrow(ptr, length)
|
||||
elemPtr = sliceType.UnsafeGetIndex(ptr, idx)
|
||||
decoder.elemDecoder.Decode(elemPtr, iter)
|
||||
}
|
||||
if c != ']' {
|
||||
iter.ReportError("decode slice", "expect ], but found "+string([]byte{c}))
|
||||
return
|
||||
}
|
||||
}
|
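A short sketch of the three cases sliceEncoder distinguishes (nil, empty, non-empty), assuming the public jsoniter.Marshal entry point; values are illustrative only.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var nilSlice []int
	a, _ := jsoniter.Marshal(nilSlice)    // nil slice -> null
	b, _ := jsoniter.Marshal([]int{})     // empty slice -> []
	c, _ := jsoniter.Marshal([]int{1, 2}) // elements separated by WriteMore
	fmt.Println(string(a), string(b), string(c)) // null [] [1,2]
}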
1092 vendor/github.com/json-iterator/go/reflect_struct_decoder.go (generated, vendored, new file; diff suppressed because it is too large)
211 vendor/github.com/json-iterator/go/reflect_struct_encoder.go (generated, vendored, new file)
|
@ -0,0 +1,211 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/modern-go/reflect2"
|
||||
"io"
|
||||
"reflect"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
func encoderOfStruct(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
type bindingTo struct {
|
||||
binding *Binding
|
||||
toName string
|
||||
ignored bool
|
||||
}
|
||||
orderedBindings := []*bindingTo{}
|
||||
structDescriptor := describeStruct(ctx, typ)
|
||||
for _, binding := range structDescriptor.Fields {
|
||||
for _, toName := range binding.ToNames {
|
||||
new := &bindingTo{
|
||||
binding: binding,
|
||||
toName: toName,
|
||||
}
|
||||
for _, old := range orderedBindings {
|
||||
if old.toName != toName {
|
||||
continue
|
||||
}
|
||||
old.ignored, new.ignored = resolveConflictBinding(ctx.frozenConfig, old.binding, new.binding)
|
||||
}
|
||||
orderedBindings = append(orderedBindings, new)
|
||||
}
|
||||
}
|
||||
if len(orderedBindings) == 0 {
|
||||
return &emptyStructEncoder{}
|
||||
}
|
||||
finalOrderedFields := []structFieldTo{}
|
||||
for _, bindingTo := range orderedBindings {
|
||||
if !bindingTo.ignored {
|
||||
finalOrderedFields = append(finalOrderedFields, structFieldTo{
|
||||
encoder: bindingTo.binding.Encoder.(*structFieldEncoder),
|
||||
toName: bindingTo.toName,
|
||||
})
|
||||
}
|
||||
}
|
||||
return &structEncoder{typ, finalOrderedFields}
|
||||
}
|
||||
|
||||
func createCheckIsEmpty(ctx *ctx, typ reflect2.Type) checkIsEmpty {
|
||||
encoder := createEncoderOfNative(ctx, typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
kind := typ.Kind()
|
||||
switch kind {
|
||||
case reflect.Interface:
|
||||
return &dynamicEncoder{typ}
|
||||
case reflect.Struct:
|
||||
return &structEncoder{typ: typ}
|
||||
case reflect.Array:
|
||||
return &arrayEncoder{}
|
||||
case reflect.Slice:
|
||||
return &sliceEncoder{}
|
||||
case reflect.Map:
|
||||
return encoderOfMap(ctx, typ)
|
||||
case reflect.Ptr:
|
||||
return &OptionalEncoder{}
|
||||
default:
|
||||
return &lazyErrorEncoder{err: fmt.Errorf("unsupported type: %v", typ)}
|
||||
}
|
||||
}
|
||||
|
||||
func resolveConflictBinding(cfg *frozenConfig, old, new *Binding) (ignoreOld, ignoreNew bool) {
|
||||
newTagged := new.Field.Tag().Get(cfg.getTagKey()) != ""
|
||||
oldTagged := old.Field.Tag().Get(cfg.getTagKey()) != ""
|
||||
if newTagged {
|
||||
if oldTagged {
|
||||
if len(old.levels) > len(new.levels) {
|
||||
return true, false
|
||||
} else if len(new.levels) > len(old.levels) {
|
||||
return false, true
|
||||
} else {
|
||||
return true, true
|
||||
}
|
||||
} else {
|
||||
return true, false
|
||||
}
|
||||
} else {
|
||||
if oldTagged {
|
||||
return true, false
|
||||
}
|
||||
if len(old.levels) > len(new.levels) {
|
||||
return true, false
|
||||
} else if len(new.levels) > len(old.levels) {
|
||||
return false, true
|
||||
} else {
|
||||
return true, true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type structFieldEncoder struct {
|
||||
field reflect2.StructField
|
||||
fieldEncoder ValEncoder
|
||||
omitempty bool
|
||||
}
|
||||
|
||||
func (encoder *structFieldEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
fieldPtr := encoder.field.UnsafeGet(ptr)
|
||||
encoder.fieldEncoder.Encode(fieldPtr, stream)
|
||||
if stream.Error != nil && stream.Error != io.EOF {
|
||||
stream.Error = fmt.Errorf("%s: %s", encoder.field.Name(), stream.Error.Error())
|
||||
}
|
||||
}
|
||||
|
||||
func (encoder *structFieldEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
fieldPtr := encoder.field.UnsafeGet(ptr)
|
||||
return encoder.fieldEncoder.IsEmpty(fieldPtr)
|
||||
}
|
||||
|
||||
func (encoder *structFieldEncoder) IsEmbeddedPtrNil(ptr unsafe.Pointer) bool {
|
||||
isEmbeddedPtrNil, converted := encoder.fieldEncoder.(IsEmbeddedPtrNil)
|
||||
if !converted {
|
||||
return false
|
||||
}
|
||||
fieldPtr := encoder.field.UnsafeGet(ptr)
|
||||
return isEmbeddedPtrNil.IsEmbeddedPtrNil(fieldPtr)
|
||||
}
|
||||
|
||||
type IsEmbeddedPtrNil interface {
|
||||
IsEmbeddedPtrNil(ptr unsafe.Pointer) bool
|
||||
}
|
||||
|
||||
type structEncoder struct {
|
||||
typ reflect2.Type
|
||||
fields []structFieldTo
|
||||
}
|
||||
|
||||
type structFieldTo struct {
|
||||
encoder *structFieldEncoder
|
||||
toName string
|
||||
}
|
||||
|
||||
func (encoder *structEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteObjectStart()
|
||||
isNotFirst := false
|
||||
for _, field := range encoder.fields {
|
||||
if field.encoder.omitempty && field.encoder.IsEmpty(ptr) {
|
||||
continue
|
||||
}
|
||||
if field.encoder.IsEmbeddedPtrNil(ptr) {
|
||||
continue
|
||||
}
|
||||
if isNotFirst {
|
||||
stream.WriteMore()
|
||||
}
|
||||
stream.WriteObjectField(field.toName)
|
||||
field.encoder.Encode(ptr, stream)
|
||||
isNotFirst = true
|
||||
}
|
||||
stream.WriteObjectEnd()
|
||||
if stream.Error != nil && stream.Error != io.EOF {
|
||||
stream.Error = fmt.Errorf("%v.%s", encoder.typ, stream.Error.Error())
|
||||
}
|
||||
}
|
||||
|
||||
func (encoder *structEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
type emptyStructEncoder struct {
|
||||
}
|
||||
|
||||
func (encoder *emptyStructEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.WriteEmptyObject()
|
||||
}
|
||||
|
||||
func (encoder *emptyStructEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
type stringModeNumberEncoder struct {
|
||||
elemEncoder ValEncoder
|
||||
}
|
||||
|
||||
func (encoder *stringModeNumberEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
stream.writeByte('"')
|
||||
encoder.elemEncoder.Encode(ptr, stream)
|
||||
stream.writeByte('"')
|
||||
}
|
||||
|
||||
func (encoder *stringModeNumberEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return encoder.elemEncoder.IsEmpty(ptr)
|
||||
}
|
||||
|
||||
type stringModeStringEncoder struct {
|
||||
elemEncoder ValEncoder
|
||||
cfg *frozenConfig
|
||||
}
|
||||
|
||||
func (encoder *stringModeStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
tempStream := encoder.cfg.BorrowStream(nil)
|
||||
tempStream.Attachment = stream.Attachment
|
||||
defer encoder.cfg.ReturnStream(tempStream)
|
||||
encoder.elemEncoder.Encode(ptr, tempStream)
|
||||
stream.WriteString(string(tempStream.Buffer()))
|
||||
}
|
||||
|
||||
func (encoder *stringModeStringEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return encoder.elemEncoder.IsEmpty(ptr)
|
||||
}
|
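A brief sketch of the omitempty path in structEncoder.Encode, assuming the public jsoniter.Marshal entry point and a hypothetical Entry type.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// Entry is a hypothetical type used only for illustration.
type Entry struct {
	Title string `json:"title"`
	Link  string `json:"link,omitempty"`
}

func main() {
	out, _ := jsoniter.Marshal(Entry{Title: "Release"})
	// The empty Link field is skipped by the omitempty/IsEmpty check.
	fmt.Println(string(out)) // {"title":"Release"}
}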
210 vendor/github.com/json-iterator/go/stream.go (generated, vendored, new file)
|
@ -0,0 +1,210 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"io"
|
||||
)
|
||||
|
||||
// Stream is an io.Writer-like object with JSON-specific write functions.
|
||||
// An error is not returned as a return value; it is stored in the Error member of the stream instance.
|
||||
type Stream struct {
|
||||
cfg *frozenConfig
|
||||
out io.Writer
|
||||
buf []byte
|
||||
Error error
|
||||
indention int
|
||||
Attachment interface{} // open for customized encoder
|
||||
}
|
||||
|
||||
// NewStream creates a new Stream instance.
|
||||
// cfg can be jsoniter.ConfigDefault.
|
||||
// out can be nil to write to the internal buffer only.
|
||||
// bufSize is the initial size for the internal buffer in bytes.
|
||||
func NewStream(cfg API, out io.Writer, bufSize int) *Stream {
|
||||
return &Stream{
|
||||
cfg: cfg.(*frozenConfig),
|
||||
out: out,
|
||||
buf: make([]byte, 0, bufSize),
|
||||
Error: nil,
|
||||
indention: 0,
|
||||
}
|
||||
}
|
||||
|
||||
// Pool returns a pool that provides more streams with the same configuration.
|
||||
func (stream *Stream) Pool() StreamPool {
|
||||
return stream.cfg
|
||||
}
|
||||
|
||||
// Reset reuses this stream instance by assigning a new writer.
|
||||
func (stream *Stream) Reset(out io.Writer) {
|
||||
stream.out = out
|
||||
stream.buf = stream.buf[:0]
|
||||
}
|
||||
|
||||
// Available returns how many bytes are unused in the buffer.
|
||||
func (stream *Stream) Available() int {
|
||||
return cap(stream.buf) - len(stream.buf)
|
||||
}
|
||||
|
||||
// Buffered returns the number of bytes that have been written into the current buffer.
|
||||
func (stream *Stream) Buffered() int {
|
||||
return len(stream.buf)
|
||||
}
|
||||
|
||||
// Buffer returns the internal buffer; if the writer is nil, use this method to take the result.
|
||||
func (stream *Stream) Buffer() []byte {
|
||||
return stream.buf
|
||||
}
|
||||
|
||||
// SetBuffer allows appending to the internal buffer directly.
|
||||
func (stream *Stream) SetBuffer(buf []byte) {
|
||||
stream.buf = buf
|
||||
}
|
||||
|
||||
// Write writes the contents of p into the buffer.
|
||||
// It returns the number of bytes written.
|
||||
// If nn < len(p), it also returns an error explaining
|
||||
// why the write is short.
|
||||
func (stream *Stream) Write(p []byte) (nn int, err error) {
|
||||
stream.buf = append(stream.buf, p...)
|
||||
if stream.out != nil {
|
||||
nn, err = stream.out.Write(stream.buf)
|
||||
stream.buf = stream.buf[nn:]
|
||||
return
|
||||
}
|
||||
return len(p), nil
|
||||
}
|
||||
|
||||
// writeByte writes a single byte.
|
||||
func (stream *Stream) writeByte(c byte) {
|
||||
stream.buf = append(stream.buf, c)
|
||||
}
|
||||
|
||||
func (stream *Stream) writeTwoBytes(c1 byte, c2 byte) {
|
||||
stream.buf = append(stream.buf, c1, c2)
|
||||
}
|
||||
|
||||
func (stream *Stream) writeThreeBytes(c1 byte, c2 byte, c3 byte) {
|
||||
stream.buf = append(stream.buf, c1, c2, c3)
|
||||
}
|
||||
|
||||
func (stream *Stream) writeFourBytes(c1 byte, c2 byte, c3 byte, c4 byte) {
|
||||
stream.buf = append(stream.buf, c1, c2, c3, c4)
|
||||
}
|
||||
|
||||
func (stream *Stream) writeFiveBytes(c1 byte, c2 byte, c3 byte, c4 byte, c5 byte) {
|
||||
stream.buf = append(stream.buf, c1, c2, c3, c4, c5)
|
||||
}
|
||||
|
||||
// Flush writes any buffered data to the underlying io.Writer.
|
||||
func (stream *Stream) Flush() error {
|
||||
if stream.out == nil {
|
||||
return nil
|
||||
}
|
||||
if stream.Error != nil {
|
||||
return stream.Error
|
||||
}
|
||||
_, err := stream.out.Write(stream.buf)
|
||||
if err != nil {
|
||||
if stream.Error == nil {
|
||||
stream.Error = err
|
||||
}
|
||||
return err
|
||||
}
|
||||
stream.buf = stream.buf[:0]
|
||||
return nil
|
||||
}
|
||||
|
||||
// WriteRaw writes the string out without quotes, like raw bytes.
|
||||
func (stream *Stream) WriteRaw(s string) {
|
||||
stream.buf = append(stream.buf, s...)
|
||||
}
|
||||
|
||||
// WriteNil writes null to the stream
|
||||
func (stream *Stream) WriteNil() {
|
||||
stream.writeFourBytes('n', 'u', 'l', 'l')
|
||||
}
|
||||
|
||||
// WriteTrue writes true to the stream
|
||||
func (stream *Stream) WriteTrue() {
|
||||
stream.writeFourBytes('t', 'r', 'u', 'e')
|
||||
}
|
||||
|
||||
// WriteFalse writes false to the stream
|
||||
func (stream *Stream) WriteFalse() {
|
||||
stream.writeFiveBytes('f', 'a', 'l', 's', 'e')
|
||||
}
|
||||
|
||||
// WriteBool writes true or false to the stream
|
||||
func (stream *Stream) WriteBool(val bool) {
|
||||
if val {
|
||||
stream.WriteTrue()
|
||||
} else {
|
||||
stream.WriteFalse()
|
||||
}
|
||||
}
|
||||
|
||||
// WriteObjectStart writes { with possible indention
|
||||
func (stream *Stream) WriteObjectStart() {
|
||||
stream.indention += stream.cfg.indentionStep
|
||||
stream.writeByte('{')
|
||||
stream.writeIndention(0)
|
||||
}
|
||||
|
||||
// WriteObjectField writes "field": with possible indention
|
||||
func (stream *Stream) WriteObjectField(field string) {
|
||||
stream.WriteString(field)
|
||||
if stream.indention > 0 {
|
||||
stream.writeTwoBytes(':', ' ')
|
||||
} else {
|
||||
stream.writeByte(':')
|
||||
}
|
||||
}
|
||||
|
||||
// WriteObjectEnd writes } with possible indention
|
||||
func (stream *Stream) WriteObjectEnd() {
|
||||
stream.writeIndention(stream.cfg.indentionStep)
|
||||
stream.indention -= stream.cfg.indentionStep
|
||||
stream.writeByte('}')
|
||||
}
|
||||
|
||||
// WriteEmptyObject writes {}
|
||||
func (stream *Stream) WriteEmptyObject() {
|
||||
stream.writeByte('{')
|
||||
stream.writeByte('}')
|
||||
}
|
||||
|
||||
// WriteMore writes , with possible indention
|
||||
func (stream *Stream) WriteMore() {
|
||||
stream.writeByte(',')
|
||||
stream.writeIndention(0)
|
||||
}
|
||||
|
||||
// WriteArrayStart writes [ with possible indention
|
||||
func (stream *Stream) WriteArrayStart() {
|
||||
stream.indention += stream.cfg.indentionStep
|
||||
stream.writeByte('[')
|
||||
stream.writeIndention(0)
|
||||
}
|
||||
|
||||
// WriteEmptyArray writes []
|
||||
func (stream *Stream) WriteEmptyArray() {
|
||||
stream.writeTwoBytes('[', ']')
|
||||
}
|
||||
|
||||
// WriteArrayEnd writes ] with possible indention
|
||||
func (stream *Stream) WriteArrayEnd() {
|
||||
stream.writeIndention(stream.cfg.indentionStep)
|
||||
stream.indention -= stream.cfg.indentionStep
|
||||
stream.writeByte(']')
|
||||
}
|
||||
|
||||
func (stream *Stream) writeIndention(delta int) {
|
||||
if stream.indention == 0 {
|
||||
return
|
||||
}
|
||||
stream.writeByte('\n')
|
||||
toWrite := stream.indention - delta
|
||||
for i := 0; i < toWrite; i++ {
|
||||
stream.buf = append(stream.buf, ' ')
|
||||
}
|
||||
}
|
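A minimal usage sketch of the Stream API above, assuming jsoniter.ConfigDefault (as the NewStream comment suggests); the field names and values are illustrative only.

package main

import (
	"os"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	stream := jsoniter.NewStream(jsoniter.ConfigDefault, os.Stdout, 512)
	stream.WriteObjectStart()
	stream.WriteObjectField("feed")
	stream.WriteString("https://example.org/atom.xml")
	stream.WriteMore()
	stream.WriteObjectField("items")
	stream.WriteInt(3)
	stream.WriteObjectEnd()
	if err := stream.Flush(); err != nil { // Flush pushes the buffer to os.Stdout
		panic(err)
	}
	// Output: {"feed":"https://example.org/atom.xml","items":3}
}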
111 vendor/github.com/json-iterator/go/stream_float.go (generated, vendored, new file)
|
@ -0,0 +1,111 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
var pow10 []uint64
|
||||
|
||||
func init() {
|
||||
pow10 = []uint64{1, 10, 100, 1000, 10000, 100000, 1000000}
|
||||
}
|
||||
|
||||
// WriteFloat32 writes a float32 to the stream
|
||||
func (stream *Stream) WriteFloat32(val float32) {
|
||||
if math.IsInf(float64(val), 0) || math.IsNaN(float64(val)) {
|
||||
stream.Error = fmt.Errorf("unsupported value: %f", val)
|
||||
return
|
||||
}
|
||||
abs := math.Abs(float64(val))
|
||||
fmt := byte('f')
|
||||
// Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
|
||||
if abs != 0 {
|
||||
if float32(abs) < 1e-6 || float32(abs) >= 1e21 {
|
||||
fmt = 'e'
|
||||
}
|
||||
}
|
||||
stream.buf = strconv.AppendFloat(stream.buf, float64(val), fmt, -1, 32)
|
||||
}
|
||||
|
||||
// WriteFloat32Lossy writes a float32 to the stream with only 6 digits of precision, but is much faster
|
||||
func (stream *Stream) WriteFloat32Lossy(val float32) {
|
||||
if math.IsInf(float64(val), 0) || math.IsNaN(float64(val)) {
|
||||
stream.Error = fmt.Errorf("unsupported value: %f", val)
|
||||
return
|
||||
}
|
||||
if val < 0 {
|
||||
stream.writeByte('-')
|
||||
val = -val
|
||||
}
|
||||
if val > 0x4ffffff {
|
||||
stream.WriteFloat32(val)
|
||||
return
|
||||
}
|
||||
precision := 6
|
||||
exp := uint64(1000000) // 6
|
||||
lval := uint64(float64(val)*float64(exp) + 0.5)
|
||||
stream.WriteUint64(lval / exp)
|
||||
fval := lval % exp
|
||||
if fval == 0 {
|
||||
return
|
||||
}
|
||||
stream.writeByte('.')
|
||||
for p := precision - 1; p > 0 && fval < pow10[p]; p-- {
|
||||
stream.writeByte('0')
|
||||
}
|
||||
stream.WriteUint64(fval)
|
||||
for stream.buf[len(stream.buf)-1] == '0' {
|
||||
stream.buf = stream.buf[:len(stream.buf)-1]
|
||||
}
|
||||
}
|
||||
|
||||
// WriteFloat64 writes a float64 to the stream
|
||||
func (stream *Stream) WriteFloat64(val float64) {
|
||||
if math.IsInf(val, 0) || math.IsNaN(val) {
|
||||
stream.Error = fmt.Errorf("unsupported value: %f", val)
|
||||
return
|
||||
}
|
||||
abs := math.Abs(val)
|
||||
fmt := byte('f')
|
||||
// Use scientific notation when the absolute value is outside [1e-6, 1e21).
|
||||
if abs != 0 {
|
||||
if abs < 1e-6 || abs >= 1e21 {
|
||||
fmt = 'e'
|
||||
}
|
||||
}
|
||||
stream.buf = strconv.AppendFloat(stream.buf, float64(val), fmt, -1, 64)
|
||||
}
|
||||
|
||||
// WriteFloat64Lossy writes a float64 to the stream with only 6 digits of precision, but is much faster
|
||||
func (stream *Stream) WriteFloat64Lossy(val float64) {
|
||||
if math.IsInf(val, 0) || math.IsNaN(val) {
|
||||
stream.Error = fmt.Errorf("unsupported value: %f", val)
|
||||
return
|
||||
}
|
||||
if val < 0 {
|
||||
stream.writeByte('-')
|
||||
val = -val
|
||||
}
|
||||
if val > 0x4ffffff {
|
||||
stream.WriteFloat64(val)
|
||||
return
|
||||
}
|
||||
precision := 6
|
||||
exp := uint64(1000000) // 6
|
||||
lval := uint64(val*float64(exp) + 0.5)
|
||||
stream.WriteUint64(lval / exp)
|
||||
fval := lval % exp
|
||||
if fval == 0 {
|
||||
return
|
||||
}
|
||||
stream.writeByte('.')
|
||||
for p := precision - 1; p > 0 && fval < pow10[p]; p-- {
|
||||
stream.writeByte('0')
|
||||
}
|
||||
stream.WriteUint64(fval)
|
||||
for stream.buf[len(stream.buf)-1] == '0' {
|
||||
stream.buf = stream.buf[:len(stream.buf)-1]
|
||||
}
|
||||
}
|
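A small sketch contrasting WriteFloat64 and WriteFloat64Lossy, using a buffer-only stream (out == nil) and an illustrative value.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	s := jsoniter.NewStream(jsoniter.ConfigDefault, nil, 64)
	s.WriteFloat64Lossy(3.14159265358979) // 6-digit fixed precision
	s.WriteRaw(" ")
	s.WriteFloat64(3.14159265358979) // shortest exact representation via strconv
	fmt.Println(string(s.Buffer()))  // 3.141593 3.14159265358979
}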
190 vendor/github.com/json-iterator/go/stream_int.go (generated, vendored, new file)
|
@ -0,0 +1,190 @@
|
|||
package jsoniter
|
||||
|
||||
var digits []uint32
|
||||
|
||||
func init() {
|
||||
digits = make([]uint32, 1000)
|
||||
for i := uint32(0); i < 1000; i++ {
|
||||
digits[i] = (((i / 100) + '0') << 16) + ((((i / 10) % 10) + '0') << 8) + i%10 + '0'
|
||||
if i < 10 {
|
||||
digits[i] += 2 << 24
|
||||
} else if i < 100 {
|
||||
digits[i] += 1 << 24
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func writeFirstBuf(space []byte, v uint32) []byte {
|
||||
start := v >> 24
|
||||
if start == 0 {
|
||||
space = append(space, byte(v>>16), byte(v>>8))
|
||||
} else if start == 1 {
|
||||
space = append(space, byte(v>>8))
|
||||
}
|
||||
space = append(space, byte(v))
|
||||
return space
|
||||
}
|
||||
|
||||
func writeBuf(buf []byte, v uint32) []byte {
|
||||
return append(buf, byte(v>>16), byte(v>>8), byte(v))
|
||||
}
|
||||
|
||||
// WriteUint8 writes a uint8 to the stream
|
||||
func (stream *Stream) WriteUint8(val uint8) {
|
||||
stream.buf = writeFirstBuf(stream.buf, digits[val])
|
||||
}
|
||||
|
||||
// WriteInt8 writes an int8 to the stream
|
||||
func (stream *Stream) WriteInt8(nval int8) {
|
||||
var val uint8
|
||||
if nval < 0 {
|
||||
val = uint8(-nval)
|
||||
stream.buf = append(stream.buf, '-')
|
||||
} else {
|
||||
val = uint8(nval)
|
||||
}
|
||||
stream.buf = writeFirstBuf(stream.buf, digits[val])
|
||||
}
|
||||
|
||||
// WriteUint16 writes a uint16 to the stream
|
||||
func (stream *Stream) WriteUint16(val uint16) {
|
||||
q1 := val / 1000
|
||||
if q1 == 0 {
|
||||
stream.buf = writeFirstBuf(stream.buf, digits[val])
|
||||
return
|
||||
}
|
||||
r1 := val - q1*1000
|
||||
stream.buf = writeFirstBuf(stream.buf, digits[q1])
|
||||
stream.buf = writeBuf(stream.buf, digits[r1])
|
||||
return
|
||||
}
|
||||
|
||||
// WriteInt16 writes an int16 to the stream
|
||||
func (stream *Stream) WriteInt16(nval int16) {
|
||||
var val uint16
|
||||
if nval < 0 {
|
||||
val = uint16(-nval)
|
||||
stream.buf = append(stream.buf, '-')
|
||||
} else {
|
||||
val = uint16(nval)
|
||||
}
|
||||
stream.WriteUint16(val)
|
||||
}
|
||||
|
||||
// WriteUint32 writes a uint32 to the stream
|
||||
func (stream *Stream) WriteUint32(val uint32) {
|
||||
q1 := val / 1000
|
||||
if q1 == 0 {
|
||||
stream.buf = writeFirstBuf(stream.buf, digits[val])
|
||||
return
|
||||
}
|
||||
r1 := val - q1*1000
|
||||
q2 := q1 / 1000
|
||||
if q2 == 0 {
|
||||
stream.buf = writeFirstBuf(stream.buf, digits[q1])
|
||||
stream.buf = writeBuf(stream.buf, digits[r1])
|
||||
return
|
||||
}
|
||||
r2 := q1 - q2*1000
|
||||
q3 := q2 / 1000
|
||||
if q3 == 0 {
|
||||
stream.buf = writeFirstBuf(stream.buf, digits[q2])
|
||||
} else {
|
||||
r3 := q2 - q3*1000
|
||||
stream.buf = append(stream.buf, byte(q3+'0'))
|
||||
stream.buf = writeBuf(stream.buf, digits[r3])
|
||||
}
|
||||
stream.buf = writeBuf(stream.buf, digits[r2])
|
||||
stream.buf = writeBuf(stream.buf, digits[r1])
|
||||
}
|
||||
|
||||
// WriteInt32 writes an int32 to the stream
|
||||
func (stream *Stream) WriteInt32(nval int32) {
|
||||
var val uint32
|
||||
if nval < 0 {
|
||||
val = uint32(-nval)
|
||||
stream.buf = append(stream.buf, '-')
|
||||
} else {
|
||||
val = uint32(nval)
|
||||
}
|
||||
stream.WriteUint32(val)
|
||||
}
|
||||
|
||||
// WriteUint64 writes a uint64 to the stream
|
||||
func (stream *Stream) WriteUint64(val uint64) {
|
||||
q1 := val / 1000
|
||||
if q1 == 0 {
|
||||
stream.buf = writeFirstBuf(stream.buf, digits[val])
|
||||
return
|
||||
}
|
||||
r1 := val - q1*1000
|
||||
q2 := q1 / 1000
|
||||
if q2 == 0 {
|
||||
stream.buf = writeFirstBuf(stream.buf, digits[q1])
|
||||
stream.buf = writeBuf(stream.buf, digits[r1])
|
||||
return
|
||||
}
|
||||
r2 := q1 - q2*1000
|
||||
q3 := q2 / 1000
|
||||
if q3 == 0 {
|
||||
stream.buf = writeFirstBuf(stream.buf, digits[q2])
|
||||
stream.buf = writeBuf(stream.buf, digits[r2])
|
||||
stream.buf = writeBuf(stream.buf, digits[r1])
|
||||
return
|
||||
}
|
||||
r3 := q2 - q3*1000
|
||||
q4 := q3 / 1000
|
||||
if q4 == 0 {
|
||||
stream.buf = writeFirstBuf(stream.buf, digits[q3])
|
||||
stream.buf = writeBuf(stream.buf, digits[r3])
|
||||
stream.buf = writeBuf(stream.buf, digits[r2])
|
||||
stream.buf = writeBuf(stream.buf, digits[r1])
|
||||
return
|
||||
}
|
||||
r4 := q3 - q4*1000
|
||||
q5 := q4 / 1000
|
||||
if q5 == 0 {
|
||||
stream.buf = writeFirstBuf(stream.buf, digits[q4])
|
||||
stream.buf = writeBuf(stream.buf, digits[r4])
|
||||
stream.buf = writeBuf(stream.buf, digits[r3])
|
||||
stream.buf = writeBuf(stream.buf, digits[r2])
|
||||
stream.buf = writeBuf(stream.buf, digits[r1])
|
||||
return
|
||||
}
|
||||
r5 := q4 - q5*1000
|
||||
q6 := q5 / 1000
|
||||
if q6 == 0 {
|
||||
stream.buf = writeFirstBuf(stream.buf, digits[q5])
|
||||
} else {
|
||||
stream.buf = writeFirstBuf(stream.buf, digits[q6])
|
||||
r6 := q5 - q6*1000
|
||||
stream.buf = writeBuf(stream.buf, digits[r6])
|
||||
}
|
||||
stream.buf = writeBuf(stream.buf, digits[r5])
|
||||
stream.buf = writeBuf(stream.buf, digits[r4])
|
||||
stream.buf = writeBuf(stream.buf, digits[r3])
|
||||
stream.buf = writeBuf(stream.buf, digits[r2])
|
||||
stream.buf = writeBuf(stream.buf, digits[r1])
|
||||
}
|
||||
|
||||
// WriteInt64 writes an int64 to the stream
|
||||
func (stream *Stream) WriteInt64(nval int64) {
|
||||
var val uint64
|
||||
if nval < 0 {
|
||||
val = uint64(-nval)
|
||||
stream.buf = append(stream.buf, '-')
|
||||
} else {
|
||||
val = uint64(nval)
|
||||
}
|
||||
stream.WriteUint64(val)
|
||||
}
|
||||
|
||||
// WriteInt writes an int to the stream
|
||||
func (stream *Stream) WriteInt(val int) {
|
||||
stream.WriteInt64(int64(val))
|
||||
}
|
||||
|
||||
// WriteUint writes a uint to the stream
|
||||
func (stream *Stream) WriteUint(val uint) {
|
||||
stream.WriteUint64(uint64(val))
|
||||
}
|
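A tiny sketch of the digit-triplet integer writers above, again with a buffer-only stream and illustrative values.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	s := jsoniter.NewStream(jsoniter.ConfigDefault, nil, 64)
	s.WriteInt64(-1234567) // sign byte, then digits emitted in groups of three
	s.WriteRaw(" ")
	s.WriteUint16(42)
	fmt.Println(string(s.Buffer())) // -1234567 42
}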
372 vendor/github.com/json-iterator/go/stream_str.go (generated, vendored, new file)
|
@ -0,0 +1,372 @@
|
|||
package jsoniter
|
||||
|
||||
import (
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// htmlSafeSet holds the value true if the ASCII character with the given
|
||||
// array position can be safely represented inside a JSON string, embedded
|
||||
// inside of HTML <script> tags, without any additional escaping.
|
||||
//
|
||||
// All values are true except for the ASCII control characters (0-31), the
|
||||
// double quote ("), the backslash character ("\"), HTML opening and closing
|
||||
// tags ("<" and ">"), and the ampersand ("&").
|
||||
var htmlSafeSet = [utf8.RuneSelf]bool{
|
||||
' ': true,
|
||||
'!': true,
|
||||
'"': false,
|
||||
'#': true,
|
||||
'$': true,
|
||||
'%': true,
|
||||
'&': false,
|
||||
'\'': true,
|
||||
'(': true,
|
||||
')': true,
|
||||
'*': true,
|
||||
'+': true,
|
||||
',': true,
|
||||
'-': true,
|
||||
'.': true,
|
||||
'/': true,
|
||||
'0': true,
|
||||
'1': true,
|
||||
'2': true,
|
||||
'3': true,
|
||||
'4': true,
|
||||
'5': true,
|
||||
'6': true,
|
||||
'7': true,
|
||||
'8': true,
|
||||
'9': true,
|
||||
':': true,
|
||||
';': true,
|
||||
'<': false,
|
||||
'=': true,
|
||||
'>': false,
|
||||
'?': true,
|
||||
'@': true,
|
||||
'A': true,
|
||||
'B': true,
|
||||
'C': true,
|
||||
'D': true,
|
||||
'E': true,
|
||||
'F': true,
|
||||
'G': true,
|
||||
'H': true,
|
||||
'I': true,
|
||||
'J': true,
|
||||
'K': true,
|
||||
'L': true,
|
||||
'M': true,
|
||||
'N': true,
|
||||
'O': true,
|
||||
'P': true,
|
||||
'Q': true,
|
||||
'R': true,
|
||||
'S': true,
|
||||
'T': true,
|
||||
'U': true,
|
||||
'V': true,
|
||||
'W': true,
|
||||
'X': true,
|
||||
'Y': true,
|
||||
'Z': true,
|
||||
'[': true,
|
||||
'\\': false,
|
||||
']': true,
|
||||
'^': true,
|
||||
'_': true,
|
||||
'`': true,
|
||||
'a': true,
|
||||
'b': true,
|
||||
'c': true,
|
||||
'd': true,
|
||||
'e': true,
|
||||
'f': true,
|
||||
'g': true,
|
||||
'h': true,
|
||||
'i': true,
|
||||
'j': true,
|
||||
'k': true,
|
||||
'l': true,
|
||||
'm': true,
|
||||
'n': true,
|
||||
'o': true,
|
||||
'p': true,
|
||||
'q': true,
|
||||
'r': true,
|
||||
's': true,
|
||||
't': true,
|
||||
'u': true,
|
||||
'v': true,
|
||||
'w': true,
|
||||
'x': true,
|
||||
'y': true,
|
||||
'z': true,
|
||||
'{': true,
|
||||
'|': true,
|
||||
'}': true,
|
||||
'~': true,
|
||||
'\u007f': true,
|
||||
}
|
||||
|
||||
// safeSet holds the value true if the ASCII character with the given array
|
||||
// position can be represented inside a JSON string without any further
|
||||
// escaping.
|
||||
//
|
||||
// All values are true except for the ASCII control characters (0-31), the
|
||||
// double quote ("), and the backslash character ("\").
|
||||
var safeSet = [utf8.RuneSelf]bool{
|
||||
' ': true,
|
||||
'!': true,
|
||||
'"': false,
|
||||
'#': true,
|
||||
'$': true,
|
||||
'%': true,
|
||||
'&': true,
|
||||
'\'': true,
|
||||
'(': true,
|
||||
')': true,
|
||||
'*': true,
|
||||
'+': true,
|
||||
',': true,
|
||||
'-': true,
|
||||
'.': true,
|
||||
'/': true,
|
||||
'0': true,
|
||||
'1': true,
|
||||
'2': true,
|
||||
'3': true,
|
||||
'4': true,
|
||||
'5': true,
|
||||
'6': true,
|
||||
'7': true,
|
||||
'8': true,
|
||||
'9': true,
|
||||
':': true,
|
||||
';': true,
|
||||
'<': true,
|
||||
'=': true,
|
||||
'>': true,
|
||||
'?': true,
|
||||
'@': true,
|
||||
'A': true,
|
||||
'B': true,
|
||||
'C': true,
|
||||
'D': true,
|
||||
'E': true,
|
||||
'F': true,
|
||||
'G': true,
|
||||
'H': true,
|
||||
'I': true,
|
||||
'J': true,
|
||||
'K': true,
|
||||
'L': true,
|
||||
'M': true,
|
||||
'N': true,
|
||||
'O': true,
|
||||
'P': true,
|
||||
'Q': true,
|
||||
'R': true,
|
||||
'S': true,
|
||||
'T': true,
|
||||
'U': true,
|
||||
'V': true,
|
||||
'W': true,
|
||||
'X': true,
|
||||
'Y': true,
|
||||
'Z': true,
|
||||
'[': true,
|
||||
'\\': false,
|
||||
']': true,
|
||||
'^': true,
|
||||
'_': true,
|
||||
'`': true,
|
||||
'a': true,
|
||||
'b': true,
|
||||
'c': true,
|
||||
'd': true,
|
||||
'e': true,
|
||||
'f': true,
|
||||
'g': true,
|
||||
'h': true,
|
||||
'i': true,
|
||||
'j': true,
|
||||
'k': true,
|
||||
'l': true,
|
||||
'm': true,
|
||||
'n': true,
|
||||
'o': true,
|
||||
'p': true,
|
||||
'q': true,
|
||||
'r': true,
|
||||
's': true,
|
||||
't': true,
|
||||
'u': true,
|
||||
'v': true,
|
||||
'w': true,
|
||||
'x': true,
|
||||
'y': true,
|
||||
'z': true,
|
||||
'{': true,
|
||||
'|': true,
|
||||
'}': true,
|
||||
'~': true,
|
||||
'\u007f': true,
|
||||
}
|
||||
|
||||
var hex = "0123456789abcdef"
|
||||
|
||||
// WriteStringWithHTMLEscaped writes a string to the stream with HTML special characters escaped
|
||||
func (stream *Stream) WriteStringWithHTMLEscaped(s string) {
|
||||
valLen := len(s)
|
||||
stream.buf = append(stream.buf, '"')
|
||||
// write string, the fast path, without utf8 and escape support
|
||||
i := 0
|
||||
for ; i < valLen; i++ {
|
||||
c := s[i]
|
||||
if c < utf8.RuneSelf && htmlSafeSet[c] {
|
||||
stream.buf = append(stream.buf, c)
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
if i == valLen {
|
||||
stream.buf = append(stream.buf, '"')
|
||||
return
|
||||
}
|
||||
writeStringSlowPathWithHTMLEscaped(stream, i, s, valLen)
|
||||
}
|
||||
|
||||
func writeStringSlowPathWithHTMLEscaped(stream *Stream, i int, s string, valLen int) {
|
||||
start := i
|
||||
// for the remaining parts, we process them char by char
|
||||
for i < valLen {
|
||||
if b := s[i]; b < utf8.RuneSelf {
|
||||
if htmlSafeSet[b] {
|
||||
i++
|
||||
continue
|
||||
}
|
||||
if start < i {
|
||||
stream.WriteRaw(s[start:i])
|
||||
}
|
||||
switch b {
|
||||
case '\\', '"':
|
||||
stream.writeTwoBytes('\\', b)
|
||||
case '\n':
|
||||
stream.writeTwoBytes('\\', 'n')
|
||||
case '\r':
|
||||
stream.writeTwoBytes('\\', 'r')
|
||||
case '\t':
|
||||
stream.writeTwoBytes('\\', 't')
|
||||
default:
|
||||
// This encodes bytes < 0x20 except for \t, \n and \r.
|
||||
// If escapeHTML is set, it also escapes <, >, and &
|
||||
// because they can lead to security holes when
|
||||
// user-controlled strings are rendered into JSON
|
||||
// and served to some browsers.
|
||||
stream.WriteRaw(`\u00`)
|
||||
stream.writeTwoBytes(hex[b>>4], hex[b&0xF])
|
||||
}
|
||||
i++
|
||||
start = i
|
||||
continue
|
||||
}
|
||||
c, size := utf8.DecodeRuneInString(s[i:])
|
||||
if c == utf8.RuneError && size == 1 {
|
||||
if start < i {
|
||||
stream.WriteRaw(s[start:i])
|
||||
}
|
||||
stream.WriteRaw(`\ufffd`)
|
||||
i++
|
||||
start = i
|
||||
continue
|
||||
}
|
||||
// U+2028 is LINE SEPARATOR.
|
||||
// U+2029 is PARAGRAPH SEPARATOR.
|
||||
// They are both technically valid characters in JSON strings,
|
||||
// but don't work in JSONP, which has to be evaluated as JavaScript,
|
||||
// and can lead to security holes there. It is valid JSON to
|
||||
// escape them, so we do so unconditionally.
|
||||
// See http://timelessrepo.com/json-isnt-a-javascript-subset for discussion.
|
||||
if c == '\u2028' || c == '\u2029' {
|
||||
if start < i {
|
||||
stream.WriteRaw(s[start:i])
|
||||
}
|
||||
stream.WriteRaw(`\u202`)
|
||||
stream.writeByte(hex[c&0xF])
|
||||
i += size
|
||||
start = i
|
||||
continue
|
||||
}
|
||||
i += size
|
||||
}
|
||||
if start < len(s) {
|
||||
stream.WriteRaw(s[start:])
|
||||
}
|
||||
stream.writeByte('"')
|
||||
}
|
||||
|
||||
// WriteString writes a string to the stream without HTML escaping
|
||||
func (stream *Stream) WriteString(s string) {
|
||||
valLen := len(s)
|
||||
stream.buf = append(stream.buf, '"')
|
||||
// write string, the fast path, without utf8 and escape support
|
||||
i := 0
|
||||
for ; i < valLen; i++ {
|
||||
c := s[i]
|
||||
if c > 31 && c != '"' && c != '\\' {
|
||||
stream.buf = append(stream.buf, c)
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
if i == valLen {
|
||||
stream.buf = append(stream.buf, '"')
|
||||
return
|
||||
}
|
||||
writeStringSlowPath(stream, i, s, valLen)
|
||||
}
|
||||
|
||||
func writeStringSlowPath(stream *Stream, i int, s string, valLen int) {
|
||||
start := i
|
||||
// for the remaining parts, we process them char by char
|
||||
for i < valLen {
|
||||
if b := s[i]; b < utf8.RuneSelf {
|
||||
if safeSet[b] {
|
||||
i++
|
||||
continue
|
||||
}
|
||||
if start < i {
|
||||
stream.WriteRaw(s[start:i])
|
||||
}
|
||||
switch b {
|
||||
case '\\', '"':
|
||||
stream.writeTwoBytes('\\', b)
|
||||
case '\n':
|
||||
stream.writeTwoBytes('\\', 'n')
|
||||
case '\r':
|
||||
stream.writeTwoBytes('\\', 'r')
|
||||
case '\t':
|
||||
stream.writeTwoBytes('\\', 't')
|
||||
default:
|
||||
// This encodes bytes < 0x20 except for \t, \n and \r.
|
||||
// If escapeHTML is set, it also escapes <, >, and &
|
||||
// because they can lead to security holes when
|
||||
// user-controlled strings are rendered into JSON
|
||||
// and served to some browsers.
|
||||
stream.WriteRaw(`\u00`)
|
||||
stream.writeTwoBytes(hex[b>>4], hex[b&0xF])
|
||||
}
|
||||
i++
|
||||
start = i
|
||||
continue
|
||||
}
|
||||
i++
|
||||
continue
|
||||
}
|
||||
if start < len(s) {
|
||||
stream.WriteRaw(s[start:])
|
||||
}
|
||||
stream.writeByte('"')
|
||||
}
|
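A short sketch contrasting the two string writers above, with a buffer-only stream and an illustrative input.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	s := jsoniter.NewStream(jsoniter.ConfigDefault, nil, 64)
	s.WriteStringWithHTMLEscaped("<b>bold</b>") // <, > and & become \u00XX escapes
	s.WriteRaw(" ")
	s.WriteString("<b>bold</b>") // only ", \ and control bytes are escaped
	fmt.Println(string(s.Buffer()))
	// "\u003cb\u003ebold\u003c/b\u003e" "<b>bold</b>"
}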
12 vendor/github.com/json-iterator/go/test.sh (generated, vendored, new file)
|
@ -0,0 +1,12 @@
|
|||
#!/usr/bin/env bash

set -e
echo "" > coverage.txt

for d in $(go list ./... | grep -v vendor); do
    go test -coverprofile=profile.out -coverpkg=github.com/json-iterator/go $d
    if [ -f profile.out ]; then
        cat profile.out >> coverage.txt
        rm profile.out
    fi
done
|
14 vendor/github.com/mattn/go-runewidth/.travis.yml (generated, vendored)
|
@ -1,8 +1,16 @@
|
|||
language: go
|
||||
sudo: false
|
||||
go:
|
||||
- 1.13.x
|
||||
- tip
|
||||
|
||||
before_install:
|
||||
- go get github.com/mattn/goveralls
|
||||
- go get golang.org/x/tools/cmd/cover
|
||||
- go get -t -v ./...
|
||||
|
||||
script:
|
||||
- $HOME/gopath/bin/goveralls -repotoken lAKAWPzcGsD3A8yBX3BGGtRUdJ6CaGERL
|
||||
- go generate
|
||||
- git diff --cached --exit-code
|
||||
- ./go.test.sh
|
||||
|
||||
after_success:
|
||||
- bash <(curl -s https://codecov.io/bash)
|
||||
|
|
|
@@ -2,7 +2,7 @@ go-runewidth
============

[![Build Status](https://travis-ci.org/mattn/go-runewidth.png?branch=master)](https://travis-ci.org/mattn/go-runewidth)
[![Coverage Status](https://coveralls.io/repos/mattn/go-runewidth/badge.png?branch=HEAD)](https://coveralls.io/r/mattn/go-runewidth?branch=HEAD)
[![Codecov](https://codecov.io/gh/mattn/go-runewidth/branch/master/graph/badge.svg)](https://codecov.io/gh/mattn/go-runewidth)
[![GoDoc](https://godoc.org/github.com/mattn/go-runewidth?status.svg)](http://godoc.org/github.com/mattn/go-runewidth)
[![Go Report Card](https://goreportcard.com/badge/github.com/mattn/go-runewidth)](https://goreportcard.com/report/github.com/mattn/go-runewidth)

12 vendor/github.com/mattn/go-runewidth/go.test.sh (generated, vendored, new file)

@@ -0,0 +1,12 @@
#!/usr/bin/env bash

set -e
echo "" > coverage.txt

for d in $(go list ./... | grep -v vendor); do
    go test -race -coverprofile=profile.out -covermode=atomic "$d"
    if [ -f profile.out ]; then
        cat profile.out >> coverage.txt
        rm profile.out
    fi
done
736 vendor/github.com/mattn/go-runewidth/runewidth.go (generated, vendored)

@@ -4,6 +2,8 @@ import (
"os"
|
||||
)
|
||||
|
||||
//go:generate go run script/generate.go
|
||||
|
||||
var (
|
||||
// EastAsianWidth will be set true if the current locale is CJK
|
||||
EastAsianWidth bool
|
||||
|
@@ -48,7 +50,6 @@ func inTables(r rune, ts ...table) bool {
}

func inTable(r rune, t table) bool {
	// func (t table) IncludesRune(r rune) bool {
	if r < t[0].first {
		return false
	}

@ -82,728 +83,6 @@ var nonprint = table{
|
|||
{0xFEFF, 0xFEFF}, {0xFFF9, 0xFFFB}, {0xFFFE, 0xFFFF},
|
||||
}
|
||||
|
||||
var combining = table{
|
||||
{0x0300, 0x036F}, {0x0483, 0x0489}, {0x0591, 0x05BD},
|
||||
{0x05BF, 0x05BF}, {0x05C1, 0x05C2}, {0x05C4, 0x05C5},
|
||||
{0x05C7, 0x05C7}, {0x0610, 0x061A}, {0x064B, 0x065F},
|
||||
{0x0670, 0x0670}, {0x06D6, 0x06DC}, {0x06DF, 0x06E4},
|
||||
{0x06E7, 0x06E8}, {0x06EA, 0x06ED}, {0x0711, 0x0711},
|
||||
{0x0730, 0x074A}, {0x07A6, 0x07B0}, {0x07EB, 0x07F3},
|
||||
{0x0816, 0x0819}, {0x081B, 0x0823}, {0x0825, 0x0827},
|
||||
{0x0829, 0x082D}, {0x0859, 0x085B}, {0x08D4, 0x08E1},
|
||||
{0x08E3, 0x0903}, {0x093A, 0x093C}, {0x093E, 0x094F},
|
||||
{0x0951, 0x0957}, {0x0962, 0x0963}, {0x0981, 0x0983},
|
||||
{0x09BC, 0x09BC}, {0x09BE, 0x09C4}, {0x09C7, 0x09C8},
|
||||
{0x09CB, 0x09CD}, {0x09D7, 0x09D7}, {0x09E2, 0x09E3},
|
||||
{0x0A01, 0x0A03}, {0x0A3C, 0x0A3C}, {0x0A3E, 0x0A42},
|
||||
{0x0A47, 0x0A48}, {0x0A4B, 0x0A4D}, {0x0A51, 0x0A51},
|
||||
{0x0A70, 0x0A71}, {0x0A75, 0x0A75}, {0x0A81, 0x0A83},
|
||||
{0x0ABC, 0x0ABC}, {0x0ABE, 0x0AC5}, {0x0AC7, 0x0AC9},
|
||||
{0x0ACB, 0x0ACD}, {0x0AE2, 0x0AE3}, {0x0B01, 0x0B03},
|
||||
{0x0B3C, 0x0B3C}, {0x0B3E, 0x0B44}, {0x0B47, 0x0B48},
|
||||
{0x0B4B, 0x0B4D}, {0x0B56, 0x0B57}, {0x0B62, 0x0B63},
|
||||
{0x0B82, 0x0B82}, {0x0BBE, 0x0BC2}, {0x0BC6, 0x0BC8},
|
||||
{0x0BCA, 0x0BCD}, {0x0BD7, 0x0BD7}, {0x0C00, 0x0C03},
|
||||
{0x0C3E, 0x0C44}, {0x0C46, 0x0C48}, {0x0C4A, 0x0C4D},
|
||||
{0x0C55, 0x0C56}, {0x0C62, 0x0C63}, {0x0C81, 0x0C83},
|
||||
{0x0CBC, 0x0CBC}, {0x0CBE, 0x0CC4}, {0x0CC6, 0x0CC8},
|
||||
{0x0CCA, 0x0CCD}, {0x0CD5, 0x0CD6}, {0x0CE2, 0x0CE3},
|
||||
{0x0D01, 0x0D03}, {0x0D3E, 0x0D44}, {0x0D46, 0x0D48},
|
||||
{0x0D4A, 0x0D4D}, {0x0D57, 0x0D57}, {0x0D62, 0x0D63},
|
||||
{0x0D82, 0x0D83}, {0x0DCA, 0x0DCA}, {0x0DCF, 0x0DD4},
|
||||
{0x0DD6, 0x0DD6}, {0x0DD8, 0x0DDF}, {0x0DF2, 0x0DF3},
|
||||
{0x0E31, 0x0E31}, {0x0E34, 0x0E3A}, {0x0E47, 0x0E4E},
|
||||
{0x0EB1, 0x0EB1}, {0x0EB4, 0x0EB9}, {0x0EBB, 0x0EBC},
|
||||
{0x0EC8, 0x0ECD}, {0x0F18, 0x0F19}, {0x0F35, 0x0F35},
|
||||
{0x0F37, 0x0F37}, {0x0F39, 0x0F39}, {0x0F3E, 0x0F3F},
|
||||
{0x0F71, 0x0F84}, {0x0F86, 0x0F87}, {0x0F8D, 0x0F97},
|
||||
{0x0F99, 0x0FBC}, {0x0FC6, 0x0FC6}, {0x102B, 0x103E},
|
||||
{0x1056, 0x1059}, {0x105E, 0x1060}, {0x1062, 0x1064},
|
||||
{0x1067, 0x106D}, {0x1071, 0x1074}, {0x1082, 0x108D},
|
||||
{0x108F, 0x108F}, {0x109A, 0x109D}, {0x135D, 0x135F},
|
||||
{0x1712, 0x1714}, {0x1732, 0x1734}, {0x1752, 0x1753},
|
||||
{0x1772, 0x1773}, {0x17B4, 0x17D3}, {0x17DD, 0x17DD},
|
||||
{0x180B, 0x180D}, {0x1885, 0x1886}, {0x18A9, 0x18A9},
|
||||
{0x1920, 0x192B}, {0x1930, 0x193B}, {0x1A17, 0x1A1B},
|
||||
{0x1A55, 0x1A5E}, {0x1A60, 0x1A7C}, {0x1A7F, 0x1A7F},
|
||||
{0x1AB0, 0x1ABE}, {0x1B00, 0x1B04}, {0x1B34, 0x1B44},
|
||||
{0x1B6B, 0x1B73}, {0x1B80, 0x1B82}, {0x1BA1, 0x1BAD},
|
||||
{0x1BE6, 0x1BF3}, {0x1C24, 0x1C37}, {0x1CD0, 0x1CD2},
|
||||
{0x1CD4, 0x1CE8}, {0x1CED, 0x1CED}, {0x1CF2, 0x1CF4},
|
||||
{0x1CF8, 0x1CF9}, {0x1DC0, 0x1DF5}, {0x1DFB, 0x1DFF},
|
||||
{0x20D0, 0x20F0}, {0x2CEF, 0x2CF1}, {0x2D7F, 0x2D7F},
|
||||
{0x2DE0, 0x2DFF}, {0x302A, 0x302F}, {0x3099, 0x309A},
|
||||
{0xA66F, 0xA672}, {0xA674, 0xA67D}, {0xA69E, 0xA69F},
|
||||
{0xA6F0, 0xA6F1}, {0xA802, 0xA802}, {0xA806, 0xA806},
|
||||
{0xA80B, 0xA80B}, {0xA823, 0xA827}, {0xA880, 0xA881},
|
||||
{0xA8B4, 0xA8C5}, {0xA8E0, 0xA8F1}, {0xA926, 0xA92D},
|
||||
{0xA947, 0xA953}, {0xA980, 0xA983}, {0xA9B3, 0xA9C0},
|
||||
{0xA9E5, 0xA9E5}, {0xAA29, 0xAA36}, {0xAA43, 0xAA43},
|
||||
{0xAA4C, 0xAA4D}, {0xAA7B, 0xAA7D}, {0xAAB0, 0xAAB0},
|
||||
{0xAAB2, 0xAAB4}, {0xAAB7, 0xAAB8}, {0xAABE, 0xAABF},
|
||||
{0xAAC1, 0xAAC1}, {0xAAEB, 0xAAEF}, {0xAAF5, 0xAAF6},
|
||||
{0xABE3, 0xABEA}, {0xABEC, 0xABED}, {0xFB1E, 0xFB1E},
|
||||
{0xFE00, 0xFE0F}, {0xFE20, 0xFE2F}, {0x101FD, 0x101FD},
|
||||
{0x102E0, 0x102E0}, {0x10376, 0x1037A}, {0x10A01, 0x10A03},
|
||||
{0x10A05, 0x10A06}, {0x10A0C, 0x10A0F}, {0x10A38, 0x10A3A},
|
||||
{0x10A3F, 0x10A3F}, {0x10AE5, 0x10AE6}, {0x11000, 0x11002},
|
||||
{0x11038, 0x11046}, {0x1107F, 0x11082}, {0x110B0, 0x110BA},
|
||||
{0x11100, 0x11102}, {0x11127, 0x11134}, {0x11173, 0x11173},
|
||||
{0x11180, 0x11182}, {0x111B3, 0x111C0}, {0x111CA, 0x111CC},
|
||||
{0x1122C, 0x11237}, {0x1123E, 0x1123E}, {0x112DF, 0x112EA},
|
||||
{0x11300, 0x11303}, {0x1133C, 0x1133C}, {0x1133E, 0x11344},
|
||||
{0x11347, 0x11348}, {0x1134B, 0x1134D}, {0x11357, 0x11357},
|
||||
{0x11362, 0x11363}, {0x11366, 0x1136C}, {0x11370, 0x11374},
|
||||
{0x11435, 0x11446}, {0x114B0, 0x114C3}, {0x115AF, 0x115B5},
|
||||
{0x115B8, 0x115C0}, {0x115DC, 0x115DD}, {0x11630, 0x11640},
|
||||
{0x116AB, 0x116B7}, {0x1171D, 0x1172B}, {0x11C2F, 0x11C36},
|
||||
{0x11C38, 0x11C3F}, {0x11C92, 0x11CA7}, {0x11CA9, 0x11CB6},
|
||||
{0x16AF0, 0x16AF4}, {0x16B30, 0x16B36}, {0x16F51, 0x16F7E},
|
||||
{0x16F8F, 0x16F92}, {0x1BC9D, 0x1BC9E}, {0x1D165, 0x1D169},
|
||||
{0x1D16D, 0x1D172}, {0x1D17B, 0x1D182}, {0x1D185, 0x1D18B},
|
||||
{0x1D1AA, 0x1D1AD}, {0x1D242, 0x1D244}, {0x1DA00, 0x1DA36},
|
||||
{0x1DA3B, 0x1DA6C}, {0x1DA75, 0x1DA75}, {0x1DA84, 0x1DA84},
|
||||
{0x1DA9B, 0x1DA9F}, {0x1DAA1, 0x1DAAF}, {0x1E000, 0x1E006},
|
||||
{0x1E008, 0x1E018}, {0x1E01B, 0x1E021}, {0x1E023, 0x1E024},
|
||||
{0x1E026, 0x1E02A}, {0x1E8D0, 0x1E8D6}, {0x1E944, 0x1E94A},
|
||||
{0xE0100, 0xE01EF},
|
||||
}
|
||||
|
||||
var doublewidth = table{
|
||||
{0x1100, 0x115F}, {0x231A, 0x231B}, {0x2329, 0x232A},
|
||||
{0x23E9, 0x23EC}, {0x23F0, 0x23F0}, {0x23F3, 0x23F3},
|
||||
{0x25FD, 0x25FE}, {0x2614, 0x2615}, {0x2648, 0x2653},
|
||||
{0x267F, 0x267F}, {0x2693, 0x2693}, {0x26A1, 0x26A1},
|
||||
{0x26AA, 0x26AB}, {0x26BD, 0x26BE}, {0x26C4, 0x26C5},
|
||||
{0x26CE, 0x26CE}, {0x26D4, 0x26D4}, {0x26EA, 0x26EA},
|
||||
{0x26F2, 0x26F3}, {0x26F5, 0x26F5}, {0x26FA, 0x26FA},
|
||||
{0x26FD, 0x26FD}, {0x2705, 0x2705}, {0x270A, 0x270B},
|
||||
{0x2728, 0x2728}, {0x274C, 0x274C}, {0x274E, 0x274E},
|
||||
{0x2753, 0x2755}, {0x2757, 0x2757}, {0x2795, 0x2797},
|
||||
{0x27B0, 0x27B0}, {0x27BF, 0x27BF}, {0x2B1B, 0x2B1C},
|
||||
{0x2B50, 0x2B50}, {0x2B55, 0x2B55}, {0x2E80, 0x2E99},
|
||||
{0x2E9B, 0x2EF3}, {0x2F00, 0x2FD5}, {0x2FF0, 0x2FFB},
|
||||
{0x3000, 0x303E}, {0x3041, 0x3096}, {0x3099, 0x30FF},
|
||||
{0x3105, 0x312D}, {0x3131, 0x318E}, {0x3190, 0x31BA},
|
||||
{0x31C0, 0x31E3}, {0x31F0, 0x321E}, {0x3220, 0x3247},
|
||||
{0x3250, 0x32FE}, {0x3300, 0x4DBF}, {0x4E00, 0xA48C},
|
||||
{0xA490, 0xA4C6}, {0xA960, 0xA97C}, {0xAC00, 0xD7A3},
|
||||
{0xF900, 0xFAFF}, {0xFE10, 0xFE19}, {0xFE30, 0xFE52},
|
||||
{0xFE54, 0xFE66}, {0xFE68, 0xFE6B}, {0xFF01, 0xFF60},
|
||||
{0xFFE0, 0xFFE6}, {0x16FE0, 0x16FE0}, {0x17000, 0x187EC},
|
||||
{0x18800, 0x18AF2}, {0x1B000, 0x1B001}, {0x1F004, 0x1F004},
|
||||
{0x1F0CF, 0x1F0CF}, {0x1F18E, 0x1F18E}, {0x1F191, 0x1F19A},
|
||||
{0x1F200, 0x1F202}, {0x1F210, 0x1F23B}, {0x1F240, 0x1F248},
|
||||
{0x1F250, 0x1F251}, {0x1F300, 0x1F320}, {0x1F32D, 0x1F335},
|
||||
{0x1F337, 0x1F37C}, {0x1F37E, 0x1F393}, {0x1F3A0, 0x1F3CA},
|
||||
{0x1F3CF, 0x1F3D3}, {0x1F3E0, 0x1F3F0}, {0x1F3F4, 0x1F3F4},
|
||||
{0x1F3F8, 0x1F43E}, {0x1F440, 0x1F440}, {0x1F442, 0x1F4FC},
|
||||
{0x1F4FF, 0x1F53D}, {0x1F54B, 0x1F54E}, {0x1F550, 0x1F567},
|
||||
{0x1F57A, 0x1F57A}, {0x1F595, 0x1F596}, {0x1F5A4, 0x1F5A4},
|
||||
{0x1F5FB, 0x1F64F}, {0x1F680, 0x1F6C5}, {0x1F6CC, 0x1F6CC},
|
||||
{0x1F6D0, 0x1F6D2}, {0x1F6EB, 0x1F6EC}, {0x1F6F4, 0x1F6F6},
|
||||
{0x1F910, 0x1F91E}, {0x1F920, 0x1F927}, {0x1F930, 0x1F930},
|
||||
{0x1F933, 0x1F93E}, {0x1F940, 0x1F94B}, {0x1F950, 0x1F95E},
|
||||
{0x1F980, 0x1F991}, {0x1F9C0, 0x1F9C0}, {0x20000, 0x2FFFD},
|
||||
{0x30000, 0x3FFFD},
|
||||
}
|
||||
|
||||
var ambiguous = table{
|
||||
{0x00A1, 0x00A1}, {0x00A4, 0x00A4}, {0x00A7, 0x00A8},
|
||||
{0x00AA, 0x00AA}, {0x00AD, 0x00AE}, {0x00B0, 0x00B4},
|
||||
{0x00B6, 0x00BA}, {0x00BC, 0x00BF}, {0x00C6, 0x00C6},
|
||||
{0x00D0, 0x00D0}, {0x00D7, 0x00D8}, {0x00DE, 0x00E1},
|
||||
{0x00E6, 0x00E6}, {0x00E8, 0x00EA}, {0x00EC, 0x00ED},
|
||||
{0x00F0, 0x00F0}, {0x00F2, 0x00F3}, {0x00F7, 0x00FA},
|
||||
{0x00FC, 0x00FC}, {0x00FE, 0x00FE}, {0x0101, 0x0101},
|
||||
{0x0111, 0x0111}, {0x0113, 0x0113}, {0x011B, 0x011B},
|
||||
{0x0126, 0x0127}, {0x012B, 0x012B}, {0x0131, 0x0133},
|
||||
{0x0138, 0x0138}, {0x013F, 0x0142}, {0x0144, 0x0144},
|
||||
{0x0148, 0x014B}, {0x014D, 0x014D}, {0x0152, 0x0153},
|
||||
{0x0166, 0x0167}, {0x016B, 0x016B}, {0x01CE, 0x01CE},
|
||||
{0x01D0, 0x01D0}, {0x01D2, 0x01D2}, {0x01D4, 0x01D4},
|
||||
{0x01D6, 0x01D6}, {0x01D8, 0x01D8}, {0x01DA, 0x01DA},
|
||||
{0x01DC, 0x01DC}, {0x0251, 0x0251}, {0x0261, 0x0261},
|
||||
{0x02C4, 0x02C4}, {0x02C7, 0x02C7}, {0x02C9, 0x02CB},
|
||||
{0x02CD, 0x02CD}, {0x02D0, 0x02D0}, {0x02D8, 0x02DB},
|
||||
{0x02DD, 0x02DD}, {0x02DF, 0x02DF}, {0x0300, 0x036F},
|
||||
{0x0391, 0x03A1}, {0x03A3, 0x03A9}, {0x03B1, 0x03C1},
|
||||
{0x03C3, 0x03C9}, {0x0401, 0x0401}, {0x0410, 0x044F},
|
||||
{0x0451, 0x0451}, {0x2010, 0x2010}, {0x2013, 0x2016},
|
||||
{0x2018, 0x2019}, {0x201C, 0x201D}, {0x2020, 0x2022},
|
||||
{0x2024, 0x2027}, {0x2030, 0x2030}, {0x2032, 0x2033},
|
||||
{0x2035, 0x2035}, {0x203B, 0x203B}, {0x203E, 0x203E},
|
||||
{0x2074, 0x2074}, {0x207F, 0x207F}, {0x2081, 0x2084},
|
||||
{0x20AC, 0x20AC}, {0x2103, 0x2103}, {0x2105, 0x2105},
|
||||
{0x2109, 0x2109}, {0x2113, 0x2113}, {0x2116, 0x2116},
|
||||
{0x2121, 0x2122}, {0x2126, 0x2126}, {0x212B, 0x212B},
|
||||
{0x2153, 0x2154}, {0x215B, 0x215E}, {0x2160, 0x216B},
|
||||
{0x2170, 0x2179}, {0x2189, 0x2189}, {0x2190, 0x2199},
|
||||
{0x21B8, 0x21B9}, {0x21D2, 0x21D2}, {0x21D4, 0x21D4},
|
||||
{0x21E7, 0x21E7}, {0x2200, 0x2200}, {0x2202, 0x2203},
|
||||
{0x2207, 0x2208}, {0x220B, 0x220B}, {0x220F, 0x220F},
|
||||
{0x2211, 0x2211}, {0x2215, 0x2215}, {0x221A, 0x221A},
|
||||
{0x221D, 0x2220}, {0x2223, 0x2223}, {0x2225, 0x2225},
|
||||
{0x2227, 0x222C}, {0x222E, 0x222E}, {0x2234, 0x2237},
|
||||
{0x223C, 0x223D}, {0x2248, 0x2248}, {0x224C, 0x224C},
|
||||
{0x2252, 0x2252}, {0x2260, 0x2261}, {0x2264, 0x2267},
|
||||
{0x226A, 0x226B}, {0x226E, 0x226F}, {0x2282, 0x2283},
|
||||
{0x2286, 0x2287}, {0x2295, 0x2295}, {0x2299, 0x2299},
|
||||
{0x22A5, 0x22A5}, {0x22BF, 0x22BF}, {0x2312, 0x2312},
|
||||
{0x2460, 0x24E9}, {0x24EB, 0x254B}, {0x2550, 0x2573},
|
||||
{0x2580, 0x258F}, {0x2592, 0x2595}, {0x25A0, 0x25A1},
|
||||
{0x25A3, 0x25A9}, {0x25B2, 0x25B3}, {0x25B6, 0x25B7},
|
||||
{0x25BC, 0x25BD}, {0x25C0, 0x25C1}, {0x25C6, 0x25C8},
|
||||
{0x25CB, 0x25CB}, {0x25CE, 0x25D1}, {0x25E2, 0x25E5},
|
||||
{0x25EF, 0x25EF}, {0x2605, 0x2606}, {0x2609, 0x2609},
|
||||
{0x260E, 0x260F}, {0x261C, 0x261C}, {0x261E, 0x261E},
|
||||
{0x2640, 0x2640}, {0x2642, 0x2642}, {0x2660, 0x2661},
|
||||
{0x2663, 0x2665}, {0x2667, 0x266A}, {0x266C, 0x266D},
|
||||
{0x266F, 0x266F}, {0x269E, 0x269F}, {0x26BF, 0x26BF},
|
||||
{0x26C6, 0x26CD}, {0x26CF, 0x26D3}, {0x26D5, 0x26E1},
|
||||
{0x26E3, 0x26E3}, {0x26E8, 0x26E9}, {0x26EB, 0x26F1},
|
||||
{0x26F4, 0x26F4}, {0x26F6, 0x26F9}, {0x26FB, 0x26FC},
|
||||
{0x26FE, 0x26FF}, {0x273D, 0x273D}, {0x2776, 0x277F},
|
||||
{0x2B56, 0x2B59}, {0x3248, 0x324F}, {0xE000, 0xF8FF},
|
||||
{0xFE00, 0xFE0F}, {0xFFFD, 0xFFFD}, {0x1F100, 0x1F10A},
|
||||
{0x1F110, 0x1F12D}, {0x1F130, 0x1F169}, {0x1F170, 0x1F18D},
|
||||
{0x1F18F, 0x1F190}, {0x1F19B, 0x1F1AC}, {0xE0100, 0xE01EF},
|
||||
{0xF0000, 0xFFFFD}, {0x100000, 0x10FFFD},
|
||||
}
|
||||
|
||||
var emoji = table{
|
||||
{0x203C, 0x203C}, {0x2049, 0x2049}, {0x2122, 0x2122},
|
||||
{0x2139, 0x2139}, {0x2194, 0x2199}, {0x21A9, 0x21AA},
|
||||
{0x231A, 0x231B}, {0x2328, 0x2328}, {0x23CF, 0x23CF},
|
||||
{0x23E9, 0x23F3}, {0x23F8, 0x23FA}, {0x24C2, 0x24C2},
|
||||
{0x25AA, 0x25AB}, {0x25B6, 0x25B6}, {0x25C0, 0x25C0},
|
||||
{0x25FB, 0x25FE}, {0x2600, 0x2604}, {0x260E, 0x260E},
|
||||
{0x2611, 0x2611}, {0x2614, 0x2615}, {0x2618, 0x2618},
|
||||
{0x261D, 0x261D}, {0x2620, 0x2620}, {0x2622, 0x2623},
|
||||
{0x2626, 0x2626}, {0x262A, 0x262A}, {0x262E, 0x262F},
|
||||
{0x2638, 0x263A}, {0x2640, 0x2640}, {0x2642, 0x2642},
|
||||
{0x2648, 0x2653}, {0x265F, 0x2660}, {0x2663, 0x2663},
|
||||
{0x2665, 0x2666}, {0x2668, 0x2668}, {0x267B, 0x267B},
|
||||
{0x267E, 0x267F}, {0x2692, 0x2697}, {0x2699, 0x2699},
|
||||
{0x269B, 0x269C}, {0x26A0, 0x26A1}, {0x26AA, 0x26AB},
|
||||
{0x26B0, 0x26B1}, {0x26BD, 0x26BE}, {0x26C4, 0x26C5},
|
||||
{0x26C8, 0x26C8}, {0x26CE, 0x26CF}, {0x26D1, 0x26D1},
|
||||
{0x26D3, 0x26D4}, {0x26E9, 0x26EA}, {0x26F0, 0x26F5},
|
||||
{0x26F7, 0x26FA}, {0x26FD, 0x26FD}, {0x2702, 0x2702},
|
||||
{0x2705, 0x2705}, {0x2708, 0x270D}, {0x270F, 0x270F},
|
||||
{0x2712, 0x2712}, {0x2714, 0x2714}, {0x2716, 0x2716},
|
||||
{0x271D, 0x271D}, {0x2721, 0x2721}, {0x2728, 0x2728},
|
||||
{0x2733, 0x2734}, {0x2744, 0x2744}, {0x2747, 0x2747},
|
||||
{0x274C, 0x274C}, {0x274E, 0x274E}, {0x2753, 0x2755},
|
||||
{0x2757, 0x2757}, {0x2763, 0x2764}, {0x2795, 0x2797},
|
||||
{0x27A1, 0x27A1}, {0x27B0, 0x27B0}, {0x27BF, 0x27BF},
|
||||
{0x2934, 0x2935}, {0x2B05, 0x2B07}, {0x2B1B, 0x2B1C},
|
||||
{0x2B50, 0x2B50}, {0x2B55, 0x2B55}, {0x3030, 0x3030},
|
||||
{0x303D, 0x303D}, {0x3297, 0x3297}, {0x3299, 0x3299},
|
||||
{0x1F004, 0x1F004}, {0x1F0CF, 0x1F0CF}, {0x1F170, 0x1F171},
|
||||
{0x1F17E, 0x1F17F}, {0x1F18E, 0x1F18E}, {0x1F191, 0x1F19A},
|
||||
{0x1F1E6, 0x1F1FF}, {0x1F201, 0x1F202}, {0x1F21A, 0x1F21A},
|
||||
{0x1F22F, 0x1F22F}, {0x1F232, 0x1F23A}, {0x1F250, 0x1F251},
|
||||
{0x1F300, 0x1F321}, {0x1F324, 0x1F393}, {0x1F396, 0x1F397},
|
||||
{0x1F399, 0x1F39B}, {0x1F39E, 0x1F3F0}, {0x1F3F3, 0x1F3F5},
|
||||
{0x1F3F7, 0x1F4FD}, {0x1F4FF, 0x1F53D}, {0x1F549, 0x1F54E},
|
||||
{0x1F550, 0x1F567}, {0x1F56F, 0x1F570}, {0x1F573, 0x1F57A},
|
||||
{0x1F587, 0x1F587}, {0x1F58A, 0x1F58D}, {0x1F590, 0x1F590},
|
||||
{0x1F595, 0x1F596}, {0x1F5A4, 0x1F5A5}, {0x1F5A8, 0x1F5A8},
|
||||
{0x1F5B1, 0x1F5B2}, {0x1F5BC, 0x1F5BC}, {0x1F5C2, 0x1F5C4},
|
||||
{0x1F5D1, 0x1F5D3}, {0x1F5DC, 0x1F5DE}, {0x1F5E1, 0x1F5E1},
|
||||
{0x1F5E3, 0x1F5E3}, {0x1F5E8, 0x1F5E8}, {0x1F5EF, 0x1F5EF},
|
||||
{0x1F5F3, 0x1F5F3}, {0x1F5FA, 0x1F64F}, {0x1F680, 0x1F6C5},
|
||||
{0x1F6CB, 0x1F6D2}, {0x1F6E0, 0x1F6E5}, {0x1F6E9, 0x1F6E9},
|
||||
{0x1F6EB, 0x1F6EC}, {0x1F6F0, 0x1F6F0}, {0x1F6F3, 0x1F6F9},
|
||||
{0x1F910, 0x1F93A}, {0x1F93C, 0x1F93E}, {0x1F940, 0x1F945},
|
||||
{0x1F947, 0x1F970}, {0x1F973, 0x1F976}, {0x1F97A, 0x1F97A},
|
||||
{0x1F97C, 0x1F9A2}, {0x1F9B0, 0x1F9B9}, {0x1F9C0, 0x1F9C2},
|
||||
{0x1F9D0, 0x1F9FF},
|
||||
}
|
||||
|
||||
var notassigned = table{
|
||||
{0x0378, 0x0379}, {0x0380, 0x0383}, {0x038B, 0x038B},
|
||||
{0x038D, 0x038D}, {0x03A2, 0x03A2}, {0x0530, 0x0530},
|
||||
{0x0557, 0x0558}, {0x0560, 0x0560}, {0x0588, 0x0588},
|
||||
{0x058B, 0x058C}, {0x0590, 0x0590}, {0x05C8, 0x05CF},
|
||||
{0x05EB, 0x05EF}, {0x05F5, 0x05FF}, {0x061D, 0x061D},
|
||||
{0x070E, 0x070E}, {0x074B, 0x074C}, {0x07B2, 0x07BF},
|
||||
{0x07FB, 0x07FF}, {0x082E, 0x082F}, {0x083F, 0x083F},
|
||||
{0x085C, 0x085D}, {0x085F, 0x089F}, {0x08B5, 0x08B5},
|
||||
{0x08BE, 0x08D3}, {0x0984, 0x0984}, {0x098D, 0x098E},
|
||||
{0x0991, 0x0992}, {0x09A9, 0x09A9}, {0x09B1, 0x09B1},
|
||||
{0x09B3, 0x09B5}, {0x09BA, 0x09BB}, {0x09C5, 0x09C6},
|
||||
{0x09C9, 0x09CA}, {0x09CF, 0x09D6}, {0x09D8, 0x09DB},
|
||||
{0x09DE, 0x09DE}, {0x09E4, 0x09E5}, {0x09FC, 0x0A00},
|
||||
{0x0A04, 0x0A04}, {0x0A0B, 0x0A0E}, {0x0A11, 0x0A12},
|
||||
{0x0A29, 0x0A29}, {0x0A31, 0x0A31}, {0x0A34, 0x0A34},
|
||||
{0x0A37, 0x0A37}, {0x0A3A, 0x0A3B}, {0x0A3D, 0x0A3D},
|
||||
{0x0A43, 0x0A46}, {0x0A49, 0x0A4A}, {0x0A4E, 0x0A50},
|
||||
{0x0A52, 0x0A58}, {0x0A5D, 0x0A5D}, {0x0A5F, 0x0A65},
|
||||
{0x0A76, 0x0A80}, {0x0A84, 0x0A84}, {0x0A8E, 0x0A8E},
|
||||
{0x0A92, 0x0A92}, {0x0AA9, 0x0AA9}, {0x0AB1, 0x0AB1},
|
||||
{0x0AB4, 0x0AB4}, {0x0ABA, 0x0ABB}, {0x0AC6, 0x0AC6},
|
||||
{0x0ACA, 0x0ACA}, {0x0ACE, 0x0ACF}, {0x0AD1, 0x0ADF},
|
||||
{0x0AE4, 0x0AE5}, {0x0AF2, 0x0AF8}, {0x0AFA, 0x0B00},
|
||||
{0x0B04, 0x0B04}, {0x0B0D, 0x0B0E}, {0x0B11, 0x0B12},
|
||||
{0x0B29, 0x0B29}, {0x0B31, 0x0B31}, {0x0B34, 0x0B34},
|
||||
{0x0B3A, 0x0B3B}, {0x0B45, 0x0B46}, {0x0B49, 0x0B4A},
|
||||
{0x0B4E, 0x0B55}, {0x0B58, 0x0B5B}, {0x0B5E, 0x0B5E},
|
||||
{0x0B64, 0x0B65}, {0x0B78, 0x0B81}, {0x0B84, 0x0B84},
|
||||
{0x0B8B, 0x0B8D}, {0x0B91, 0x0B91}, {0x0B96, 0x0B98},
|
||||
{0x0B9B, 0x0B9B}, {0x0B9D, 0x0B9D}, {0x0BA0, 0x0BA2},
|
||||
{0x0BA5, 0x0BA7}, {0x0BAB, 0x0BAD}, {0x0BBA, 0x0BBD},
|
||||
{0x0BC3, 0x0BC5}, {0x0BC9, 0x0BC9}, {0x0BCE, 0x0BCF},
|
||||
{0x0BD1, 0x0BD6}, {0x0BD8, 0x0BE5}, {0x0BFB, 0x0BFF},
|
||||
{0x0C04, 0x0C04}, {0x0C0D, 0x0C0D}, {0x0C11, 0x0C11},
|
||||
{0x0C29, 0x0C29}, {0x0C3A, 0x0C3C}, {0x0C45, 0x0C45},
|
||||
{0x0C49, 0x0C49}, {0x0C4E, 0x0C54}, {0x0C57, 0x0C57},
|
||||
{0x0C5B, 0x0C5F}, {0x0C64, 0x0C65}, {0x0C70, 0x0C77},
|
||||
{0x0C84, 0x0C84}, {0x0C8D, 0x0C8D}, {0x0C91, 0x0C91},
|
||||
{0x0CA9, 0x0CA9}, {0x0CB4, 0x0CB4}, {0x0CBA, 0x0CBB},
|
||||
{0x0CC5, 0x0CC5}, {0x0CC9, 0x0CC9}, {0x0CCE, 0x0CD4},
|
||||
{0x0CD7, 0x0CDD}, {0x0CDF, 0x0CDF}, {0x0CE4, 0x0CE5},
|
||||
{0x0CF0, 0x0CF0}, {0x0CF3, 0x0D00}, {0x0D04, 0x0D04},
|
||||
{0x0D0D, 0x0D0D}, {0x0D11, 0x0D11}, {0x0D3B, 0x0D3C},
|
||||
{0x0D45, 0x0D45}, {0x0D49, 0x0D49}, {0x0D50, 0x0D53},
|
||||
{0x0D64, 0x0D65}, {0x0D80, 0x0D81}, {0x0D84, 0x0D84},
|
||||
{0x0D97, 0x0D99}, {0x0DB2, 0x0DB2}, {0x0DBC, 0x0DBC},
|
||||
{0x0DBE, 0x0DBF}, {0x0DC7, 0x0DC9}, {0x0DCB, 0x0DCE},
|
||||
{0x0DD5, 0x0DD5}, {0x0DD7, 0x0DD7}, {0x0DE0, 0x0DE5},
|
||||
{0x0DF0, 0x0DF1}, {0x0DF5, 0x0E00}, {0x0E3B, 0x0E3E},
|
||||
{0x0E5C, 0x0E80}, {0x0E83, 0x0E83}, {0x0E85, 0x0E86},
|
||||
{0x0E89, 0x0E89}, {0x0E8B, 0x0E8C}, {0x0E8E, 0x0E93},
|
||||
{0x0E98, 0x0E98}, {0x0EA0, 0x0EA0}, {0x0EA4, 0x0EA4},
|
||||
{0x0EA6, 0x0EA6}, {0x0EA8, 0x0EA9}, {0x0EAC, 0x0EAC},
|
||||
{0x0EBA, 0x0EBA}, {0x0EBE, 0x0EBF}, {0x0EC5, 0x0EC5},
|
||||
{0x0EC7, 0x0EC7}, {0x0ECE, 0x0ECF}, {0x0EDA, 0x0EDB},
|
||||
{0x0EE0, 0x0EFF}, {0x0F48, 0x0F48}, {0x0F6D, 0x0F70},
|
||||
{0x0F98, 0x0F98}, {0x0FBD, 0x0FBD}, {0x0FCD, 0x0FCD},
|
||||
{0x0FDB, 0x0FFF}, {0x10C6, 0x10C6}, {0x10C8, 0x10CC},
|
||||
{0x10CE, 0x10CF}, {0x1249, 0x1249}, {0x124E, 0x124F},
|
||||
{0x1257, 0x1257}, {0x1259, 0x1259}, {0x125E, 0x125F},
|
||||
{0x1289, 0x1289}, {0x128E, 0x128F}, {0x12B1, 0x12B1},
|
||||
{0x12B6, 0x12B7}, {0x12BF, 0x12BF}, {0x12C1, 0x12C1},
|
||||
{0x12C6, 0x12C7}, {0x12D7, 0x12D7}, {0x1311, 0x1311},
|
||||
{0x1316, 0x1317}, {0x135B, 0x135C}, {0x137D, 0x137F},
|
||||
{0x139A, 0x139F}, {0x13F6, 0x13F7}, {0x13FE, 0x13FF},
|
||||
{0x169D, 0x169F}, {0x16F9, 0x16FF}, {0x170D, 0x170D},
|
||||
{0x1715, 0x171F}, {0x1737, 0x173F}, {0x1754, 0x175F},
|
||||
{0x176D, 0x176D}, {0x1771, 0x1771}, {0x1774, 0x177F},
|
||||
{0x17DE, 0x17DF}, {0x17EA, 0x17EF}, {0x17FA, 0x17FF},
|
||||
{0x180F, 0x180F}, {0x181A, 0x181F}, {0x1878, 0x187F},
|
||||
{0x18AB, 0x18AF}, {0x18F6, 0x18FF}, {0x191F, 0x191F},
|
||||
{0x192C, 0x192F}, {0x193C, 0x193F}, {0x1941, 0x1943},
|
||||
{0x196E, 0x196F}, {0x1975, 0x197F}, {0x19AC, 0x19AF},
|
||||
{0x19CA, 0x19CF}, {0x19DB, 0x19DD}, {0x1A1C, 0x1A1D},
|
||||
{0x1A5F, 0x1A5F}, {0x1A7D, 0x1A7E}, {0x1A8A, 0x1A8F},
|
||||
{0x1A9A, 0x1A9F}, {0x1AAE, 0x1AAF}, {0x1ABF, 0x1AFF},
|
||||
{0x1B4C, 0x1B4F}, {0x1B7D, 0x1B7F}, {0x1BF4, 0x1BFB},
|
||||
{0x1C38, 0x1C3A}, {0x1C4A, 0x1C4C}, {0x1C89, 0x1CBF},
|
||||
{0x1CC8, 0x1CCF}, {0x1CF7, 0x1CF7}, {0x1CFA, 0x1CFF},
|
||||
{0x1DF6, 0x1DFA}, {0x1F16, 0x1F17}, {0x1F1E, 0x1F1F},
|
||||
{0x1F46, 0x1F47}, {0x1F4E, 0x1F4F}, {0x1F58, 0x1F58},
|
||||
{0x1F5A, 0x1F5A}, {0x1F5C, 0x1F5C}, {0x1F5E, 0x1F5E},
|
||||
{0x1F7E, 0x1F7F}, {0x1FB5, 0x1FB5}, {0x1FC5, 0x1FC5},
|
||||
{0x1FD4, 0x1FD5}, {0x1FDC, 0x1FDC}, {0x1FF0, 0x1FF1},
|
||||
{0x1FF5, 0x1FF5}, {0x1FFF, 0x1FFF}, {0x2065, 0x2065},
|
||||
{0x2072, 0x2073}, {0x208F, 0x208F}, {0x209D, 0x209F},
|
||||
{0x20BF, 0x20CF}, {0x20F1, 0x20FF}, {0x218C, 0x218F},
|
||||
{0x23FF, 0x23FF}, {0x2427, 0x243F}, {0x244B, 0x245F},
|
||||
{0x2B74, 0x2B75}, {0x2B96, 0x2B97}, {0x2BBA, 0x2BBC},
|
||||
{0x2BC9, 0x2BC9}, {0x2BD2, 0x2BEB}, {0x2BF0, 0x2BFF},
|
||||
{0x2C2F, 0x2C2F}, {0x2C5F, 0x2C5F}, {0x2CF4, 0x2CF8},
|
||||
{0x2D26, 0x2D26}, {0x2D28, 0x2D2C}, {0x2D2E, 0x2D2F},
|
||||
{0x2D68, 0x2D6E}, {0x2D71, 0x2D7E}, {0x2D97, 0x2D9F},
|
||||
{0x2DA7, 0x2DA7}, {0x2DAF, 0x2DAF}, {0x2DB7, 0x2DB7},
|
||||
{0x2DBF, 0x2DBF}, {0x2DC7, 0x2DC7}, {0x2DCF, 0x2DCF},
|
||||
{0x2DD7, 0x2DD7}, {0x2DDF, 0x2DDF}, {0x2E45, 0x2E7F},
|
||||
{0x2E9A, 0x2E9A}, {0x2EF4, 0x2EFF}, {0x2FD6, 0x2FEF},
|
||||
{0x2FFC, 0x2FFF}, {0x3040, 0x3040}, {0x3097, 0x3098},
|
||||
{0x3100, 0x3104}, {0x312E, 0x3130}, {0x318F, 0x318F},
|
||||
{0x31BB, 0x31BF}, {0x31E4, 0x31EF}, {0x321F, 0x321F},
|
||||
{0x32FF, 0x32FF}, {0x4DB6, 0x4DBF}, {0x9FD6, 0x9FFF},
|
||||
{0xA48D, 0xA48F}, {0xA4C7, 0xA4CF}, {0xA62C, 0xA63F},
|
||||
{0xA6F8, 0xA6FF}, {0xA7AF, 0xA7AF}, {0xA7B8, 0xA7F6},
|
||||
{0xA82C, 0xA82F}, {0xA83A, 0xA83F}, {0xA878, 0xA87F},
|
||||
{0xA8C6, 0xA8CD}, {0xA8DA, 0xA8DF}, {0xA8FE, 0xA8FF},
|
||||
{0xA954, 0xA95E}, {0xA97D, 0xA97F}, {0xA9CE, 0xA9CE},
|
||||
{0xA9DA, 0xA9DD}, {0xA9FF, 0xA9FF}, {0xAA37, 0xAA3F},
|
||||
{0xAA4E, 0xAA4F}, {0xAA5A, 0xAA5B}, {0xAAC3, 0xAADA},
|
||||
{0xAAF7, 0xAB00}, {0xAB07, 0xAB08}, {0xAB0F, 0xAB10},
|
||||
{0xAB17, 0xAB1F}, {0xAB27, 0xAB27}, {0xAB2F, 0xAB2F},
|
||||
{0xAB66, 0xAB6F}, {0xABEE, 0xABEF}, {0xABFA, 0xABFF},
|
||||
{0xD7A4, 0xD7AF}, {0xD7C7, 0xD7CA}, {0xD7FC, 0xD7FF},
|
||||
{0xFA6E, 0xFA6F}, {0xFADA, 0xFAFF}, {0xFB07, 0xFB12},
|
||||
{0xFB18, 0xFB1C}, {0xFB37, 0xFB37}, {0xFB3D, 0xFB3D},
|
||||
{0xFB3F, 0xFB3F}, {0xFB42, 0xFB42}, {0xFB45, 0xFB45},
|
||||
{0xFBC2, 0xFBD2}, {0xFD40, 0xFD4F}, {0xFD90, 0xFD91},
|
||||
{0xFDC8, 0xFDEF}, {0xFDFE, 0xFDFF}, {0xFE1A, 0xFE1F},
|
||||
{0xFE53, 0xFE53}, {0xFE67, 0xFE67}, {0xFE6C, 0xFE6F},
|
||||
{0xFE75, 0xFE75}, {0xFEFD, 0xFEFE}, {0xFF00, 0xFF00},
|
||||
{0xFFBF, 0xFFC1}, {0xFFC8, 0xFFC9}, {0xFFD0, 0xFFD1},
|
||||
{0xFFD8, 0xFFD9}, {0xFFDD, 0xFFDF}, {0xFFE7, 0xFFE7},
|
||||
{0xFFEF, 0xFFF8}, {0xFFFE, 0xFFFF}, {0x1000C, 0x1000C},
|
||||
{0x10027, 0x10027}, {0x1003B, 0x1003B}, {0x1003E, 0x1003E},
|
||||
{0x1004E, 0x1004F}, {0x1005E, 0x1007F}, {0x100FB, 0x100FF},
|
||||
{0x10103, 0x10106}, {0x10134, 0x10136}, {0x1018F, 0x1018F},
|
||||
{0x1019C, 0x1019F}, {0x101A1, 0x101CF}, {0x101FE, 0x1027F},
|
||||
{0x1029D, 0x1029F}, {0x102D1, 0x102DF}, {0x102FC, 0x102FF},
|
||||
{0x10324, 0x1032F}, {0x1034B, 0x1034F}, {0x1037B, 0x1037F},
|
||||
{0x1039E, 0x1039E}, {0x103C4, 0x103C7}, {0x103D6, 0x103FF},
|
||||
{0x1049E, 0x1049F}, {0x104AA, 0x104AF}, {0x104D4, 0x104D7},
|
||||
{0x104FC, 0x104FF}, {0x10528, 0x1052F}, {0x10564, 0x1056E},
|
||||
{0x10570, 0x105FF}, {0x10737, 0x1073F}, {0x10756, 0x1075F},
|
||||
{0x10768, 0x107FF}, {0x10806, 0x10807}, {0x10809, 0x10809},
|
||||
{0x10836, 0x10836}, {0x10839, 0x1083B}, {0x1083D, 0x1083E},
|
||||
{0x10856, 0x10856}, {0x1089F, 0x108A6}, {0x108B0, 0x108DF},
|
||||
{0x108F3, 0x108F3}, {0x108F6, 0x108FA}, {0x1091C, 0x1091E},
|
||||
{0x1093A, 0x1093E}, {0x10940, 0x1097F}, {0x109B8, 0x109BB},
|
||||
{0x109D0, 0x109D1}, {0x10A04, 0x10A04}, {0x10A07, 0x10A0B},
|
||||
{0x10A14, 0x10A14}, {0x10A18, 0x10A18}, {0x10A34, 0x10A37},
|
||||
{0x10A3B, 0x10A3E}, {0x10A48, 0x10A4F}, {0x10A59, 0x10A5F},
|
||||
{0x10AA0, 0x10ABF}, {0x10AE7, 0x10AEA}, {0x10AF7, 0x10AFF},
|
||||
{0x10B36, 0x10B38}, {0x10B56, 0x10B57}, {0x10B73, 0x10B77},
|
||||
{0x10B92, 0x10B98}, {0x10B9D, 0x10BA8}, {0x10BB0, 0x10BFF},
|
||||
{0x10C49, 0x10C7F}, {0x10CB3, 0x10CBF}, {0x10CF3, 0x10CF9},
|
||||
{0x10D00, 0x10E5F}, {0x10E7F, 0x10FFF}, {0x1104E, 0x11051},
|
||||
{0x11070, 0x1107E}, {0x110C2, 0x110CF}, {0x110E9, 0x110EF},
|
||||
{0x110FA, 0x110FF}, {0x11135, 0x11135}, {0x11144, 0x1114F},
|
||||
{0x11177, 0x1117F}, {0x111CE, 0x111CF}, {0x111E0, 0x111E0},
|
||||
{0x111F5, 0x111FF}, {0x11212, 0x11212}, {0x1123F, 0x1127F},
|
||||
{0x11287, 0x11287}, {0x11289, 0x11289}, {0x1128E, 0x1128E},
|
||||
{0x1129E, 0x1129E}, {0x112AA, 0x112AF}, {0x112EB, 0x112EF},
|
||||
{0x112FA, 0x112FF}, {0x11304, 0x11304}, {0x1130D, 0x1130E},
|
||||
{0x11311, 0x11312}, {0x11329, 0x11329}, {0x11331, 0x11331},
|
||||
{0x11334, 0x11334}, {0x1133A, 0x1133B}, {0x11345, 0x11346},
|
||||
{0x11349, 0x1134A}, {0x1134E, 0x1134F}, {0x11351, 0x11356},
|
||||
{0x11358, 0x1135C}, {0x11364, 0x11365}, {0x1136D, 0x1136F},
|
||||
{0x11375, 0x113FF}, {0x1145A, 0x1145A}, {0x1145C, 0x1145C},
|
||||
{0x1145E, 0x1147F}, {0x114C8, 0x114CF}, {0x114DA, 0x1157F},
|
||||
{0x115B6, 0x115B7}, {0x115DE, 0x115FF}, {0x11645, 0x1164F},
|
||||
{0x1165A, 0x1165F}, {0x1166D, 0x1167F}, {0x116B8, 0x116BF},
|
||||
{0x116CA, 0x116FF}, {0x1171A, 0x1171C}, {0x1172C, 0x1172F},
|
||||
{0x11740, 0x1189F}, {0x118F3, 0x118FE}, {0x11900, 0x11ABF},
|
||||
{0x11AF9, 0x11BFF}, {0x11C09, 0x11C09}, {0x11C37, 0x11C37},
|
||||
{0x11C46, 0x11C4F}, {0x11C6D, 0x11C6F}, {0x11C90, 0x11C91},
|
||||
{0x11CA8, 0x11CA8}, {0x11CB7, 0x11FFF}, {0x1239A, 0x123FF},
|
||||
{0x1246F, 0x1246F}, {0x12475, 0x1247F}, {0x12544, 0x12FFF},
|
||||
{0x1342F, 0x143FF}, {0x14647, 0x167FF}, {0x16A39, 0x16A3F},
|
||||
{0x16A5F, 0x16A5F}, {0x16A6A, 0x16A6D}, {0x16A70, 0x16ACF},
|
||||
{0x16AEE, 0x16AEF}, {0x16AF6, 0x16AFF}, {0x16B46, 0x16B4F},
|
||||
{0x16B5A, 0x16B5A}, {0x16B62, 0x16B62}, {0x16B78, 0x16B7C},
|
||||
{0x16B90, 0x16EFF}, {0x16F45, 0x16F4F}, {0x16F7F, 0x16F8E},
|
||||
{0x16FA0, 0x16FDF}, {0x16FE1, 0x16FFF}, {0x187ED, 0x187FF},
|
||||
{0x18AF3, 0x1AFFF}, {0x1B002, 0x1BBFF}, {0x1BC6B, 0x1BC6F},
|
||||
{0x1BC7D, 0x1BC7F}, {0x1BC89, 0x1BC8F}, {0x1BC9A, 0x1BC9B},
|
||||
{0x1BCA4, 0x1CFFF}, {0x1D0F6, 0x1D0FF}, {0x1D127, 0x1D128},
|
||||
{0x1D1E9, 0x1D1FF}, {0x1D246, 0x1D2FF}, {0x1D357, 0x1D35F},
|
||||
{0x1D372, 0x1D3FF}, {0x1D455, 0x1D455}, {0x1D49D, 0x1D49D},
|
||||
{0x1D4A0, 0x1D4A1}, {0x1D4A3, 0x1D4A4}, {0x1D4A7, 0x1D4A8},
|
||||
{0x1D4AD, 0x1D4AD}, {0x1D4BA, 0x1D4BA}, {0x1D4BC, 0x1D4BC},
|
||||
{0x1D4C4, 0x1D4C4}, {0x1D506, 0x1D506}, {0x1D50B, 0x1D50C},
|
||||
{0x1D515, 0x1D515}, {0x1D51D, 0x1D51D}, {0x1D53A, 0x1D53A},
|
||||
{0x1D53F, 0x1D53F}, {0x1D545, 0x1D545}, {0x1D547, 0x1D549},
|
||||
{0x1D551, 0x1D551}, {0x1D6A6, 0x1D6A7}, {0x1D7CC, 0x1D7CD},
|
||||
{0x1DA8C, 0x1DA9A}, {0x1DAA0, 0x1DAA0}, {0x1DAB0, 0x1DFFF},
|
||||
{0x1E007, 0x1E007}, {0x1E019, 0x1E01A}, {0x1E022, 0x1E022},
|
||||
{0x1E025, 0x1E025}, {0x1E02B, 0x1E7FF}, {0x1E8C5, 0x1E8C6},
|
||||
{0x1E8D7, 0x1E8FF}, {0x1E94B, 0x1E94F}, {0x1E95A, 0x1E95D},
|
||||
{0x1E960, 0x1EDFF}, {0x1EE04, 0x1EE04}, {0x1EE20, 0x1EE20},
|
||||
{0x1EE23, 0x1EE23}, {0x1EE25, 0x1EE26}, {0x1EE28, 0x1EE28},
|
||||
{0x1EE33, 0x1EE33}, {0x1EE38, 0x1EE38}, {0x1EE3A, 0x1EE3A},
|
||||
{0x1EE3C, 0x1EE41}, {0x1EE43, 0x1EE46}, {0x1EE48, 0x1EE48},
|
||||
{0x1EE4A, 0x1EE4A}, {0x1EE4C, 0x1EE4C}, {0x1EE50, 0x1EE50},
|
||||
{0x1EE53, 0x1EE53}, {0x1EE55, 0x1EE56}, {0x1EE58, 0x1EE58},
|
||||
{0x1EE5A, 0x1EE5A}, {0x1EE5C, 0x1EE5C}, {0x1EE5E, 0x1EE5E},
|
||||
{0x1EE60, 0x1EE60}, {0x1EE63, 0x1EE63}, {0x1EE65, 0x1EE66},
|
||||
{0x1EE6B, 0x1EE6B}, {0x1EE73, 0x1EE73}, {0x1EE78, 0x1EE78},
|
||||
{0x1EE7D, 0x1EE7D}, {0x1EE7F, 0x1EE7F}, {0x1EE8A, 0x1EE8A},
|
||||
{0x1EE9C, 0x1EEA0}, {0x1EEA4, 0x1EEA4}, {0x1EEAA, 0x1EEAA},
|
||||
{0x1EEBC, 0x1EEEF}, {0x1EEF2, 0x1EFFF}, {0x1F02C, 0x1F02F},
|
||||
{0x1F094, 0x1F09F}, {0x1F0AF, 0x1F0B0}, {0x1F0C0, 0x1F0C0},
|
||||
{0x1F0D0, 0x1F0D0}, {0x1F0F6, 0x1F0FF}, {0x1F10D, 0x1F10F},
|
||||
{0x1F12F, 0x1F12F}, {0x1F16C, 0x1F16F}, {0x1F1AD, 0x1F1E5},
|
||||
{0x1F203, 0x1F20F}, {0x1F23C, 0x1F23F}, {0x1F249, 0x1F24F},
|
||||
{0x1F252, 0x1F2FF}, {0x1F6D3, 0x1F6DF}, {0x1F6ED, 0x1F6EF},
|
||||
{0x1F6F7, 0x1F6FF}, {0x1F774, 0x1F77F}, {0x1F7D5, 0x1F7FF},
|
||||
{0x1F80C, 0x1F80F}, {0x1F848, 0x1F84F}, {0x1F85A, 0x1F85F},
|
||||
{0x1F888, 0x1F88F}, {0x1F8AE, 0x1F90F}, {0x1F91F, 0x1F91F},
|
||||
{0x1F928, 0x1F92F}, {0x1F931, 0x1F932}, {0x1F93F, 0x1F93F},
|
||||
{0x1F94C, 0x1F94F}, {0x1F95F, 0x1F97F}, {0x1F992, 0x1F9BF},
|
||||
{0x1F9C1, 0x1FFFF}, {0x2A6D7, 0x2A6FF}, {0x2B735, 0x2B73F},
|
||||
{0x2B81E, 0x2B81F}, {0x2CEA2, 0x2F7FF}, {0x2FA1E, 0xE0000},
|
||||
{0xE0002, 0xE001F}, {0xE0080, 0xE00FF}, {0xE01F0, 0xEFFFF},
|
||||
{0xFFFFE, 0xFFFFF},
|
||||
}
|
||||
|
||||
var neutral = table{
|
||||
{0x0000, 0x001F}, {0x007F, 0x00A0}, {0x00A9, 0x00A9},
|
||||
{0x00AB, 0x00AB}, {0x00B5, 0x00B5}, {0x00BB, 0x00BB},
|
||||
{0x00C0, 0x00C5}, {0x00C7, 0x00CF}, {0x00D1, 0x00D6},
|
||||
{0x00D9, 0x00DD}, {0x00E2, 0x00E5}, {0x00E7, 0x00E7},
|
||||
{0x00EB, 0x00EB}, {0x00EE, 0x00EF}, {0x00F1, 0x00F1},
|
||||
{0x00F4, 0x00F6}, {0x00FB, 0x00FB}, {0x00FD, 0x00FD},
|
||||
{0x00FF, 0x0100}, {0x0102, 0x0110}, {0x0112, 0x0112},
|
||||
{0x0114, 0x011A}, {0x011C, 0x0125}, {0x0128, 0x012A},
|
||||
{0x012C, 0x0130}, {0x0134, 0x0137}, {0x0139, 0x013E},
|
||||
{0x0143, 0x0143}, {0x0145, 0x0147}, {0x014C, 0x014C},
|
||||
{0x014E, 0x0151}, {0x0154, 0x0165}, {0x0168, 0x016A},
|
||||
{0x016C, 0x01CD}, {0x01CF, 0x01CF}, {0x01D1, 0x01D1},
|
||||
{0x01D3, 0x01D3}, {0x01D5, 0x01D5}, {0x01D7, 0x01D7},
|
||||
{0x01D9, 0x01D9}, {0x01DB, 0x01DB}, {0x01DD, 0x0250},
|
||||
{0x0252, 0x0260}, {0x0262, 0x02C3}, {0x02C5, 0x02C6},
|
||||
{0x02C8, 0x02C8}, {0x02CC, 0x02CC}, {0x02CE, 0x02CF},
|
||||
{0x02D1, 0x02D7}, {0x02DC, 0x02DC}, {0x02DE, 0x02DE},
|
||||
{0x02E0, 0x02FF}, {0x0370, 0x0377}, {0x037A, 0x037F},
|
||||
{0x0384, 0x038A}, {0x038C, 0x038C}, {0x038E, 0x0390},
|
||||
{0x03AA, 0x03B0}, {0x03C2, 0x03C2}, {0x03CA, 0x0400},
|
||||
{0x0402, 0x040F}, {0x0450, 0x0450}, {0x0452, 0x052F},
|
||||
{0x0531, 0x0556}, {0x0559, 0x055F}, {0x0561, 0x0587},
|
||||
{0x0589, 0x058A}, {0x058D, 0x058F}, {0x0591, 0x05C7},
|
||||
{0x05D0, 0x05EA}, {0x05F0, 0x05F4}, {0x0600, 0x061C},
|
||||
{0x061E, 0x070D}, {0x070F, 0x074A}, {0x074D, 0x07B1},
|
||||
{0x07C0, 0x07FA}, {0x0800, 0x082D}, {0x0830, 0x083E},
|
||||
{0x0840, 0x085B}, {0x085E, 0x085E}, {0x08A0, 0x08B4},
|
||||
{0x08B6, 0x08BD}, {0x08D4, 0x0983}, {0x0985, 0x098C},
|
||||
{0x098F, 0x0990}, {0x0993, 0x09A8}, {0x09AA, 0x09B0},
|
||||
{0x09B2, 0x09B2}, {0x09B6, 0x09B9}, {0x09BC, 0x09C4},
|
||||
{0x09C7, 0x09C8}, {0x09CB, 0x09CE}, {0x09D7, 0x09D7},
|
||||
{0x09DC, 0x09DD}, {0x09DF, 0x09E3}, {0x09E6, 0x09FB},
|
||||
{0x0A01, 0x0A03}, {0x0A05, 0x0A0A}, {0x0A0F, 0x0A10},
|
||||
{0x0A13, 0x0A28}, {0x0A2A, 0x0A30}, {0x0A32, 0x0A33},
|
||||
{0x0A35, 0x0A36}, {0x0A38, 0x0A39}, {0x0A3C, 0x0A3C},
|
||||
{0x0A3E, 0x0A42}, {0x0A47, 0x0A48}, {0x0A4B, 0x0A4D},
|
||||
{0x0A51, 0x0A51}, {0x0A59, 0x0A5C}, {0x0A5E, 0x0A5E},
|
||||
{0x0A66, 0x0A75}, {0x0A81, 0x0A83}, {0x0A85, 0x0A8D},
|
||||
{0x0A8F, 0x0A91}, {0x0A93, 0x0AA8}, {0x0AAA, 0x0AB0},
|
||||
{0x0AB2, 0x0AB3}, {0x0AB5, 0x0AB9}, {0x0ABC, 0x0AC5},
|
||||
{0x0AC7, 0x0AC9}, {0x0ACB, 0x0ACD}, {0x0AD0, 0x0AD0},
|
||||
{0x0AE0, 0x0AE3}, {0x0AE6, 0x0AF1}, {0x0AF9, 0x0AF9},
|
||||
{0x0B01, 0x0B03}, {0x0B05, 0x0B0C}, {0x0B0F, 0x0B10},
|
||||
{0x0B13, 0x0B28}, {0x0B2A, 0x0B30}, {0x0B32, 0x0B33},
|
||||
{0x0B35, 0x0B39}, {0x0B3C, 0x0B44}, {0x0B47, 0x0B48},
|
||||
{0x0B4B, 0x0B4D}, {0x0B56, 0x0B57}, {0x0B5C, 0x0B5D},
|
||||
{0x0B5F, 0x0B63}, {0x0B66, 0x0B77}, {0x0B82, 0x0B83},
|
||||
{0x0B85, 0x0B8A}, {0x0B8E, 0x0B90}, {0x0B92, 0x0B95},
|
||||
{0x0B99, 0x0B9A}, {0x0B9C, 0x0B9C}, {0x0B9E, 0x0B9F},
|
||||
{0x0BA3, 0x0BA4}, {0x0BA8, 0x0BAA}, {0x0BAE, 0x0BB9},
|
||||
{0x0BBE, 0x0BC2}, {0x0BC6, 0x0BC8}, {0x0BCA, 0x0BCD},
|
||||
{0x0BD0, 0x0BD0}, {0x0BD7, 0x0BD7}, {0x0BE6, 0x0BFA},
|
||||
{0x0C00, 0x0C03}, {0x0C05, 0x0C0C}, {0x0C0E, 0x0C10},
|
||||
{0x0C12, 0x0C28}, {0x0C2A, 0x0C39}, {0x0C3D, 0x0C44},
|
||||
{0x0C46, 0x0C48}, {0x0C4A, 0x0C4D}, {0x0C55, 0x0C56},
|
||||
{0x0C58, 0x0C5A}, {0x0C60, 0x0C63}, {0x0C66, 0x0C6F},
|
||||
{0x0C78, 0x0C83}, {0x0C85, 0x0C8C}, {0x0C8E, 0x0C90},
|
||||
{0x0C92, 0x0CA8}, {0x0CAA, 0x0CB3}, {0x0CB5, 0x0CB9},
|
||||
{0x0CBC, 0x0CC4}, {0x0CC6, 0x0CC8}, {0x0CCA, 0x0CCD},
|
||||
{0x0CD5, 0x0CD6}, {0x0CDE, 0x0CDE}, {0x0CE0, 0x0CE3},
|
||||
{0x0CE6, 0x0CEF}, {0x0CF1, 0x0CF2}, {0x0D01, 0x0D03},
|
||||
{0x0D05, 0x0D0C}, {0x0D0E, 0x0D10}, {0x0D12, 0x0D3A},
|
||||
{0x0D3D, 0x0D44}, {0x0D46, 0x0D48}, {0x0D4A, 0x0D4F},
|
||||
{0x0D54, 0x0D63}, {0x0D66, 0x0D7F}, {0x0D82, 0x0D83},
|
||||
{0x0D85, 0x0D96}, {0x0D9A, 0x0DB1}, {0x0DB3, 0x0DBB},
|
||||
{0x0DBD, 0x0DBD}, {0x0DC0, 0x0DC6}, {0x0DCA, 0x0DCA},
|
||||
{0x0DCF, 0x0DD4}, {0x0DD6, 0x0DD6}, {0x0DD8, 0x0DDF},
|
||||
{0x0DE6, 0x0DEF}, {0x0DF2, 0x0DF4}, {0x0E01, 0x0E3A},
|
||||
{0x0E3F, 0x0E5B}, {0x0E81, 0x0E82}, {0x0E84, 0x0E84},
|
||||
{0x0E87, 0x0E88}, {0x0E8A, 0x0E8A}, {0x0E8D, 0x0E8D},
|
||||
{0x0E94, 0x0E97}, {0x0E99, 0x0E9F}, {0x0EA1, 0x0EA3},
|
||||
{0x0EA5, 0x0EA5}, {0x0EA7, 0x0EA7}, {0x0EAA, 0x0EAB},
|
||||
{0x0EAD, 0x0EB9}, {0x0EBB, 0x0EBD}, {0x0EC0, 0x0EC4},
|
||||
{0x0EC6, 0x0EC6}, {0x0EC8, 0x0ECD}, {0x0ED0, 0x0ED9},
|
||||
{0x0EDC, 0x0EDF}, {0x0F00, 0x0F47}, {0x0F49, 0x0F6C},
|
||||
{0x0F71, 0x0F97}, {0x0F99, 0x0FBC}, {0x0FBE, 0x0FCC},
|
||||
{0x0FCE, 0x0FDA}, {0x1000, 0x10C5}, {0x10C7, 0x10C7},
|
||||
{0x10CD, 0x10CD}, {0x10D0, 0x10FF}, {0x1160, 0x1248},
|
||||
{0x124A, 0x124D}, {0x1250, 0x1256}, {0x1258, 0x1258},
|
||||
{0x125A, 0x125D}, {0x1260, 0x1288}, {0x128A, 0x128D},
|
||||
{0x1290, 0x12B0}, {0x12B2, 0x12B5}, {0x12B8, 0x12BE},
|
||||
{0x12C0, 0x12C0}, {0x12C2, 0x12C5}, {0x12C8, 0x12D6},
|
||||
{0x12D8, 0x1310}, {0x1312, 0x1315}, {0x1318, 0x135A},
|
||||
{0x135D, 0x137C}, {0x1380, 0x1399}, {0x13A0, 0x13F5},
|
||||
{0x13F8, 0x13FD}, {0x1400, 0x169C}, {0x16A0, 0x16F8},
|
||||
{0x1700, 0x170C}, {0x170E, 0x1714}, {0x1720, 0x1736},
|
||||
{0x1740, 0x1753}, {0x1760, 0x176C}, {0x176E, 0x1770},
|
||||
{0x1772, 0x1773}, {0x1780, 0x17DD}, {0x17E0, 0x17E9},
|
||||
{0x17F0, 0x17F9}, {0x1800, 0x180E}, {0x1810, 0x1819},
|
||||
{0x1820, 0x1877}, {0x1880, 0x18AA}, {0x18B0, 0x18F5},
|
||||
{0x1900, 0x191E}, {0x1920, 0x192B}, {0x1930, 0x193B},
|
||||
{0x1940, 0x1940}, {0x1944, 0x196D}, {0x1970, 0x1974},
|
||||
{0x1980, 0x19AB}, {0x19B0, 0x19C9}, {0x19D0, 0x19DA},
|
||||
{0x19DE, 0x1A1B}, {0x1A1E, 0x1A5E}, {0x1A60, 0x1A7C},
|
||||
{0x1A7F, 0x1A89}, {0x1A90, 0x1A99}, {0x1AA0, 0x1AAD},
|
||||
{0x1AB0, 0x1ABE}, {0x1B00, 0x1B4B}, {0x1B50, 0x1B7C},
|
||||
{0x1B80, 0x1BF3}, {0x1BFC, 0x1C37}, {0x1C3B, 0x1C49},
|
||||
{0x1C4D, 0x1C88}, {0x1CC0, 0x1CC7}, {0x1CD0, 0x1CF6},
|
||||
{0x1CF8, 0x1CF9}, {0x1D00, 0x1DF5}, {0x1DFB, 0x1F15},
|
||||
{0x1F18, 0x1F1D}, {0x1F20, 0x1F45}, {0x1F48, 0x1F4D},
|
||||
{0x1F50, 0x1F57}, {0x1F59, 0x1F59}, {0x1F5B, 0x1F5B},
|
||||
{0x1F5D, 0x1F5D}, {0x1F5F, 0x1F7D}, {0x1F80, 0x1FB4},
|
||||
{0x1FB6, 0x1FC4}, {0x1FC6, 0x1FD3}, {0x1FD6, 0x1FDB},
|
||||
{0x1FDD, 0x1FEF}, {0x1FF2, 0x1FF4}, {0x1FF6, 0x1FFE},
|
||||
{0x2000, 0x200F}, {0x2011, 0x2012}, {0x2017, 0x2017},
|
||||
{0x201A, 0x201B}, {0x201E, 0x201F}, {0x2023, 0x2023},
|
||||
{0x2028, 0x202F}, {0x2031, 0x2031}, {0x2034, 0x2034},
|
||||
{0x2036, 0x203A}, {0x203C, 0x203D}, {0x203F, 0x2064},
|
||||
{0x2066, 0x2071}, {0x2075, 0x207E}, {0x2080, 0x2080},
|
||||
{0x2085, 0x208E}, {0x2090, 0x209C}, {0x20A0, 0x20A8},
|
||||
{0x20AA, 0x20AB}, {0x20AD, 0x20BE}, {0x20D0, 0x20F0},
|
||||
{0x2100, 0x2102}, {0x2104, 0x2104}, {0x2106, 0x2108},
|
||||
{0x210A, 0x2112}, {0x2114, 0x2115}, {0x2117, 0x2120},
|
||||
{0x2123, 0x2125}, {0x2127, 0x212A}, {0x212C, 0x2152},
|
||||
{0x2155, 0x215A}, {0x215F, 0x215F}, {0x216C, 0x216F},
|
||||
{0x217A, 0x2188}, {0x218A, 0x218B}, {0x219A, 0x21B7},
|
||||
{0x21BA, 0x21D1}, {0x21D3, 0x21D3}, {0x21D5, 0x21E6},
|
||||
{0x21E8, 0x21FF}, {0x2201, 0x2201}, {0x2204, 0x2206},
|
||||
{0x2209, 0x220A}, {0x220C, 0x220E}, {0x2210, 0x2210},
|
||||
{0x2212, 0x2214}, {0x2216, 0x2219}, {0x221B, 0x221C},
|
||||
{0x2221, 0x2222}, {0x2224, 0x2224}, {0x2226, 0x2226},
|
||||
{0x222D, 0x222D}, {0x222F, 0x2233}, {0x2238, 0x223B},
|
||||
{0x223E, 0x2247}, {0x2249, 0x224B}, {0x224D, 0x2251},
|
||||
{0x2253, 0x225F}, {0x2262, 0x2263}, {0x2268, 0x2269},
|
||||
{0x226C, 0x226D}, {0x2270, 0x2281}, {0x2284, 0x2285},
|
||||
{0x2288, 0x2294}, {0x2296, 0x2298}, {0x229A, 0x22A4},
|
||||
{0x22A6, 0x22BE}, {0x22C0, 0x2311}, {0x2313, 0x2319},
|
||||
{0x231C, 0x2328}, {0x232B, 0x23E8}, {0x23ED, 0x23EF},
|
||||
{0x23F1, 0x23F2}, {0x23F4, 0x23FE}, {0x2400, 0x2426},
|
||||
{0x2440, 0x244A}, {0x24EA, 0x24EA}, {0x254C, 0x254F},
|
||||
{0x2574, 0x257F}, {0x2590, 0x2591}, {0x2596, 0x259F},
|
||||
{0x25A2, 0x25A2}, {0x25AA, 0x25B1}, {0x25B4, 0x25B5},
|
||||
{0x25B8, 0x25BB}, {0x25BE, 0x25BF}, {0x25C2, 0x25C5},
|
||||
{0x25C9, 0x25CA}, {0x25CC, 0x25CD}, {0x25D2, 0x25E1},
|
||||
{0x25E6, 0x25EE}, {0x25F0, 0x25FC}, {0x25FF, 0x2604},
|
||||
{0x2607, 0x2608}, {0x260A, 0x260D}, {0x2610, 0x2613},
|
||||
{0x2616, 0x261B}, {0x261D, 0x261D}, {0x261F, 0x263F},
|
||||
{0x2641, 0x2641}, {0x2643, 0x2647}, {0x2654, 0x265F},
|
||||
{0x2662, 0x2662}, {0x2666, 0x2666}, {0x266B, 0x266B},
|
||||
{0x266E, 0x266E}, {0x2670, 0x267E}, {0x2680, 0x2692},
|
||||
{0x2694, 0x269D}, {0x26A0, 0x26A0}, {0x26A2, 0x26A9},
|
||||
{0x26AC, 0x26BC}, {0x26C0, 0x26C3}, {0x26E2, 0x26E2},
|
||||
{0x26E4, 0x26E7}, {0x2700, 0x2704}, {0x2706, 0x2709},
|
||||
{0x270C, 0x2727}, {0x2729, 0x273C}, {0x273E, 0x274B},
|
||||
{0x274D, 0x274D}, {0x274F, 0x2752}, {0x2756, 0x2756},
|
||||
{0x2758, 0x2775}, {0x2780, 0x2794}, {0x2798, 0x27AF},
|
||||
{0x27B1, 0x27BE}, {0x27C0, 0x27E5}, {0x27EE, 0x2984},
|
||||
{0x2987, 0x2B1A}, {0x2B1D, 0x2B4F}, {0x2B51, 0x2B54},
|
||||
{0x2B5A, 0x2B73}, {0x2B76, 0x2B95}, {0x2B98, 0x2BB9},
|
||||
{0x2BBD, 0x2BC8}, {0x2BCA, 0x2BD1}, {0x2BEC, 0x2BEF},
|
||||
{0x2C00, 0x2C2E}, {0x2C30, 0x2C5E}, {0x2C60, 0x2CF3},
|
||||
{0x2CF9, 0x2D25}, {0x2D27, 0x2D27}, {0x2D2D, 0x2D2D},
|
||||
{0x2D30, 0x2D67}, {0x2D6F, 0x2D70}, {0x2D7F, 0x2D96},
|
||||
{0x2DA0, 0x2DA6}, {0x2DA8, 0x2DAE}, {0x2DB0, 0x2DB6},
|
||||
{0x2DB8, 0x2DBE}, {0x2DC0, 0x2DC6}, {0x2DC8, 0x2DCE},
|
||||
{0x2DD0, 0x2DD6}, {0x2DD8, 0x2DDE}, {0x2DE0, 0x2E44},
|
||||
{0x303F, 0x303F}, {0x4DC0, 0x4DFF}, {0xA4D0, 0xA62B},
|
||||
{0xA640, 0xA6F7}, {0xA700, 0xA7AE}, {0xA7B0, 0xA7B7},
|
||||
{0xA7F7, 0xA82B}, {0xA830, 0xA839}, {0xA840, 0xA877},
|
||||
{0xA880, 0xA8C5}, {0xA8CE, 0xA8D9}, {0xA8E0, 0xA8FD},
|
||||
{0xA900, 0xA953}, {0xA95F, 0xA95F}, {0xA980, 0xA9CD},
|
||||
{0xA9CF, 0xA9D9}, {0xA9DE, 0xA9FE}, {0xAA00, 0xAA36},
|
||||
{0xAA40, 0xAA4D}, {0xAA50, 0xAA59}, {0xAA5C, 0xAAC2},
|
||||
{0xAADB, 0xAAF6}, {0xAB01, 0xAB06}, {0xAB09, 0xAB0E},
|
||||
{0xAB11, 0xAB16}, {0xAB20, 0xAB26}, {0xAB28, 0xAB2E},
|
||||
{0xAB30, 0xAB65}, {0xAB70, 0xABED}, {0xABF0, 0xABF9},
|
||||
{0xD7B0, 0xD7C6}, {0xD7CB, 0xD7FB}, {0xD800, 0xDFFF},
|
||||
{0xFB00, 0xFB06}, {0xFB13, 0xFB17}, {0xFB1D, 0xFB36},
|
||||
{0xFB38, 0xFB3C}, {0xFB3E, 0xFB3E}, {0xFB40, 0xFB41},
|
||||
{0xFB43, 0xFB44}, {0xFB46, 0xFBC1}, {0xFBD3, 0xFD3F},
|
||||
{0xFD50, 0xFD8F}, {0xFD92, 0xFDC7}, {0xFDF0, 0xFDFD},
|
||||
{0xFE20, 0xFE2F}, {0xFE70, 0xFE74}, {0xFE76, 0xFEFC},
|
||||
{0xFEFF, 0xFEFF}, {0xFFF9, 0xFFFC}, {0x10000, 0x1000B},
|
||||
{0x1000D, 0x10026}, {0x10028, 0x1003A}, {0x1003C, 0x1003D},
|
||||
{0x1003F, 0x1004D}, {0x10050, 0x1005D}, {0x10080, 0x100FA},
|
||||
{0x10100, 0x10102}, {0x10107, 0x10133}, {0x10137, 0x1018E},
|
||||
{0x10190, 0x1019B}, {0x101A0, 0x101A0}, {0x101D0, 0x101FD},
|
||||
{0x10280, 0x1029C}, {0x102A0, 0x102D0}, {0x102E0, 0x102FB},
|
||||
{0x10300, 0x10323}, {0x10330, 0x1034A}, {0x10350, 0x1037A},
|
||||
{0x10380, 0x1039D}, {0x1039F, 0x103C3}, {0x103C8, 0x103D5},
|
||||
{0x10400, 0x1049D}, {0x104A0, 0x104A9}, {0x104B0, 0x104D3},
|
||||
{0x104D8, 0x104FB}, {0x10500, 0x10527}, {0x10530, 0x10563},
|
||||
{0x1056F, 0x1056F}, {0x10600, 0x10736}, {0x10740, 0x10755},
|
||||
{0x10760, 0x10767}, {0x10800, 0x10805}, {0x10808, 0x10808},
|
||||
{0x1080A, 0x10835}, {0x10837, 0x10838}, {0x1083C, 0x1083C},
|
||||
{0x1083F, 0x10855}, {0x10857, 0x1089E}, {0x108A7, 0x108AF},
|
||||
{0x108E0, 0x108F2}, {0x108F4, 0x108F5}, {0x108FB, 0x1091B},
|
||||
{0x1091F, 0x10939}, {0x1093F, 0x1093F}, {0x10980, 0x109B7},
|
||||
{0x109BC, 0x109CF}, {0x109D2, 0x10A03}, {0x10A05, 0x10A06},
|
||||
{0x10A0C, 0x10A13}, {0x10A15, 0x10A17}, {0x10A19, 0x10A33},
|
||||
{0x10A38, 0x10A3A}, {0x10A3F, 0x10A47}, {0x10A50, 0x10A58},
|
||||
{0x10A60, 0x10A9F}, {0x10AC0, 0x10AE6}, {0x10AEB, 0x10AF6},
|
||||
{0x10B00, 0x10B35}, {0x10B39, 0x10B55}, {0x10B58, 0x10B72},
|
||||
{0x10B78, 0x10B91}, {0x10B99, 0x10B9C}, {0x10BA9, 0x10BAF},
|
||||
{0x10C00, 0x10C48}, {0x10C80, 0x10CB2}, {0x10CC0, 0x10CF2},
|
||||
{0x10CFA, 0x10CFF}, {0x10E60, 0x10E7E}, {0x11000, 0x1104D},
|
||||
{0x11052, 0x1106F}, {0x1107F, 0x110C1}, {0x110D0, 0x110E8},
|
||||
{0x110F0, 0x110F9}, {0x11100, 0x11134}, {0x11136, 0x11143},
|
||||
{0x11150, 0x11176}, {0x11180, 0x111CD}, {0x111D0, 0x111DF},
|
||||
{0x111E1, 0x111F4}, {0x11200, 0x11211}, {0x11213, 0x1123E},
|
||||
{0x11280, 0x11286}, {0x11288, 0x11288}, {0x1128A, 0x1128D},
|
||||
{0x1128F, 0x1129D}, {0x1129F, 0x112A9}, {0x112B0, 0x112EA},
|
||||
{0x112F0, 0x112F9}, {0x11300, 0x11303}, {0x11305, 0x1130C},
|
||||
{0x1130F, 0x11310}, {0x11313, 0x11328}, {0x1132A, 0x11330},
|
||||
{0x11332, 0x11333}, {0x11335, 0x11339}, {0x1133C, 0x11344},
|
||||
{0x11347, 0x11348}, {0x1134B, 0x1134D}, {0x11350, 0x11350},
|
||||
{0x11357, 0x11357}, {0x1135D, 0x11363}, {0x11366, 0x1136C},
|
||||
{0x11370, 0x11374}, {0x11400, 0x11459}, {0x1145B, 0x1145B},
|
||||
{0x1145D, 0x1145D}, {0x11480, 0x114C7}, {0x114D0, 0x114D9},
|
||||
{0x11580, 0x115B5}, {0x115B8, 0x115DD}, {0x11600, 0x11644},
|
||||
{0x11650, 0x11659}, {0x11660, 0x1166C}, {0x11680, 0x116B7},
|
||||
{0x116C0, 0x116C9}, {0x11700, 0x11719}, {0x1171D, 0x1172B},
|
||||
{0x11730, 0x1173F}, {0x118A0, 0x118F2}, {0x118FF, 0x118FF},
|
||||
{0x11AC0, 0x11AF8}, {0x11C00, 0x11C08}, {0x11C0A, 0x11C36},
|
||||
{0x11C38, 0x11C45}, {0x11C50, 0x11C6C}, {0x11C70, 0x11C8F},
|
||||
{0x11C92, 0x11CA7}, {0x11CA9, 0x11CB6}, {0x12000, 0x12399},
|
||||
{0x12400, 0x1246E}, {0x12470, 0x12474}, {0x12480, 0x12543},
|
||||
{0x13000, 0x1342E}, {0x14400, 0x14646}, {0x16800, 0x16A38},
|
||||
{0x16A40, 0x16A5E}, {0x16A60, 0x16A69}, {0x16A6E, 0x16A6F},
|
||||
{0x16AD0, 0x16AED}, {0x16AF0, 0x16AF5}, {0x16B00, 0x16B45},
|
||||
{0x16B50, 0x16B59}, {0x16B5B, 0x16B61}, {0x16B63, 0x16B77},
|
||||
{0x16B7D, 0x16B8F}, {0x16F00, 0x16F44}, {0x16F50, 0x16F7E},
|
||||
{0x16F8F, 0x16F9F}, {0x1BC00, 0x1BC6A}, {0x1BC70, 0x1BC7C},
|
||||
{0x1BC80, 0x1BC88}, {0x1BC90, 0x1BC99}, {0x1BC9C, 0x1BCA3},
|
||||
{0x1D000, 0x1D0F5}, {0x1D100, 0x1D126}, {0x1D129, 0x1D1E8},
|
||||
{0x1D200, 0x1D245}, {0x1D300, 0x1D356}, {0x1D360, 0x1D371},
|
||||
{0x1D400, 0x1D454}, {0x1D456, 0x1D49C}, {0x1D49E, 0x1D49F},
|
||||
{0x1D4A2, 0x1D4A2}, {0x1D4A5, 0x1D4A6}, {0x1D4A9, 0x1D4AC},
|
||||
{0x1D4AE, 0x1D4B9}, {0x1D4BB, 0x1D4BB}, {0x1D4BD, 0x1D4C3},
|
||||
{0x1D4C5, 0x1D505}, {0x1D507, 0x1D50A}, {0x1D50D, 0x1D514},
|
||||
{0x1D516, 0x1D51C}, {0x1D51E, 0x1D539}, {0x1D53B, 0x1D53E},
|
||||
{0x1D540, 0x1D544}, {0x1D546, 0x1D546}, {0x1D54A, 0x1D550},
|
||||
{0x1D552, 0x1D6A5}, {0x1D6A8, 0x1D7CB}, {0x1D7CE, 0x1DA8B},
|
||||
{0x1DA9B, 0x1DA9F}, {0x1DAA1, 0x1DAAF}, {0x1E000, 0x1E006},
|
||||
{0x1E008, 0x1E018}, {0x1E01B, 0x1E021}, {0x1E023, 0x1E024},
|
||||
{0x1E026, 0x1E02A}, {0x1E800, 0x1E8C4}, {0x1E8C7, 0x1E8D6},
|
||||
{0x1E900, 0x1E94A}, {0x1E950, 0x1E959}, {0x1E95E, 0x1E95F},
|
||||
{0x1EE00, 0x1EE03}, {0x1EE05, 0x1EE1F}, {0x1EE21, 0x1EE22},
|
||||
{0x1EE24, 0x1EE24}, {0x1EE27, 0x1EE27}, {0x1EE29, 0x1EE32},
|
||||
{0x1EE34, 0x1EE37}, {0x1EE39, 0x1EE39}, {0x1EE3B, 0x1EE3B},
|
||||
{0x1EE42, 0x1EE42}, {0x1EE47, 0x1EE47}, {0x1EE49, 0x1EE49},
|
||||
{0x1EE4B, 0x1EE4B}, {0x1EE4D, 0x1EE4F}, {0x1EE51, 0x1EE52},
|
||||
{0x1EE54, 0x1EE54}, {0x1EE57, 0x1EE57}, {0x1EE59, 0x1EE59},
|
||||
{0x1EE5B, 0x1EE5B}, {0x1EE5D, 0x1EE5D}, {0x1EE5F, 0x1EE5F},
|
||||
{0x1EE61, 0x1EE62}, {0x1EE64, 0x1EE64}, {0x1EE67, 0x1EE6A},
|
||||
{0x1EE6C, 0x1EE72}, {0x1EE74, 0x1EE77}, {0x1EE79, 0x1EE7C},
|
||||
{0x1EE7E, 0x1EE7E}, {0x1EE80, 0x1EE89}, {0x1EE8B, 0x1EE9B},
|
||||
{0x1EEA1, 0x1EEA3}, {0x1EEA5, 0x1EEA9}, {0x1EEAB, 0x1EEBB},
|
||||
{0x1EEF0, 0x1EEF1}, {0x1F000, 0x1F003}, {0x1F005, 0x1F02B},
|
||||
{0x1F030, 0x1F093}, {0x1F0A0, 0x1F0AE}, {0x1F0B1, 0x1F0BF},
|
||||
{0x1F0C1, 0x1F0CE}, {0x1F0D1, 0x1F0F5}, {0x1F10B, 0x1F10C},
|
||||
{0x1F12E, 0x1F12E}, {0x1F16A, 0x1F16B}, {0x1F1E6, 0x1F1FF},
|
||||
{0x1F321, 0x1F32C}, {0x1F336, 0x1F336}, {0x1F37D, 0x1F37D},
|
||||
{0x1F394, 0x1F39F}, {0x1F3CB, 0x1F3CE}, {0x1F3D4, 0x1F3DF},
|
||||
{0x1F3F1, 0x1F3F3}, {0x1F3F5, 0x1F3F7}, {0x1F43F, 0x1F43F},
|
||||
{0x1F441, 0x1F441}, {0x1F4FD, 0x1F4FE}, {0x1F53E, 0x1F54A},
|
||||
{0x1F54F, 0x1F54F}, {0x1F568, 0x1F579}, {0x1F57B, 0x1F594},
|
||||
{0x1F597, 0x1F5A3}, {0x1F5A5, 0x1F5FA}, {0x1F650, 0x1F67F},
|
||||
{0x1F6C6, 0x1F6CB}, {0x1F6CD, 0x1F6CF}, {0x1F6E0, 0x1F6EA},
|
||||
{0x1F6F0, 0x1F6F3}, {0x1F700, 0x1F773}, {0x1F780, 0x1F7D4},
|
||||
{0x1F800, 0x1F80B}, {0x1F810, 0x1F847}, {0x1F850, 0x1F859},
|
||||
{0x1F860, 0x1F887}, {0x1F890, 0x1F8AD}, {0xE0001, 0xE0001},
|
||||
{0xE0020, 0xE007F},
|
||||
}

// Condition have flag EastAsianWidth whether the current locale is CJK or not.
type Condition struct {
	EastAsianWidth bool

@@ -822,11 +101,9 @@ func NewCondition() *Condition {
// See http://www.unicode.org/reports/tr11/
func (c *Condition) RuneWidth(r rune) int {
	switch {
	case r < 0 || r > 0x10FFFF ||
		inTables(r, nonprint, combining, notassigned):
	case r < 0 || r > 0x10FFFF || inTables(r, nonprint, combining, notassigned):
		return 0
	case (c.EastAsianWidth && IsAmbiguousWidth(r)) ||
		inTables(r, doublewidth, emoji):
	case (c.EastAsianWidth && IsAmbiguousWidth(r)) || inTables(r, doublewidth):
		return 2
	default:
		return 1

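As a quick illustration of the classification in RuneWidth above, the package's exported helpers can be called directly. A minimal sketch; the commented results are my expectation for a default, non-CJK condition (the default condition may differ if the locale is detected as East Asian).

package main

import (
	"fmt"

	runewidth "github.com/mattn/go-runewidth"
)

func main() {
	fmt.Println(runewidth.RuneWidth('a'))  // 1: plain ASCII
	fmt.Println(runewidth.RuneWidth('世'))  // 2: CJK ideograph, in the doublewidth table
	fmt.Println(runewidth.RuneWidth('\n')) // 0: non-printing

	// Ambiguous-width runes widen from 1 to 2 when EastAsianWidth is set.
	c := runewidth.NewCondition()
	c.EastAsianWidth = true
	fmt.Println(c.RuneWidth('○')) // 2 under a CJK-style condition
}
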
@@ -848,9 +125,12 @@ func (c *Condition) stringWidthZeroJoiner(s string) (width int) {
		}
		w := c.RuneWidth(r)
		if r2 == 0x200D && inTables(r, emoji) && inTables(r1, emoji) {
			w = 0
			if width < w {
				width = w
			}
		} else {
			width += w
		}
		r1, r2 = r2, r
	}
	return width

5 vendor/github.com/mattn/go-runewidth/runewidth_posix.go (generated, vendored)

@@ -62,7 +62,10 @@ func isEastAsian(locale string) bool {

// IsEastAsian return true if the current locale is CJK
func IsEastAsian() bool {
	locale := os.Getenv("LC_CTYPE")
	locale := os.Getenv("LC_ALL")
	if locale == "" {
		locale = os.Getenv("LC_CTYPE")
	}
	if locale == "" {
		locale = os.Getenv("LANG")
	}

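The change above gives LC_ALL precedence over LC_CTYPE, with LANG as the final fallback, matching the usual POSIX lookup order. A standalone sketch of the same fallback chain, using a hypothetical helper name that is not part of the library's API:

package main

import (
	"fmt"
	"os"
)

// widthLocale mirrors the lookup order used above: LC_ALL first,
// then LC_CTYPE, then LANG; empty values fall through to the next key.
func widthLocale() string {
	for _, key := range []string{"LC_ALL", "LC_CTYPE", "LANG"} {
		if v := os.Getenv(key); v != "" {
			return v
		}
	}
	return ""
}

func main() {
	fmt.Printf("locale used for width detection: %q\n", widthLocale())
}
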
437 vendor/github.com/mattn/go-runewidth/runewidth_table.go (generated, vendored, new file)

@@ -0,0 +1,437 @@
// Code generated by script/generate.go. DO NOT EDIT.

package runewidth

var combining = table{
|
||||
{0x0300, 0x036F}, {0x0483, 0x0489}, {0x07EB, 0x07F3},
|
||||
{0x0C00, 0x0C00}, {0x0C04, 0x0C04}, {0x0D00, 0x0D01},
|
||||
{0x135D, 0x135F}, {0x1A7F, 0x1A7F}, {0x1AB0, 0x1AC0},
|
||||
{0x1B6B, 0x1B73}, {0x1DC0, 0x1DF9}, {0x1DFB, 0x1DFF},
|
||||
{0x20D0, 0x20F0}, {0x2CEF, 0x2CF1}, {0x2DE0, 0x2DFF},
|
||||
{0x3099, 0x309A}, {0xA66F, 0xA672}, {0xA674, 0xA67D},
|
||||
{0xA69E, 0xA69F}, {0xA6F0, 0xA6F1}, {0xA8E0, 0xA8F1},
|
||||
{0xFE20, 0xFE2F}, {0x101FD, 0x101FD}, {0x10376, 0x1037A},
|
||||
{0x10EAB, 0x10EAC}, {0x10F46, 0x10F50}, {0x11300, 0x11301},
|
||||
{0x1133B, 0x1133C}, {0x11366, 0x1136C}, {0x11370, 0x11374},
|
||||
{0x16AF0, 0x16AF4}, {0x1D165, 0x1D169}, {0x1D16D, 0x1D172},
|
||||
{0x1D17B, 0x1D182}, {0x1D185, 0x1D18B}, {0x1D1AA, 0x1D1AD},
|
||||
{0x1D242, 0x1D244}, {0x1E000, 0x1E006}, {0x1E008, 0x1E018},
|
||||
{0x1E01B, 0x1E021}, {0x1E023, 0x1E024}, {0x1E026, 0x1E02A},
|
||||
{0x1E8D0, 0x1E8D6},
|
||||
}
|
||||
|
||||
var doublewidth = table{
|
||||
{0x1100, 0x115F}, {0x231A, 0x231B}, {0x2329, 0x232A},
|
||||
{0x23E9, 0x23EC}, {0x23F0, 0x23F0}, {0x23F3, 0x23F3},
|
||||
{0x25FD, 0x25FE}, {0x2614, 0x2615}, {0x2648, 0x2653},
|
||||
{0x267F, 0x267F}, {0x2693, 0x2693}, {0x26A1, 0x26A1},
|
||||
{0x26AA, 0x26AB}, {0x26BD, 0x26BE}, {0x26C4, 0x26C5},
|
||||
{0x26CE, 0x26CE}, {0x26D4, 0x26D4}, {0x26EA, 0x26EA},
|
||||
{0x26F2, 0x26F3}, {0x26F5, 0x26F5}, {0x26FA, 0x26FA},
|
||||
{0x26FD, 0x26FD}, {0x2705, 0x2705}, {0x270A, 0x270B},
|
||||
{0x2728, 0x2728}, {0x274C, 0x274C}, {0x274E, 0x274E},
|
||||
{0x2753, 0x2755}, {0x2757, 0x2757}, {0x2795, 0x2797},
|
||||
{0x27B0, 0x27B0}, {0x27BF, 0x27BF}, {0x2B1B, 0x2B1C},
|
||||
{0x2B50, 0x2B50}, {0x2B55, 0x2B55}, {0x2E80, 0x2E99},
|
||||
{0x2E9B, 0x2EF3}, {0x2F00, 0x2FD5}, {0x2FF0, 0x2FFB},
|
||||
{0x3000, 0x303E}, {0x3041, 0x3096}, {0x3099, 0x30FF},
|
||||
{0x3105, 0x312F}, {0x3131, 0x318E}, {0x3190, 0x31E3},
|
||||
{0x31F0, 0x321E}, {0x3220, 0x3247}, {0x3250, 0x4DBF},
|
||||
{0x4E00, 0xA48C}, {0xA490, 0xA4C6}, {0xA960, 0xA97C},
|
||||
{0xAC00, 0xD7A3}, {0xF900, 0xFAFF}, {0xFE10, 0xFE19},
|
||||
{0xFE30, 0xFE52}, {0xFE54, 0xFE66}, {0xFE68, 0xFE6B},
|
||||
{0xFF01, 0xFF60}, {0xFFE0, 0xFFE6}, {0x16FE0, 0x16FE4},
|
||||
{0x16FF0, 0x16FF1}, {0x17000, 0x187F7}, {0x18800, 0x18CD5},
|
||||
{0x18D00, 0x18D08}, {0x1B000, 0x1B11E}, {0x1B150, 0x1B152},
|
||||
{0x1B164, 0x1B167}, {0x1B170, 0x1B2FB}, {0x1F004, 0x1F004},
|
||||
{0x1F0CF, 0x1F0CF}, {0x1F18E, 0x1F18E}, {0x1F191, 0x1F19A},
|
||||
{0x1F200, 0x1F202}, {0x1F210, 0x1F23B}, {0x1F240, 0x1F248},
|
||||
{0x1F250, 0x1F251}, {0x1F260, 0x1F265}, {0x1F300, 0x1F320},
|
||||
{0x1F32D, 0x1F335}, {0x1F337, 0x1F37C}, {0x1F37E, 0x1F393},
|
||||
{0x1F3A0, 0x1F3CA}, {0x1F3CF, 0x1F3D3}, {0x1F3E0, 0x1F3F0},
|
||||
{0x1F3F4, 0x1F3F4}, {0x1F3F8, 0x1F43E}, {0x1F440, 0x1F440},
|
||||
{0x1F442, 0x1F4FC}, {0x1F4FF, 0x1F53D}, {0x1F54B, 0x1F54E},
|
||||
{0x1F550, 0x1F567}, {0x1F57A, 0x1F57A}, {0x1F595, 0x1F596},
|
||||
{0x1F5A4, 0x1F5A4}, {0x1F5FB, 0x1F64F}, {0x1F680, 0x1F6C5},
|
||||
{0x1F6CC, 0x1F6CC}, {0x1F6D0, 0x1F6D2}, {0x1F6D5, 0x1F6D7},
|
||||
{0x1F6EB, 0x1F6EC}, {0x1F6F4, 0x1F6FC}, {0x1F7E0, 0x1F7EB},
|
||||
{0x1F90C, 0x1F93A}, {0x1F93C, 0x1F945}, {0x1F947, 0x1F978},
|
||||
{0x1F97A, 0x1F9CB}, {0x1F9CD, 0x1F9FF}, {0x1FA70, 0x1FA74},
|
||||
{0x1FA78, 0x1FA7A}, {0x1FA80, 0x1FA86}, {0x1FA90, 0x1FAA8},
|
||||
{0x1FAB0, 0x1FAB6}, {0x1FAC0, 0x1FAC2}, {0x1FAD0, 0x1FAD6},
|
||||
{0x20000, 0x2FFFD}, {0x30000, 0x3FFFD},
|
||||
}
|
||||
|
||||
var ambiguous = table{
|
||||
{0x00A1, 0x00A1}, {0x00A4, 0x00A4}, {0x00A7, 0x00A8},
|
||||
{0x00AA, 0x00AA}, {0x00AD, 0x00AE}, {0x00B0, 0x00B4},
|
||||
{0x00B6, 0x00BA}, {0x00BC, 0x00BF}, {0x00C6, 0x00C6},
|
||||
{0x00D0, 0x00D0}, {0x00D7, 0x00D8}, {0x00DE, 0x00E1},
|
||||
{0x00E6, 0x00E6}, {0x00E8, 0x00EA}, {0x00EC, 0x00ED},
|
||||
{0x00F0, 0x00F0}, {0x00F2, 0x00F3}, {0x00F7, 0x00FA},
|
||||
{0x00FC, 0x00FC}, {0x00FE, 0x00FE}, {0x0101, 0x0101},
|
||||
{0x0111, 0x0111}, {0x0113, 0x0113}, {0x011B, 0x011B},
|
||||
{0x0126, 0x0127}, {0x012B, 0x012B}, {0x0131, 0x0133},
|
||||
{0x0138, 0x0138}, {0x013F, 0x0142}, {0x0144, 0x0144},
|
||||
{0x0148, 0x014B}, {0x014D, 0x014D}, {0x0152, 0x0153},
|
||||
{0x0166, 0x0167}, {0x016B, 0x016B}, {0x01CE, 0x01CE},
|
||||
{0x01D0, 0x01D0}, {0x01D2, 0x01D2}, {0x01D4, 0x01D4},
|
||||
{0x01D6, 0x01D6}, {0x01D8, 0x01D8}, {0x01DA, 0x01DA},
|
||||
{0x01DC, 0x01DC}, {0x0251, 0x0251}, {0x0261, 0x0261},
|
||||
{0x02C4, 0x02C4}, {0x02C7, 0x02C7}, {0x02C9, 0x02CB},
|
||||
{0x02CD, 0x02CD}, {0x02D0, 0x02D0}, {0x02D8, 0x02DB},
|
||||
{0x02DD, 0x02DD}, {0x02DF, 0x02DF}, {0x0300, 0x036F},
|
||||
{0x0391, 0x03A1}, {0x03A3, 0x03A9}, {0x03B1, 0x03C1},
|
||||
{0x03C3, 0x03C9}, {0x0401, 0x0401}, {0x0410, 0x044F},
|
||||
{0x0451, 0x0451}, {0x2010, 0x2010}, {0x2013, 0x2016},
|
||||
{0x2018, 0x2019}, {0x201C, 0x201D}, {0x2020, 0x2022},
|
||||
{0x2024, 0x2027}, {0x2030, 0x2030}, {0x2032, 0x2033},
|
||||
{0x2035, 0x2035}, {0x203B, 0x203B}, {0x203E, 0x203E},
|
||||
{0x2074, 0x2074}, {0x207F, 0x207F}, {0x2081, 0x2084},
|
||||
{0x20AC, 0x20AC}, {0x2103, 0x2103}, {0x2105, 0x2105},
|
||||
{0x2109, 0x2109}, {0x2113, 0x2113}, {0x2116, 0x2116},
|
||||
{0x2121, 0x2122}, {0x2126, 0x2126}, {0x212B, 0x212B},
|
||||
{0x2153, 0x2154}, {0x215B, 0x215E}, {0x2160, 0x216B},
|
||||
{0x2170, 0x2179}, {0x2189, 0x2189}, {0x2190, 0x2199},
|
||||
{0x21B8, 0x21B9}, {0x21D2, 0x21D2}, {0x21D4, 0x21D4},
|
||||
{0x21E7, 0x21E7}, {0x2200, 0x2200}, {0x2202, 0x2203},
|
||||
{0x2207, 0x2208}, {0x220B, 0x220B}, {0x220F, 0x220F},
|
||||
{0x2211, 0x2211}, {0x2215, 0x2215}, {0x221A, 0x221A},
|
||||
{0x221D, 0x2220}, {0x2223, 0x2223}, {0x2225, 0x2225},
|
||||
{0x2227, 0x222C}, {0x222E, 0x222E}, {0x2234, 0x2237},
|
||||
{0x223C, 0x223D}, {0x2248, 0x2248}, {0x224C, 0x224C},
|
||||
{0x2252, 0x2252}, {0x2260, 0x2261}, {0x2264, 0x2267},
|
||||
{0x226A, 0x226B}, {0x226E, 0x226F}, {0x2282, 0x2283},
|
||||
{0x2286, 0x2287}, {0x2295, 0x2295}, {0x2299, 0x2299},
|
||||
{0x22A5, 0x22A5}, {0x22BF, 0x22BF}, {0x2312, 0x2312},
|
||||
{0x2460, 0x24E9}, {0x24EB, 0x254B}, {0x2550, 0x2573},
|
||||
{0x2580, 0x258F}, {0x2592, 0x2595}, {0x25A0, 0x25A1},
|
||||
{0x25A3, 0x25A9}, {0x25B2, 0x25B3}, {0x25B6, 0x25B7},
|
||||
{0x25BC, 0x25BD}, {0x25C0, 0x25C1}, {0x25C6, 0x25C8},
|
||||
{0x25CB, 0x25CB}, {0x25CE, 0x25D1}, {0x25E2, 0x25E5},
|
||||
{0x25EF, 0x25EF}, {0x2605, 0x2606}, {0x2609, 0x2609},
|
||||
{0x260E, 0x260F}, {0x261C, 0x261C}, {0x261E, 0x261E},
|
||||
{0x2640, 0x2640}, {0x2642, 0x2642}, {0x2660, 0x2661},
|
||||
{0x2663, 0x2665}, {0x2667, 0x266A}, {0x266C, 0x266D},
|
||||
{0x266F, 0x266F}, {0x269E, 0x269F}, {0x26BF, 0x26BF},
|
||||
{0x26C6, 0x26CD}, {0x26CF, 0x26D3}, {0x26D5, 0x26E1},
|
||||
{0x26E3, 0x26E3}, {0x26E8, 0x26E9}, {0x26EB, 0x26F1},
|
||||
{0x26F4, 0x26F4}, {0x26F6, 0x26F9}, {0x26FB, 0x26FC},
|
||||
{0x26FE, 0x26FF}, {0x273D, 0x273D}, {0x2776, 0x277F},
|
||||
{0x2B56, 0x2B59}, {0x3248, 0x324F}, {0xE000, 0xF8FF},
|
||||
{0xFE00, 0xFE0F}, {0xFFFD, 0xFFFD}, {0x1F100, 0x1F10A},
|
||||
{0x1F110, 0x1F12D}, {0x1F130, 0x1F169}, {0x1F170, 0x1F18D},
|
||||
{0x1F18F, 0x1F190}, {0x1F19B, 0x1F1AC}, {0xE0100, 0xE01EF},
|
||||
{0xF0000, 0xFFFFD}, {0x100000, 0x10FFFD},
|
||||
}
|
||||
var notassigned = table{
|
||||
{0x27E6, 0x27ED}, {0x2985, 0x2986},
|
||||
}
|
||||
|
||||
var neutral = table{
|
||||
{0x0000, 0x001F}, {0x007F, 0x00A0}, {0x00A9, 0x00A9},
|
||||
{0x00AB, 0x00AB}, {0x00B5, 0x00B5}, {0x00BB, 0x00BB},
|
||||
{0x00C0, 0x00C5}, {0x00C7, 0x00CF}, {0x00D1, 0x00D6},
|
||||
{0x00D9, 0x00DD}, {0x00E2, 0x00E5}, {0x00E7, 0x00E7},
|
||||
{0x00EB, 0x00EB}, {0x00EE, 0x00EF}, {0x00F1, 0x00F1},
|
||||
{0x00F4, 0x00F6}, {0x00FB, 0x00FB}, {0x00FD, 0x00FD},
|
||||
{0x00FF, 0x0100}, {0x0102, 0x0110}, {0x0112, 0x0112},
|
||||
{0x0114, 0x011A}, {0x011C, 0x0125}, {0x0128, 0x012A},
|
||||
{0x012C, 0x0130}, {0x0134, 0x0137}, {0x0139, 0x013E},
|
||||
{0x0143, 0x0143}, {0x0145, 0x0147}, {0x014C, 0x014C},
|
||||
{0x014E, 0x0151}, {0x0154, 0x0165}, {0x0168, 0x016A},
|
||||
{0x016C, 0x01CD}, {0x01CF, 0x01CF}, {0x01D1, 0x01D1},
|
||||
{0x01D3, 0x01D3}, {0x01D5, 0x01D5}, {0x01D7, 0x01D7},
|
||||
{0x01D9, 0x01D9}, {0x01DB, 0x01DB}, {0x01DD, 0x0250},
|
||||
{0x0252, 0x0260}, {0x0262, 0x02C3}, {0x02C5, 0x02C6},
|
||||
{0x02C8, 0x02C8}, {0x02CC, 0x02CC}, {0x02CE, 0x02CF},
|
||||
{0x02D1, 0x02D7}, {0x02DC, 0x02DC}, {0x02DE, 0x02DE},
|
||||
{0x02E0, 0x02FF}, {0x0370, 0x0377}, {0x037A, 0x037F},
|
||||
{0x0384, 0x038A}, {0x038C, 0x038C}, {0x038E, 0x0390},
|
||||
{0x03AA, 0x03B0}, {0x03C2, 0x03C2}, {0x03CA, 0x0400},
|
||||
{0x0402, 0x040F}, {0x0450, 0x0450}, {0x0452, 0x052F},
|
||||
{0x0531, 0x0556}, {0x0559, 0x058A}, {0x058D, 0x058F},
|
||||
{0x0591, 0x05C7}, {0x05D0, 0x05EA}, {0x05EF, 0x05F4},
|
||||
{0x0600, 0x061C}, {0x061E, 0x070D}, {0x070F, 0x074A},
|
||||
{0x074D, 0x07B1}, {0x07C0, 0x07FA}, {0x07FD, 0x082D},
|
||||
{0x0830, 0x083E}, {0x0840, 0x085B}, {0x085E, 0x085E},
|
||||
{0x0860, 0x086A}, {0x08A0, 0x08B4}, {0x08B6, 0x08C7},
|
||||
{0x08D3, 0x0983}, {0x0985, 0x098C}, {0x098F, 0x0990},
|
||||
{0x0993, 0x09A8}, {0x09AA, 0x09B0}, {0x09B2, 0x09B2},
|
||||
{0x09B6, 0x09B9}, {0x09BC, 0x09C4}, {0x09C7, 0x09C8},
|
||||
{0x09CB, 0x09CE}, {0x09D7, 0x09D7}, {0x09DC, 0x09DD},
|
||||
{0x09DF, 0x09E3}, {0x09E6, 0x09FE}, {0x0A01, 0x0A03},
|
||||
{0x0A05, 0x0A0A}, {0x0A0F, 0x0A10}, {0x0A13, 0x0A28},
|
||||
{0x0A2A, 0x0A30}, {0x0A32, 0x0A33}, {0x0A35, 0x0A36},
|
||||
{0x0A38, 0x0A39}, {0x0A3C, 0x0A3C}, {0x0A3E, 0x0A42},
|
||||
{0x0A47, 0x0A48}, {0x0A4B, 0x0A4D}, {0x0A51, 0x0A51},
|
||||
{0x0A59, 0x0A5C}, {0x0A5E, 0x0A5E}, {0x0A66, 0x0A76},
|
||||
{0x0A81, 0x0A83}, {0x0A85, 0x0A8D}, {0x0A8F, 0x0A91},
|
||||
{0x0A93, 0x0AA8}, {0x0AAA, 0x0AB0}, {0x0AB2, 0x0AB3},
|
||||
{0x0AB5, 0x0AB9}, {0x0ABC, 0x0AC5}, {0x0AC7, 0x0AC9},
|
||||
{0x0ACB, 0x0ACD}, {0x0AD0, 0x0AD0}, {0x0AE0, 0x0AE3},
|
||||
{0x0AE6, 0x0AF1}, {0x0AF9, 0x0AFF}, {0x0B01, 0x0B03},
|
||||
{0x0B05, 0x0B0C}, {0x0B0F, 0x0B10}, {0x0B13, 0x0B28},
|
||||
{0x0B2A, 0x0B30}, {0x0B32, 0x0B33}, {0x0B35, 0x0B39},
|
||||
{0x0B3C, 0x0B44}, {0x0B47, 0x0B48}, {0x0B4B, 0x0B4D},
|
||||
{0x0B55, 0x0B57}, {0x0B5C, 0x0B5D}, {0x0B5F, 0x0B63},
|
||||
{0x0B66, 0x0B77}, {0x0B82, 0x0B83}, {0x0B85, 0x0B8A},
|
||||
{0x0B8E, 0x0B90}, {0x0B92, 0x0B95}, {0x0B99, 0x0B9A},
|
||||
{0x0B9C, 0x0B9C}, {0x0B9E, 0x0B9F}, {0x0BA3, 0x0BA4},
|
||||
{0x0BA8, 0x0BAA}, {0x0BAE, 0x0BB9}, {0x0BBE, 0x0BC2},
|
||||
{0x0BC6, 0x0BC8}, {0x0BCA, 0x0BCD}, {0x0BD0, 0x0BD0},
|
||||
{0x0BD7, 0x0BD7}, {0x0BE6, 0x0BFA}, {0x0C00, 0x0C0C},
|
||||
{0x0C0E, 0x0C10}, {0x0C12, 0x0C28}, {0x0C2A, 0x0C39},
|
||||
{0x0C3D, 0x0C44}, {0x0C46, 0x0C48}, {0x0C4A, 0x0C4D},
|
||||
{0x0C55, 0x0C56}, {0x0C58, 0x0C5A}, {0x0C60, 0x0C63},
|
||||
{0x0C66, 0x0C6F}, {0x0C77, 0x0C8C}, {0x0C8E, 0x0C90},
|
||||
{0x0C92, 0x0CA8}, {0x0CAA, 0x0CB3}, {0x0CB5, 0x0CB9},
|
||||
{0x0CBC, 0x0CC4}, {0x0CC6, 0x0CC8}, {0x0CCA, 0x0CCD},
|
||||
{0x0CD5, 0x0CD6}, {0x0CDE, 0x0CDE}, {0x0CE0, 0x0CE3},
|
||||
{0x0CE6, 0x0CEF}, {0x0CF1, 0x0CF2}, {0x0D00, 0x0D0C},
|
||||
{0x0D0E, 0x0D10}, {0x0D12, 0x0D44}, {0x0D46, 0x0D48},
|
||||
{0x0D4A, 0x0D4F}, {0x0D54, 0x0D63}, {0x0D66, 0x0D7F},
|
||||
{0x0D81, 0x0D83}, {0x0D85, 0x0D96}, {0x0D9A, 0x0DB1},
|
||||
{0x0DB3, 0x0DBB}, {0x0DBD, 0x0DBD}, {0x0DC0, 0x0DC6},
|
||||
{0x0DCA, 0x0DCA}, {0x0DCF, 0x0DD4}, {0x0DD6, 0x0DD6},
|
||||
{0x0DD8, 0x0DDF}, {0x0DE6, 0x0DEF}, {0x0DF2, 0x0DF4},
|
||||
{0x0E01, 0x0E3A}, {0x0E3F, 0x0E5B}, {0x0E81, 0x0E82},
|
||||
{0x0E84, 0x0E84}, {0x0E86, 0x0E8A}, {0x0E8C, 0x0EA3},
|
||||
{0x0EA5, 0x0EA5}, {0x0EA7, 0x0EBD}, {0x0EC0, 0x0EC4},
|
||||
{0x0EC6, 0x0EC6}, {0x0EC8, 0x0ECD}, {0x0ED0, 0x0ED9},
|
||||
{0x0EDC, 0x0EDF}, {0x0F00, 0x0F47}, {0x0F49, 0x0F6C},
|
||||
{0x0F71, 0x0F97}, {0x0F99, 0x0FBC}, {0x0FBE, 0x0FCC},
|
||||
{0x0FCE, 0x0FDA}, {0x1000, 0x10C5}, {0x10C7, 0x10C7},
|
||||
{0x10CD, 0x10CD}, {0x10D0, 0x10FF}, {0x1160, 0x1248},
|
||||
{0x124A, 0x124D}, {0x1250, 0x1256}, {0x1258, 0x1258},
|
||||
{0x125A, 0x125D}, {0x1260, 0x1288}, {0x128A, 0x128D},
|
||||
{0x1290, 0x12B0}, {0x12B2, 0x12B5}, {0x12B8, 0x12BE},
|
||||
{0x12C0, 0x12C0}, {0x12C2, 0x12C5}, {0x12C8, 0x12D6},
|
||||
{0x12D8, 0x1310}, {0x1312, 0x1315}, {0x1318, 0x135A},
|
||||
{0x135D, 0x137C}, {0x1380, 0x1399}, {0x13A0, 0x13F5},
|
||||
{0x13F8, 0x13FD}, {0x1400, 0x169C}, {0x16A0, 0x16F8},
|
||||
{0x1700, 0x170C}, {0x170E, 0x1714}, {0x1720, 0x1736},
|
||||
{0x1740, 0x1753}, {0x1760, 0x176C}, {0x176E, 0x1770},
|
||||
{0x1772, 0x1773}, {0x1780, 0x17DD}, {0x17E0, 0x17E9},
|
||||
{0x17F0, 0x17F9}, {0x1800, 0x180E}, {0x1810, 0x1819},
|
||||
{0x1820, 0x1878}, {0x1880, 0x18AA}, {0x18B0, 0x18F5},
|
||||
{0x1900, 0x191E}, {0x1920, 0x192B}, {0x1930, 0x193B},
|
||||
{0x1940, 0x1940}, {0x1944, 0x196D}, {0x1970, 0x1974},
|
||||
{0x1980, 0x19AB}, {0x19B0, 0x19C9}, {0x19D0, 0x19DA},
|
||||
{0x19DE, 0x1A1B}, {0x1A1E, 0x1A5E}, {0x1A60, 0x1A7C},
|
||||
{0x1A7F, 0x1A89}, {0x1A90, 0x1A99}, {0x1AA0, 0x1AAD},
|
||||
{0x1AB0, 0x1AC0}, {0x1B00, 0x1B4B}, {0x1B50, 0x1B7C},
|
||||
{0x1B80, 0x1BF3}, {0x1BFC, 0x1C37}, {0x1C3B, 0x1C49},
|
||||
{0x1C4D, 0x1C88}, {0x1C90, 0x1CBA}, {0x1CBD, 0x1CC7},
|
||||
{0x1CD0, 0x1CFA}, {0x1D00, 0x1DF9}, {0x1DFB, 0x1F15},
|
||||
{0x1F18, 0x1F1D}, {0x1F20, 0x1F45}, {0x1F48, 0x1F4D},
|
||||
{0x1F50, 0x1F57}, {0x1F59, 0x1F59}, {0x1F5B, 0x1F5B},
|
||||
{0x1F5D, 0x1F5D}, {0x1F5F, 0x1F7D}, {0x1F80, 0x1FB4},
|
||||
{0x1FB6, 0x1FC4}, {0x1FC6, 0x1FD3}, {0x1FD6, 0x1FDB},
|
||||
{0x1FDD, 0x1FEF}, {0x1FF2, 0x1FF4}, {0x1FF6, 0x1FFE},
|
||||
{0x2000, 0x200F}, {0x2011, 0x2012}, {0x2017, 0x2017},
|
||||
{0x201A, 0x201B}, {0x201E, 0x201F}, {0x2023, 0x2023},
|
||||
{0x2028, 0x202F}, {0x2031, 0x2031}, {0x2034, 0x2034},
|
||||
{0x2036, 0x203A}, {0x203C, 0x203D}, {0x203F, 0x2064},
|
||||
{0x2066, 0x2071}, {0x2075, 0x207E}, {0x2080, 0x2080},
|
||||
{0x2085, 0x208E}, {0x2090, 0x209C}, {0x20A0, 0x20A8},
|
||||
{0x20AA, 0x20AB}, {0x20AD, 0x20BF}, {0x20D0, 0x20F0},
|
||||
{0x2100, 0x2102}, {0x2104, 0x2104}, {0x2106, 0x2108},
|
||||
{0x210A, 0x2112}, {0x2114, 0x2115}, {0x2117, 0x2120},
|
||||
{0x2123, 0x2125}, {0x2127, 0x212A}, {0x212C, 0x2152},
|
||||
{0x2155, 0x215A}, {0x215F, 0x215F}, {0x216C, 0x216F},
|
||||
{0x217A, 0x2188}, {0x218A, 0x218B}, {0x219A, 0x21B7},
|
||||
{0x21BA, 0x21D1}, {0x21D3, 0x21D3}, {0x21D5, 0x21E6},
|
||||
{0x21E8, 0x21FF}, {0x2201, 0x2201}, {0x2204, 0x2206},
|
||||
{0x2209, 0x220A}, {0x220C, 0x220E}, {0x2210, 0x2210},
|
||||
{0x2212, 0x2214}, {0x2216, 0x2219}, {0x221B, 0x221C},
|
||||
{0x2221, 0x2222}, {0x2224, 0x2224}, {0x2226, 0x2226},
|
||||
{0x222D, 0x222D}, {0x222F, 0x2233}, {0x2238, 0x223B},
|
||||
{0x223E, 0x2247}, {0x2249, 0x224B}, {0x224D, 0x2251},
|
||||
{0x2253, 0x225F}, {0x2262, 0x2263}, {0x2268, 0x2269},
|
||||
{0x226C, 0x226D}, {0x2270, 0x2281}, {0x2284, 0x2285},
|
||||
{0x2288, 0x2294}, {0x2296, 0x2298}, {0x229A, 0x22A4},
|
||||
{0x22A6, 0x22BE}, {0x22C0, 0x2311}, {0x2313, 0x2319},
|
||||
{0x231C, 0x2328}, {0x232B, 0x23E8}, {0x23ED, 0x23EF},
|
||||
{0x23F1, 0x23F2}, {0x23F4, 0x2426}, {0x2440, 0x244A},
|
||||
{0x24EA, 0x24EA}, {0x254C, 0x254F}, {0x2574, 0x257F},
|
||||
{0x2590, 0x2591}, {0x2596, 0x259F}, {0x25A2, 0x25A2},
|
||||
{0x25AA, 0x25B1}, {0x25B4, 0x25B5}, {0x25B8, 0x25BB},
|
||||
{0x25BE, 0x25BF}, {0x25C2, 0x25C5}, {0x25C9, 0x25CA},
|
||||
{0x25CC, 0x25CD}, {0x25D2, 0x25E1}, {0x25E6, 0x25EE},
|
||||
{0x25F0, 0x25FC}, {0x25FF, 0x2604}, {0x2607, 0x2608},
|
||||
{0x260A, 0x260D}, {0x2610, 0x2613}, {0x2616, 0x261B},
|
||||
{0x261D, 0x261D}, {0x261F, 0x263F}, {0x2641, 0x2641},
|
||||
{0x2643, 0x2647}, {0x2654, 0x265F}, {0x2662, 0x2662},
|
||||
{0x2666, 0x2666}, {0x266B, 0x266B}, {0x266E, 0x266E},
|
||||
{0x2670, 0x267E}, {0x2680, 0x2692}, {0x2694, 0x269D},
|
||||
{0x26A0, 0x26A0}, {0x26A2, 0x26A9}, {0x26AC, 0x26BC},
|
||||
{0x26C0, 0x26C3}, {0x26E2, 0x26E2}, {0x26E4, 0x26E7},
|
||||
{0x2700, 0x2704}, {0x2706, 0x2709}, {0x270C, 0x2727},
|
||||
{0x2729, 0x273C}, {0x273E, 0x274B}, {0x274D, 0x274D},
|
||||
{0x274F, 0x2752}, {0x2756, 0x2756}, {0x2758, 0x2775},
|
||||
{0x2780, 0x2794}, {0x2798, 0x27AF}, {0x27B1, 0x27BE},
|
||||
{0x27C0, 0x27E5}, {0x27EE, 0x2984}, {0x2987, 0x2B1A},
|
||||
{0x2B1D, 0x2B4F}, {0x2B51, 0x2B54}, {0x2B5A, 0x2B73},
|
||||
{0x2B76, 0x2B95}, {0x2B97, 0x2C2E}, {0x2C30, 0x2C5E},
|
||||
{0x2C60, 0x2CF3}, {0x2CF9, 0x2D25}, {0x2D27, 0x2D27},
|
||||
{0x2D2D, 0x2D2D}, {0x2D30, 0x2D67}, {0x2D6F, 0x2D70},
|
||||
{0x2D7F, 0x2D96}, {0x2DA0, 0x2DA6}, {0x2DA8, 0x2DAE},
|
||||
{0x2DB0, 0x2DB6}, {0x2DB8, 0x2DBE}, {0x2DC0, 0x2DC6},
|
||||
{0x2DC8, 0x2DCE}, {0x2DD0, 0x2DD6}, {0x2DD8, 0x2DDE},
|
||||
{0x2DE0, 0x2E52}, {0x303F, 0x303F}, {0x4DC0, 0x4DFF},
|
||||
{0xA4D0, 0xA62B}, {0xA640, 0xA6F7}, {0xA700, 0xA7BF},
|
||||
{0xA7C2, 0xA7CA}, {0xA7F5, 0xA82C}, {0xA830, 0xA839},
|
||||
{0xA840, 0xA877}, {0xA880, 0xA8C5}, {0xA8CE, 0xA8D9},
|
||||
{0xA8E0, 0xA953}, {0xA95F, 0xA95F}, {0xA980, 0xA9CD},
|
||||
{0xA9CF, 0xA9D9}, {0xA9DE, 0xA9FE}, {0xAA00, 0xAA36},
|
||||
{0xAA40, 0xAA4D}, {0xAA50, 0xAA59}, {0xAA5C, 0xAAC2},
|
||||
{0xAADB, 0xAAF6}, {0xAB01, 0xAB06}, {0xAB09, 0xAB0E},
|
||||
{0xAB11, 0xAB16}, {0xAB20, 0xAB26}, {0xAB28, 0xAB2E},
|
||||
{0xAB30, 0xAB6B}, {0xAB70, 0xABED}, {0xABF0, 0xABF9},
|
||||
{0xD7B0, 0xD7C6}, {0xD7CB, 0xD7FB}, {0xD800, 0xDFFF},
|
||||
{0xFB00, 0xFB06}, {0xFB13, 0xFB17}, {0xFB1D, 0xFB36},
|
||||
{0xFB38, 0xFB3C}, {0xFB3E, 0xFB3E}, {0xFB40, 0xFB41},
|
||||
{0xFB43, 0xFB44}, {0xFB46, 0xFBC1}, {0xFBD3, 0xFD3F},
|
||||
{0xFD50, 0xFD8F}, {0xFD92, 0xFDC7}, {0xFDF0, 0xFDFD},
|
||||
{0xFE20, 0xFE2F}, {0xFE70, 0xFE74}, {0xFE76, 0xFEFC},
|
||||
{0xFEFF, 0xFEFF}, {0xFFF9, 0xFFFC}, {0x10000, 0x1000B},
|
||||
{0x1000D, 0x10026}, {0x10028, 0x1003A}, {0x1003C, 0x1003D},
|
||||
{0x1003F, 0x1004D}, {0x10050, 0x1005D}, {0x10080, 0x100FA},
|
||||
{0x10100, 0x10102}, {0x10107, 0x10133}, {0x10137, 0x1018E},
|
||||
{0x10190, 0x1019C}, {0x101A0, 0x101A0}, {0x101D0, 0x101FD},
|
||||
{0x10280, 0x1029C}, {0x102A0, 0x102D0}, {0x102E0, 0x102FB},
|
||||
{0x10300, 0x10323}, {0x1032D, 0x1034A}, {0x10350, 0x1037A},
|
||||
{0x10380, 0x1039D}, {0x1039F, 0x103C3}, {0x103C8, 0x103D5},
|
||||
{0x10400, 0x1049D}, {0x104A0, 0x104A9}, {0x104B0, 0x104D3},
|
||||
{0x104D8, 0x104FB}, {0x10500, 0x10527}, {0x10530, 0x10563},
|
||||
{0x1056F, 0x1056F}, {0x10600, 0x10736}, {0x10740, 0x10755},
|
||||
{0x10760, 0x10767}, {0x10800, 0x10805}, {0x10808, 0x10808},
|
||||
{0x1080A, 0x10835}, {0x10837, 0x10838}, {0x1083C, 0x1083C},
|
||||
{0x1083F, 0x10855}, {0x10857, 0x1089E}, {0x108A7, 0x108AF},
|
||||
{0x108E0, 0x108F2}, {0x108F4, 0x108F5}, {0x108FB, 0x1091B},
|
||||
{0x1091F, 0x10939}, {0x1093F, 0x1093F}, {0x10980, 0x109B7},
|
||||
{0x109BC, 0x109CF}, {0x109D2, 0x10A03}, {0x10A05, 0x10A06},
|
||||
{0x10A0C, 0x10A13}, {0x10A15, 0x10A17}, {0x10A19, 0x10A35},
|
||||
{0x10A38, 0x10A3A}, {0x10A3F, 0x10A48}, {0x10A50, 0x10A58},
|
||||
{0x10A60, 0x10A9F}, {0x10AC0, 0x10AE6}, {0x10AEB, 0x10AF6},
|
||||
{0x10B00, 0x10B35}, {0x10B39, 0x10B55}, {0x10B58, 0x10B72},
|
||||
{0x10B78, 0x10B91}, {0x10B99, 0x10B9C}, {0x10BA9, 0x10BAF},
|
||||
{0x10C00, 0x10C48}, {0x10C80, 0x10CB2}, {0x10CC0, 0x10CF2},
|
||||
{0x10CFA, 0x10D27}, {0x10D30, 0x10D39}, {0x10E60, 0x10E7E},
|
||||
{0x10E80, 0x10EA9}, {0x10EAB, 0x10EAD}, {0x10EB0, 0x10EB1},
|
||||
{0x10F00, 0x10F27}, {0x10F30, 0x10F59}, {0x10FB0, 0x10FCB},
|
||||
{0x10FE0, 0x10FF6}, {0x11000, 0x1104D}, {0x11052, 0x1106F},
|
||||
{0x1107F, 0x110C1}, {0x110CD, 0x110CD}, {0x110D0, 0x110E8},
|
||||
{0x110F0, 0x110F9}, {0x11100, 0x11134}, {0x11136, 0x11147},
|
||||
{0x11150, 0x11176}, {0x11180, 0x111DF}, {0x111E1, 0x111F4},
|
||||
{0x11200, 0x11211}, {0x11213, 0x1123E}, {0x11280, 0x11286},
|
||||
{0x11288, 0x11288}, {0x1128A, 0x1128D}, {0x1128F, 0x1129D},
|
||||
{0x1129F, 0x112A9}, {0x112B0, 0x112EA}, {0x112F0, 0x112F9},
|
||||
{0x11300, 0x11303}, {0x11305, 0x1130C}, {0x1130F, 0x11310},
|
||||
{0x11313, 0x11328}, {0x1132A, 0x11330}, {0x11332, 0x11333},
|
||||
{0x11335, 0x11339}, {0x1133B, 0x11344}, {0x11347, 0x11348},
|
||||
{0x1134B, 0x1134D}, {0x11350, 0x11350}, {0x11357, 0x11357},
|
||||
{0x1135D, 0x11363}, {0x11366, 0x1136C}, {0x11370, 0x11374},
|
||||
{0x11400, 0x1145B}, {0x1145D, 0x11461}, {0x11480, 0x114C7},
|
||||
{0x114D0, 0x114D9}, {0x11580, 0x115B5}, {0x115B8, 0x115DD},
|
||||
{0x11600, 0x11644}, {0x11650, 0x11659}, {0x11660, 0x1166C},
|
||||
{0x11680, 0x116B8}, {0x116C0, 0x116C9}, {0x11700, 0x1171A},
|
||||
{0x1171D, 0x1172B}, {0x11730, 0x1173F}, {0x11800, 0x1183B},
|
||||
{0x118A0, 0x118F2}, {0x118FF, 0x11906}, {0x11909, 0x11909},
|
||||
{0x1190C, 0x11913}, {0x11915, 0x11916}, {0x11918, 0x11935},
|
||||
{0x11937, 0x11938}, {0x1193B, 0x11946}, {0x11950, 0x11959},
|
||||
{0x119A0, 0x119A7}, {0x119AA, 0x119D7}, {0x119DA, 0x119E4},
|
||||
{0x11A00, 0x11A47}, {0x11A50, 0x11AA2}, {0x11AC0, 0x11AF8},
|
||||
{0x11C00, 0x11C08}, {0x11C0A, 0x11C36}, {0x11C38, 0x11C45},
|
||||
{0x11C50, 0x11C6C}, {0x11C70, 0x11C8F}, {0x11C92, 0x11CA7},
|
||||
{0x11CA9, 0x11CB6}, {0x11D00, 0x11D06}, {0x11D08, 0x11D09},
|
||||
{0x11D0B, 0x11D36}, {0x11D3A, 0x11D3A}, {0x11D3C, 0x11D3D},
|
||||
{0x11D3F, 0x11D47}, {0x11D50, 0x11D59}, {0x11D60, 0x11D65},
|
||||
{0x11D67, 0x11D68}, {0x11D6A, 0x11D8E}, {0x11D90, 0x11D91},
|
||||
{0x11D93, 0x11D98}, {0x11DA0, 0x11DA9}, {0x11EE0, 0x11EF8},
|
||||
{0x11FB0, 0x11FB0}, {0x11FC0, 0x11FF1}, {0x11FFF, 0x12399},
|
||||
{0x12400, 0x1246E}, {0x12470, 0x12474}, {0x12480, 0x12543},
|
||||
{0x13000, 0x1342E}, {0x13430, 0x13438}, {0x14400, 0x14646},
|
||||
{0x16800, 0x16A38}, {0x16A40, 0x16A5E}, {0x16A60, 0x16A69},
|
||||
{0x16A6E, 0x16A6F}, {0x16AD0, 0x16AED}, {0x16AF0, 0x16AF5},
|
||||
{0x16B00, 0x16B45}, {0x16B50, 0x16B59}, {0x16B5B, 0x16B61},
|
||||
{0x16B63, 0x16B77}, {0x16B7D, 0x16B8F}, {0x16E40, 0x16E9A},
|
||||
{0x16F00, 0x16F4A}, {0x16F4F, 0x16F87}, {0x16F8F, 0x16F9F},
|
||||
{0x1BC00, 0x1BC6A}, {0x1BC70, 0x1BC7C}, {0x1BC80, 0x1BC88},
|
||||
{0x1BC90, 0x1BC99}, {0x1BC9C, 0x1BCA3}, {0x1D000, 0x1D0F5},
|
||||
{0x1D100, 0x1D126}, {0x1D129, 0x1D1E8}, {0x1D200, 0x1D245},
|
||||
{0x1D2E0, 0x1D2F3}, {0x1D300, 0x1D356}, {0x1D360, 0x1D378},
|
||||
{0x1D400, 0x1D454}, {0x1D456, 0x1D49C}, {0x1D49E, 0x1D49F},
|
||||
{0x1D4A2, 0x1D4A2}, {0x1D4A5, 0x1D4A6}, {0x1D4A9, 0x1D4AC},
|
||||
{0x1D4AE, 0x1D4B9}, {0x1D4BB, 0x1D4BB}, {0x1D4BD, 0x1D4C3},
|
||||
{0x1D4C5, 0x1D505}, {0x1D507, 0x1D50A}, {0x1D50D, 0x1D514},
|
||||
{0x1D516, 0x1D51C}, {0x1D51E, 0x1D539}, {0x1D53B, 0x1D53E},
|
||||
{0x1D540, 0x1D544}, {0x1D546, 0x1D546}, {0x1D54A, 0x1D550},
|
||||
{0x1D552, 0x1D6A5}, {0x1D6A8, 0x1D7CB}, {0x1D7CE, 0x1DA8B},
|
||||
{0x1DA9B, 0x1DA9F}, {0x1DAA1, 0x1DAAF}, {0x1E000, 0x1E006},
|
||||
{0x1E008, 0x1E018}, {0x1E01B, 0x1E021}, {0x1E023, 0x1E024},
|
||||
{0x1E026, 0x1E02A}, {0x1E100, 0x1E12C}, {0x1E130, 0x1E13D},
|
||||
{0x1E140, 0x1E149}, {0x1E14E, 0x1E14F}, {0x1E2C0, 0x1E2F9},
|
||||
{0x1E2FF, 0x1E2FF}, {0x1E800, 0x1E8C4}, {0x1E8C7, 0x1E8D6},
|
||||
{0x1E900, 0x1E94B}, {0x1E950, 0x1E959}, {0x1E95E, 0x1E95F},
|
||||
{0x1EC71, 0x1ECB4}, {0x1ED01, 0x1ED3D}, {0x1EE00, 0x1EE03},
|
||||
{0x1EE05, 0x1EE1F}, {0x1EE21, 0x1EE22}, {0x1EE24, 0x1EE24},
|
||||
{0x1EE27, 0x1EE27}, {0x1EE29, 0x1EE32}, {0x1EE34, 0x1EE37},
|
||||
{0x1EE39, 0x1EE39}, {0x1EE3B, 0x1EE3B}, {0x1EE42, 0x1EE42},
|
||||
{0x1EE47, 0x1EE47}, {0x1EE49, 0x1EE49}, {0x1EE4B, 0x1EE4B},
|
||||
{0x1EE4D, 0x1EE4F}, {0x1EE51, 0x1EE52}, {0x1EE54, 0x1EE54},
|
||||
{0x1EE57, 0x1EE57}, {0x1EE59, 0x1EE59}, {0x1EE5B, 0x1EE5B},
|
||||
{0x1EE5D, 0x1EE5D}, {0x1EE5F, 0x1EE5F}, {0x1EE61, 0x1EE62},
|
||||
{0x1EE64, 0x1EE64}, {0x1EE67, 0x1EE6A}, {0x1EE6C, 0x1EE72},
|
||||
{0x1EE74, 0x1EE77}, {0x1EE79, 0x1EE7C}, {0x1EE7E, 0x1EE7E},
|
||||
{0x1EE80, 0x1EE89}, {0x1EE8B, 0x1EE9B}, {0x1EEA1, 0x1EEA3},
|
||||
{0x1EEA5, 0x1EEA9}, {0x1EEAB, 0x1EEBB}, {0x1EEF0, 0x1EEF1},
|
||||
{0x1F000, 0x1F003}, {0x1F005, 0x1F02B}, {0x1F030, 0x1F093},
|
||||
{0x1F0A0, 0x1F0AE}, {0x1F0B1, 0x1F0BF}, {0x1F0C1, 0x1F0CE},
|
||||
{0x1F0D1, 0x1F0F5}, {0x1F10B, 0x1F10F}, {0x1F12E, 0x1F12F},
|
||||
{0x1F16A, 0x1F16F}, {0x1F1AD, 0x1F1AD}, {0x1F1E6, 0x1F1FF},
|
||||
{0x1F321, 0x1F32C}, {0x1F336, 0x1F336}, {0x1F37D, 0x1F37D},
|
||||
{0x1F394, 0x1F39F}, {0x1F3CB, 0x1F3CE}, {0x1F3D4, 0x1F3DF},
|
||||
{0x1F3F1, 0x1F3F3}, {0x1F3F5, 0x1F3F7}, {0x1F43F, 0x1F43F},
|
||||
{0x1F441, 0x1F441}, {0x1F4FD, 0x1F4FE}, {0x1F53E, 0x1F54A},
|
||||
{0x1F54F, 0x1F54F}, {0x1F568, 0x1F579}, {0x1F57B, 0x1F594},
|
||||
{0x1F597, 0x1F5A3}, {0x1F5A5, 0x1F5FA}, {0x1F650, 0x1F67F},
|
||||
{0x1F6C6, 0x1F6CB}, {0x1F6CD, 0x1F6CF}, {0x1F6D3, 0x1F6D4},
|
||||
{0x1F6E0, 0x1F6EA}, {0x1F6F0, 0x1F6F3}, {0x1F700, 0x1F773},
|
||||
{0x1F780, 0x1F7D8}, {0x1F800, 0x1F80B}, {0x1F810, 0x1F847},
|
||||
{0x1F850, 0x1F859}, {0x1F860, 0x1F887}, {0x1F890, 0x1F8AD},
|
||||
{0x1F8B0, 0x1F8B1}, {0x1F900, 0x1F90B}, {0x1F93B, 0x1F93B},
|
||||
{0x1F946, 0x1F946}, {0x1FA00, 0x1FA53}, {0x1FA60, 0x1FA6D},
|
||||
{0x1FB00, 0x1FB92}, {0x1FB94, 0x1FBCA}, {0x1FBF0, 0x1FBF9},
|
||||
{0xE0001, 0xE0001}, {0xE0020, 0xE007F},
|
||||
}
|
||||
|
||||
var emoji = table{
|
||||
{0x203C, 0x203C}, {0x2049, 0x2049}, {0x2122, 0x2122},
|
||||
{0x2139, 0x2139}, {0x2194, 0x2199}, {0x21A9, 0x21AA},
|
||||
{0x231A, 0x231B}, {0x2328, 0x2328}, {0x2388, 0x2388},
|
||||
{0x23CF, 0x23CF}, {0x23E9, 0x23F3}, {0x23F8, 0x23FA},
|
||||
{0x24C2, 0x24C2}, {0x25AA, 0x25AB}, {0x25B6, 0x25B6},
|
||||
{0x25C0, 0x25C0}, {0x25FB, 0x25FE}, {0x2600, 0x2605},
|
||||
{0x2607, 0x2612}, {0x2614, 0x2685}, {0x2690, 0x2705},
|
||||
{0x2708, 0x2712}, {0x2714, 0x2714}, {0x2716, 0x2716},
|
||||
{0x271D, 0x271D}, {0x2721, 0x2721}, {0x2728, 0x2728},
|
||||
{0x2733, 0x2734}, {0x2744, 0x2744}, {0x2747, 0x2747},
|
||||
{0x274C, 0x274C}, {0x274E, 0x274E}, {0x2753, 0x2755},
|
||||
{0x2757, 0x2757}, {0x2763, 0x2767}, {0x2795, 0x2797},
|
||||
{0x27A1, 0x27A1}, {0x27B0, 0x27B0}, {0x27BF, 0x27BF},
|
||||
{0x2934, 0x2935}, {0x2B05, 0x2B07}, {0x2B1B, 0x2B1C},
|
||||
{0x2B50, 0x2B50}, {0x2B55, 0x2B55}, {0x3030, 0x3030},
|
||||
{0x303D, 0x303D}, {0x3297, 0x3297}, {0x3299, 0x3299},
|
||||
{0x1F000, 0x1F0FF}, {0x1F10D, 0x1F10F}, {0x1F12F, 0x1F12F},
|
||||
{0x1F16C, 0x1F171}, {0x1F17E, 0x1F17F}, {0x1F18E, 0x1F18E},
|
||||
{0x1F191, 0x1F19A}, {0x1F1AD, 0x1F1E5}, {0x1F201, 0x1F20F},
|
||||
{0x1F21A, 0x1F21A}, {0x1F22F, 0x1F22F}, {0x1F232, 0x1F23A},
|
||||
{0x1F23C, 0x1F23F}, {0x1F249, 0x1F3FA}, {0x1F400, 0x1F53D},
|
||||
{0x1F546, 0x1F64F}, {0x1F680, 0x1F6FF}, {0x1F774, 0x1F77F},
|
||||
{0x1F7D5, 0x1F7FF}, {0x1F80C, 0x1F80F}, {0x1F848, 0x1F84F},
|
||||
{0x1F85A, 0x1F85F}, {0x1F888, 0x1F88F}, {0x1F8AE, 0x1F8FF},
|
||||
{0x1F90C, 0x1F93A}, {0x1F93C, 0x1F945}, {0x1F947, 0x1FAFF},
|
||||
{0x1FC00, 0x1FFFD},
|
||||
}
|
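These {first, last} range tables appear to belong to the vendored character-width code (they match the shape of generated rune-width tables such as mattn/go-runewidth's). Purely as a hedged illustration of how such sorted, non-overlapping ranges are queried, here is a self-contained binary-search membership sketch; `interval` and `inTable` are invented names for this sketch, only the `table`-of-ranges shape comes from the diff above.

```go
package main

import (
	"fmt"
	"sort"
)

// interval mirrors the {first, last} pairs in the tables above; the real
// vendored package may use different type and function names.
type interval struct {
	first, last rune
}

type table []interval

// inTable reports whether r falls inside one of the sorted, non-overlapping
// ranges, using a binary search over the range ends.
func inTable(r rune, t table) bool {
	i := sort.Search(len(t), func(i int) bool { return t[i].last >= r })
	return i < len(t) && t[i].first <= r
}

func main() {
	emoji := table{{0x2B50, 0x2B50}, {0x1F300, 0x1F5FF}}
	fmt.Println(inTable(0x2B50, emoji)) // true: U+2B50 (white medium star)
	fmt.Println(inTable('A', emoji))    // false
}
```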
154
vendor/github.com/mattn/go-xmpp/xmpp.go
generated
vendored
|
@ -199,13 +199,35 @@ type Options struct {
|
|||
// NewClient establishes a new Client connection based on a set of Options.
|
||||
func (o Options) NewClient() (*Client, error) {
|
||||
host := o.Host
|
||||
if strings.TrimSpace(host) == "" {
|
||||
a := strings.SplitN(o.User, "@", 2)
|
||||
if len(a) == 2 {
|
||||
if _, addrs, err := net.LookupSRV("xmpp-client", "tcp", a[1]); err == nil {
|
||||
if len(addrs) > 0 {
|
||||
// default to first record
|
||||
host = fmt.Sprintf("%s:%d", addrs[0].Target, addrs[0].Port)
|
||||
defP := addrs[0].Priority
|
||||
for _, adr := range addrs {
|
||||
if adr.Priority < defP {
|
||||
host = fmt.Sprintf("%s:%d", adr.Target, adr.Port)
|
||||
defP = adr.Priority
|
||||
}
|
||||
}
|
||||
} else {
|
||||
host = a[1]
|
||||
}
|
||||
} else {
|
||||
host = a[1]
|
||||
}
|
||||
}
|
||||
}
|
||||
c, err := connect(host, o.User, o.Password)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if strings.LastIndex(o.Host, ":") > 0 {
|
||||
host = host[:strings.LastIndex(o.Host, ":")]
|
||||
if strings.LastIndex(host, ":") > 0 {
|
||||
host = host[:strings.LastIndex(host, ":")]
|
||||
}
|
||||
|
||||
client := new(Client)
|
||||
|
@ -575,6 +597,8 @@ type Chat struct {
|
|||
Text string
|
||||
Subject string
|
||||
Thread string
|
||||
Ooburl string
|
||||
Oobdesc string
|
||||
Roster Roster
|
||||
Other []string
|
||||
OtherElem []XMLElement
|
||||
|
@ -616,6 +640,11 @@ func (c *Client) Recv() (stanza interface{}, err error) {
|
|||
}
|
||||
switch v := val.(type) {
|
||||
case *clientMessage:
|
||||
if v.Event.XMLNS == XMPPNS_PUBSUB_EVENT {
|
||||
// Handle Pubsub notifications
|
||||
return pubsubClientToReturn(v.Event), nil
|
||||
}
|
||||
|
||||
stamp, _ := time.Parse(
|
||||
"2006-01-02T15:04:05Z",
|
||||
v.Delay.Stamp,
|
||||
|
@ -640,35 +669,138 @@ func (c *Client) Recv() (stanza interface{}, err error) {
|
|||
case *clientPresence:
|
||||
return Presence{v.From, v.To, v.Type, v.Show, v.Status}, nil
|
||||
case *clientIQ:
|
||||
switch {
|
||||
case v.Query.XMLName.Space == "urn:xmpp:ping":
|
||||
// TODO check more strictly
|
||||
if bytes.Equal(bytes.TrimSpace(v.Query), []byte(`<ping xmlns='urn:xmpp:ping'/>`)) || bytes.Equal(bytes.TrimSpace(v.Query), []byte(`<ping xmlns="urn:xmpp:ping"/>`)) {
|
||||
err := c.SendResultPing(v.ID, v.From)
|
||||
if err != nil {
|
||||
return Chat{}, err
|
||||
}
|
||||
fallthrough
|
||||
case v.Type == "error":
|
||||
switch v.ID {
|
||||
case "sub1":
|
||||
// Pubsub subscription failed
|
||||
var errs []clientPubsubError
|
||||
err := xml.Unmarshal([]byte(v.Error.InnerXML), &errs)
|
||||
if err != nil {
|
||||
return PubsubSubscription{}, err
|
||||
}
|
||||
|
||||
var errsStr []string
|
||||
for _, e := range errs {
|
||||
errsStr = append(errsStr, e.XMLName.Local)
|
||||
}
|
||||
|
||||
return PubsubSubscription{
|
||||
Errors: errsStr,
|
||||
}, nil
|
||||
}
|
||||
case v.Type == "result" && v.ID == "unsub1":
|
||||
// Unsubscribing MAY contain a pubsub element. But it does
|
||||
// not have to
|
||||
return PubsubUnsubscription{
|
||||
SubID: "",
|
||||
JID: v.From,
|
||||
Node: "",
|
||||
Errors: nil,
|
||||
}, nil
|
||||
case v.Query.XMLName.Local == "pubsub":
|
||||
switch v.ID {
|
||||
case "sub1":
|
||||
// Subscription or unsubscription was successful
|
||||
var sub clientPubsubSubscription
|
||||
err := xml.Unmarshal([]byte(v.Query.InnerXML), &sub)
|
||||
if err != nil {
|
||||
return PubsubSubscription{}, err
|
||||
}
|
||||
|
||||
return PubsubSubscription{
|
||||
SubID: sub.SubID,
|
||||
JID: sub.JID,
|
||||
Node: sub.Node,
|
||||
Errors: nil,
|
||||
}, nil
|
||||
case "unsub1":
|
||||
var sub clientPubsubSubscription
|
||||
err := xml.Unmarshal([]byte(v.Query.InnerXML), &sub)
|
||||
if err != nil {
|
||||
return PubsubUnsubscription{}, err
|
||||
}
|
||||
|
||||
return PubsubUnsubscription{
|
||||
SubID: sub.SubID,
|
||||
JID: v.From,
|
||||
Node: sub.Node,
|
||||
Errors: nil,
|
||||
}, nil
|
||||
case "items1", "items3":
|
||||
var p clientPubsubItems
|
||||
err := xml.Unmarshal([]byte(v.Query.InnerXML), &p)
|
||||
if err != nil {
|
||||
return PubsubItems{}, err
|
||||
}
|
||||
|
||||
return PubsubItems{
|
||||
p.Node,
|
||||
pubsubItemsToReturn(p.Items),
|
||||
}, nil
|
||||
}
|
||||
case v.Query.XMLName.Local == "":
|
||||
return IQ{ID: v.ID, From: v.From, To: v.To, Type: v.Type}, nil
|
||||
default:
|
||||
res, err := xml.Marshal(v.Query)
|
||||
if err != nil {
|
||||
return Chat{}, err
|
||||
}
|
||||
|
||||
return IQ{ID: v.ID, From: v.From, To: v.To, Type: v.Type,
|
||||
Query: res}, nil
|
||||
}
|
||||
return IQ{ID: v.ID, From: v.From, To: v.To, Type: v.Type, Query: v.Query}, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Send sends the message wrapped inside an XMPP message stanza body.
|
||||
func (c *Client) Send(chat Chat) (n int, err error) {
|
||||
var subtext = ``
|
||||
var thdtext = ``
|
||||
var subtext, thdtext, oobtext string
|
||||
if chat.Subject != `` {
|
||||
subtext = `<subject>` + xmlEscape(chat.Subject) + `</subject>`
|
||||
}
|
||||
if chat.Thread != `` {
|
||||
thdtext = `<thread>` + xmlEscape(chat.Thread) + `</thread>`
|
||||
}
|
||||
if chat.Ooburl != `` {
|
||||
oobtext = `<x xmlns="jabber:x:oob"><url>` + xmlEscape(chat.Ooburl) + `</url>`
|
||||
if chat.Oobdesc != `` {
|
||||
oobtext += `<desc>` + xmlEscape(chat.Oobdesc) + `</desc>`
|
||||
}
|
||||
oobtext += `</x>`
|
||||
}
|
||||
|
||||
stanza := "<message to='%s' type='%s' id='%s' xml:lang='en'>" + subtext + "<body>%s</body>" + thdtext + "</message>"
|
||||
stanza := "<message to='%s' type='%s' id='%s' xml:lang='en'>" + subtext + "<body>%s</body>" + oobtext + thdtext + "</message>"
|
||||
|
||||
return fmt.Fprintf(c.conn, stanza,
|
||||
xmlEscape(chat.Remote), xmlEscape(chat.Type), cnonce(), xmlEscape(chat.Text))
|
||||
}
|
||||
|
||||
// SendOOB sends OOB data wrapped inside an XMPP message stanza, without actual body.
|
||||
func (c *Client) SendOOB(chat Chat) (n int, err error) {
|
||||
var thdtext, oobtext string
|
||||
if chat.Thread != `` {
|
||||
thdtext = `<thread>` + xmlEscape(chat.Thread) + `</thread>`
|
||||
}
|
||||
if chat.Ooburl != `` {
|
||||
oobtext = `<x xmlns="jabber:x:oob"><url>` + xmlEscape(chat.Ooburl) + `</url>`
|
||||
if chat.Oobdesc != `` {
|
||||
oobtext += `<desc>` + xmlEscape(chat.Oobdesc) + `</desc>`
|
||||
}
|
||||
oobtext += `</x>`
|
||||
}
|
||||
return fmt.Fprintf(c.conn, "<message to='%s' type='%s' id='%s' xml:lang='en'>"+oobtext+thdtext+"</message>",
|
||||
xmlEscape(chat.Remote), xmlEscape(chat.Type), cnonce())
|
||||
}
|
||||
|
||||
// SendOrg sends the original text without being wrapped in an XMPP message stanza.
|
||||
func (c *Client) SendOrg(org string) (n int, err error) {
|
||||
return fmt.Fprint(c.conn, org)
|
||||
|
@ -777,6 +909,9 @@ type clientMessage struct {
|
|||
Body string `xml:"body"`
|
||||
Thread string `xml:"thread"`
|
||||
|
||||
// Pubsub
|
||||
Event clientPubsubEvent `xml:"event"`
|
||||
|
||||
// Any hasn't matched element
|
||||
Other []XMLElement `xml:",any"`
|
||||
|
||||
|
@ -848,7 +983,7 @@ type clientIQ struct {
|
|||
ID string `xml:"id,attr"`
|
||||
To string `xml:"to,attr"`
|
||||
Type string `xml:"type,attr"` // error, get, result, set
|
||||
Query []byte `xml:",innerxml"`
|
||||
Query XMLElement `xml:",any"`
|
||||
Error clientError
|
||||
Bind bindBind
|
||||
}
|
||||
|
@ -856,8 +991,9 @@ type clientIQ struct {
|
|||
type clientError struct {
|
||||
XMLName xml.Name `xml:"jabber:client error"`
|
||||
Code string `xml:",attr"`
|
||||
Type string `xml:",attr"`
|
||||
Type string `xml:"type,attr"`
|
||||
Any xml.Name
|
||||
InnerXML []byte `xml:",innerxml"`
|
||||
Text string
|
||||
}
|
||||
|
||||
|
|
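The xmpp.go hunks above add out-of-band data (jabber:x:oob) to Send, introduce SendOOB, and make Recv aware of pubsub notifications. Below is a minimal, hedged sketch of sending a message with an OOB attachment through the patched API; the server, account and recipient values are placeholders, and all other Options fields keep their defaults.

```go
package main

import (
	"log"

	xmpp "github.com/mattn/go-xmpp"
)

func main() {
	// Placeholder connection details.
	options := xmpp.Options{
		Host:     "example.org:5222",
		User:     "bot@example.org",
		Password: "secret",
	}

	client, err := options.NewClient()
	if err != nil {
		log.Fatal(err)
	}

	// With this patch, Send appends an <x xmlns="jabber:x:oob"> element
	// after the <body> when Ooburl is set.
	if _, err := client.Send(xmpp.Chat{
		Remote:  "someone@example.org",
		Type:    "chat",
		Text:    "Here is the file I mentioned.",
		Ooburl:  "https://example.org/file.png",
		Oobdesc: "A picture",
	}); err != nil {
		log.Fatal(err)
	}

	// SendOOB sends the same OOB payload without a <body>.
	if _, err := client.SendOOB(xmpp.Chat{
		Remote: "someone@example.org",
		Type:   "chat",
		Ooburl: "https://example.org/file.png",
	}); err != nil {
		log.Fatal(err)
	}
}
```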
133
vendor/github.com/mattn/go-xmpp/xmpp_pubsub.go
generated
vendored
Normal file
|
@ -0,0 +1,133 @@
|
|||
package xmpp
|
||||
|
||||
import (
|
||||
"encoding/xml"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
const (
|
||||
XMPPNS_PUBSUB = "http://jabber.org/protocol/pubsub"
|
||||
XMPPNS_PUBSUB_EVENT = "http://jabber.org/protocol/pubsub#event"
|
||||
)
|
||||
|
||||
type clientPubsubItem struct {
|
||||
XMLName xml.Name `xml:"item"`
|
||||
ID string `xml:"id,attr"`
|
||||
Body []byte `xml:",innerxml"`
|
||||
}
|
||||
|
||||
type clientPubsubItems struct {
|
||||
XMLName xml.Name `xml:"items"`
|
||||
Node string `xml:"node,attr"`
|
||||
Items []clientPubsubItem `xml:"item"`
|
||||
}
|
||||
|
||||
type clientPubsub struct {
|
||||
XMLName xml.Name `xml:"pubsub"`
|
||||
Items clientPubsubItems `xml:"items"`
|
||||
}
|
||||
|
||||
type clientPubsubEvent struct {
|
||||
XMLName xml.Name `xml:"event"`
|
||||
XMLNS string `xml:"xmlns,attr"`
|
||||
Items clientPubsubItems `xml:"items"`
|
||||
}
|
||||
|
||||
type clientPubsubError struct {
|
||||
XMLName xml.Name
|
||||
}
|
||||
|
||||
type clientPubsubSubscription struct {
|
||||
XMLName xml.Name `xml:"subscription"`
|
||||
Node string `xml:"node,attr"`
|
||||
JID string `xml:"jid,attr"`
|
||||
SubID string `xml:"subid,attr"`
|
||||
}
|
||||
|
||||
type PubsubEvent struct {
|
||||
Node string
|
||||
Items []PubsubItem
|
||||
}
|
||||
|
||||
type PubsubSubscription struct {
|
||||
SubID string
|
||||
JID string
|
||||
Node string
|
||||
Errors []string
|
||||
}
|
||||
type PubsubUnsubscription PubsubSubscription
|
||||
|
||||
type PubsubItem struct {
|
||||
ID string
|
||||
InnerXML []byte
|
||||
}
|
||||
|
||||
type PubsubItems struct {
|
||||
Node string
|
||||
Items []PubsubItem
|
||||
}
|
||||
|
||||
// Converts []clientPubsubItem to []PubsubItem
|
||||
func pubsubItemsToReturn(items []clientPubsubItem) []PubsubItem {
|
||||
var tmp []PubsubItem
|
||||
for _, i := range items {
|
||||
tmp = append(tmp, PubsubItem{
|
||||
ID: i.ID,
|
||||
InnerXML: i.Body,
|
||||
})
|
||||
}
|
||||
|
||||
return tmp
|
||||
}
|
||||
|
||||
func pubsubClientToReturn(event clientPubsubEvent) PubsubEvent {
|
||||
return PubsubEvent{
|
||||
Node: event.Items.Node,
|
||||
Items: pubsubItemsToReturn(event.Items.Items),
|
||||
}
|
||||
}
|
||||
|
||||
func pubsubStanza(body string) string {
|
||||
return fmt.Sprintf("<pubsub xmlns='%s'>%s</pubsub>",
|
||||
XMPPNS_PUBSUB, body)
|
||||
}
|
||||
|
||||
func pubsubSubscriptionStanza(node, jid string) string {
|
||||
body := fmt.Sprintf("<subscribe node='%s' jid='%s'/>",
|
||||
xmlEscape(node),
|
||||
xmlEscape(jid))
|
||||
return pubsubStanza(body)
|
||||
}
|
||||
|
||||
func pubsubUnsubscriptionStanza(node, jid string) string {
|
||||
body := fmt.Sprintf("<unsubscribe node='%s' jid='%s'/>",
|
||||
xmlEscape(node),
|
||||
xmlEscape(jid))
|
||||
return pubsubStanza(body)
|
||||
}
|
||||
|
||||
func (c *Client) PubsubSubscribeNode(node, jid string) {
|
||||
c.RawInformation(c.jid,
|
||||
jid,
|
||||
"sub1",
|
||||
"set",
|
||||
pubsubSubscriptionStanza(node, c.jid))
|
||||
}
|
||||
|
||||
func (c *Client) PubsubUnsubscribeNode(node, jid string) {
|
||||
c.RawInformation(c.jid,
|
||||
jid,
|
||||
"unsub1",
|
||||
"set",
|
||||
pubsubUnsubscriptionStanza(node, c.jid))
|
||||
}
|
||||
|
||||
func (c *Client) PubsubRequestLastItems(node, jid string) {
|
||||
body := fmt.Sprintf("<items node='%s'/>", node)
|
||||
c.RawInformation(c.jid, jid, "items1", "get", pubsubStanza(body))
|
||||
}
|
||||
|
||||
func (c *Client) PubsubRequestItem(node, jid, id string) {
|
||||
body := fmt.Sprintf("<items node='%s'><item id='%s'/></items>", node, id)
|
||||
c.RawInformation(c.jid, jid, "items3", "get", pubsubStanza(body))
|
||||
}
|
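The new xmpp_pubsub.go exposes typed pubsub helpers (subscribe, unsubscribe, fetch items), and Recv now returns PubsubSubscription, PubsubEvent and PubsubItems values. A rough usage sketch follows, assuming an already connected *xmpp.Client; the service JID and node name are illustrative.

```go
package sketch

import (
	"log"

	xmpp "github.com/mattn/go-xmpp"
)

// watchNode subscribes to a pubsub node and logs incoming notifications.
// client is assumed to be an already connected *xmpp.Client.
func watchNode(client *xmpp.Client, service, node string) error {
	// Sends an IQ with id "sub1"; the matching result (or error) later
	// comes back from Recv as a PubsubSubscription.
	client.PubsubSubscribeNode(node, service)

	// Fetch the most recent items once (IQ id "items1" -> PubsubItems).
	client.PubsubRequestLastItems(node, service)

	for {
		stanza, err := client.Recv()
		if err != nil {
			return err
		}
		switch v := stanza.(type) {
		case xmpp.PubsubSubscription:
			if len(v.Errors) > 0 {
				log.Printf("subscribing to %s failed: %v", v.Node, v.Errors)
			}
		case xmpp.PubsubEvent:
			for _, item := range v.Items {
				log.Printf("notification on %s, item %s: %s", v.Node, item.ID, item.InnerXML)
			}
		case xmpp.PubsubItems:
			for _, item := range v.Items {
				log.Printf("fetched from %s: item %s", v.Node, item.ID)
			}
		}
	}
}
```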
3
vendor/github.com/mmcdole/gofeed/.gitignore
generated
vendored
|
@ -26,3 +26,6 @@ _testmain.go
|
|||
.DS_STORE
|
||||
|
||||
cmd/ftest/ftest
|
||||
|
||||
# Goland specific files
|
||||
.idea
|
||||
|
|
38
vendor/github.com/mmcdole/gofeed/.travis.yml
generated
vendored
|
@ -1,24 +1,24 @@
|
|||
language: go
|
||||
|
||||
env:
|
||||
global:
|
||||
- GO111MODULE="on"
|
||||
|
||||
go:
|
||||
- 1.11.x
|
||||
- 1.12.x
|
||||
- 1.13.x
|
||||
- tip
|
||||
|
||||
matrix:
|
||||
include:
|
||||
- go: "tip"
|
||||
- go: "1.11"
|
||||
env: GO111MODULE=on
|
||||
- go: "1.11"
|
||||
- go: "1.10"
|
||||
- go: "1.9"
|
||||
- go: "1.8"
|
||||
- go: "1.7"
|
||||
- go: "1.6"
|
||||
- go: "1.5"
|
||||
- go: "1.4"
|
||||
install:
|
||||
- go get -t -v ./...
|
||||
- go get github.com/go-playground/overalls
|
||||
allow_failures:
|
||||
- go: tip
|
||||
fast_finish: true
|
||||
|
||||
before_install:
|
||||
- go get github.com/mattn/goveralls
|
||||
- go get golang.org/x/tools/cmd/cover
|
||||
|
||||
script:
|
||||
- go install ./...
|
||||
- go test -v ./...
|
||||
- $GOPATH/bin/overalls -project=github.com/mmcdole/gofeed -covermode=count -ignore=.git,vendor -debug
|
||||
after_success:
|
||||
- $GOPATH/bin/goveralls -coverprofile=overalls.coverprofile -service=travis-ci
|
||||
- $GOPATH/bin/goveralls -service=travis-ci
|
||||
|
|
135
vendor/github.com/mmcdole/gofeed/README.md
generated
vendored
|
@ -2,9 +2,10 @@
|
|||
|
||||
[![Build Status](https://travis-ci.org/mmcdole/gofeed.svg?branch=master)](https://travis-ci.org/mmcdole/gofeed) [![Coverage Status](https://coveralls.io/repos/github/mmcdole/gofeed/badge.svg?branch=master)](https://coveralls.io/github/mmcdole/gofeed?branch=master) [![Go Report Card](https://goreportcard.com/badge/github.com/mmcdole/gofeed)](https://goreportcard.com/report/github.com/mmcdole/gofeed) [![](https://godoc.org/github.com/mmcdole/gofeed?status.svg)](http://godoc.org/github.com/mmcdole/gofeed) [![License](http://img.shields.io/:license-mit-blue.svg)](http://doge.mit-license.org)
|
||||
|
||||
The `gofeed` library is a robust feed parser that supports parsing both [RSS](https://en.wikipedia.org/wiki/RSS) and [Atom](https://en.wikipedia.org/wiki/Atom_(standard)) feeds. The library provides a universal `gofeed.Parser` that will parse and convert all feed types into a hybrid `gofeed.Feed` model. You also have the option of utilizing the feed specific `atom.Parser` or `rss.Parser` parsers which generate `atom.Feed` and `rss.Feed` respectively.
|
||||
The `gofeed` library is a robust feed parser that supports parsing both [RSS](https://en.wikipedia.org/wiki/RSS), [Atom](<https://en.wikipedia.org/wiki/Atom_(standard)>) and [JSON](https://jsonfeed.org/version/1) feeds. The library provides a universal `gofeed.Parser` that will parse and convert all feed types into a hybrid `gofeed.Feed` model. You also have the option of utilizing the feed specific `atom.Parser` or `rss.Parser` or `json.Parser` parsers which generate `atom.Feed`, `rss.Feed` and `json.Feed` respectively.
|
||||
|
||||
## Table of Contents
|
||||
|
||||
- [Features](#features)
|
||||
- [Overview](#overview)
|
||||
- [Basic Usage](#basic-usage)
|
||||
|
@ -19,16 +20,18 @@ The `gofeed` library is a robust feed parser that supports parsing both [RSS](ht
|
|||
## Features
|
||||
|
||||
#### Supported feed types:
|
||||
* RSS 0.90
|
||||
* Netscape RSS 0.91
|
||||
* Userland RSS 0.91
|
||||
* RSS 0.92
|
||||
* RSS 0.93
|
||||
* RSS 0.94
|
||||
* RSS 1.0
|
||||
* RSS 2.0
|
||||
* Atom 0.3
|
||||
* Atom 1.0
|
||||
|
||||
- RSS 0.90
|
||||
- Netscape RSS 0.91
|
||||
- Userland RSS 0.91
|
||||
- RSS 0.92
|
||||
- RSS 0.93
|
||||
- RSS 0.94
|
||||
- RSS 1.0
|
||||
- RSS 2.0
|
||||
- Atom 0.3
|
||||
- Atom 1.0
|
||||
- JSON 1.0
|
||||
|
||||
#### Extension Support
|
||||
|
||||
|
@ -39,6 +42,7 @@ It parses all other feed extensions in a generic way (see the [Extensions](#exte
|
|||
#### Invalid Feeds
|
||||
|
||||
A best-effort attempt is made at parsing broken and invalid XML feeds. Currently, `gofeed` can succesfully parse feeds with the following issues:
|
||||
|
||||
- Unescaped/Naked Markup in feed elements
|
||||
- Undeclared namespace prefixes
|
||||
- Missing closing tags on certain elements
|
||||
|
@ -48,25 +52,25 @@ A best-effort attempt is made at parsing broken and invalid XML feeds. Currentl
|
|||
|
||||
## Overview
|
||||
|
||||
The `gofeed` library is comprised of a universal feed parser and several feed specific parsers. Which one you choose depends entirely on your usecase. If you will be handling both rss and atom feeds then it makes sense to use the `gofeed.Parser`. If you know ahead of time that you will only be parsing one feed type then it would make sense to use `rss.Parser` or `atom.Parser`.
|
||||
The `gofeed` library is comprised of a universal feed parser and several feed specific parsers. Which one you choose depends entirely on your usecase. If you will be handling rss, atom and json feeds then it makes sense to use the `gofeed.Parser`. If you know ahead of time that you will only be parsing one feed type then it would make sense to use `rss.Parser` or `atom.Parser` or `json.Parser`.
|
||||
|
||||
#### Universal Feed Parser
|
||||
|
||||
The universal `gofeed.Parser` works in 3 stages: detection, parsing and translation. It first detects the feed type that it is currently parsing. Then it uses a feed specific parser to parse the feed into its true representation which will be either a `rss.Feed` or `atom.Feed`. These models cover every field possible for their respective feed types. Finally, they are *translated* into a `gofeed.Feed` model that is a hybrid of both feed types. Performing the universal feed parsing in these 3 stages allows for more flexibility and keeps the code base more maintainable by separating RSS and Atom parsing into seperate packages.
|
||||
The universal `gofeed.Parser` works in 3 stages: detection, parsing and translation. It first detects the feed type that it is currently parsing. Then it uses a feed specific parser to parse the feed into its true representation which will be either a `rss.Feed` or `atom.Feed` or `json.Feed`. These models cover every field possible for their respective feed types. Finally, they are _translated_ into a `gofeed.Feed` model that is a hybrid of all feed types. Performing the universal feed parsing in these 3 stages allows for more flexibility and keeps the code base more maintainable by separating RSS, Atom and Json parsing into seperate packages.
|
||||
|
||||
![Diagram](docs/sequence.png)
|
||||
|
||||
The translation step is done by anything which adheres to the `gofeed.Translator` interface. The `DefaultRSSTranslator` and `DefaultAtomTranslator` are used behind the scenes when you use the `gofeed.Parser` with its default settings. You can see how they translate fields from ```atom.Feed``` or ```rss.Feed``` to the universal ```gofeed.Feed``` struct in the [Default Mappings](#default-mappings) section. However, should you disagree with the way certain fields are translated you can easily supply your own `gofeed.Translator` and override this behavior. See the [Advanced Usage](#advanced-usage) section for an example how to do this.
|
||||
The translation step is done by anything which adheres to the `gofeed.Translator` interface. The `DefaultRSSTranslator`, `DefaultAtomTranslator`, `DefaultJSONTranslator` are used behind the scenes when you use the `gofeed.Parser` with its default settings. You can see how they translate fields from `atom.Feed` or `rss.Feed` `json.Feed` to the universal `gofeed.Feed` struct in the [Default Mappings](#default-mappings) section. However, should you disagree with the way certain fields are translated you can easily supply your own `gofeed.Translator` and override this behavior. See the [Advanced Usage](#advanced-usage) section for an example how to do this.
|
||||
|
||||
#### Feed Specific Parsers
|
||||
|
||||
The `gofeed` library provides two feed specific parsers: `atom.Parser` and `rss.Parser`. If the hybrid `gofeed.Feed` model that the universal `gofeed.Parser` produces does not contain a field from the `atom.Feed` or `rss.Feed` model that you require, it might be beneficial to use the feed specific parsers. When using the `atom.Parser` or `rss.Parser` directly, you can access all of fields found in the `atom.Feed` and `rss.Feed` models. It is also marginally faster because you are able to skip the translation step.
|
||||
The `gofeed` library provides two feed specific parsers: `atom.Parser`, `rss.Parser` and `json.Parser`. If the hybrid `gofeed.Feed` model that the universal `gofeed.Parser` produces does not contain a field from the `atom.Feed` or `rss.Feed` or `json.Feed` model that you require, it might be beneficial to use the feed specific parsers. When using the `atom.Parser` or `rss.Parser` or `json.Parser` directly, you can access all of fields found in the `atom.Feed`, `rss.Feed` and `json.Feed` models. It is also marginally faster because you are able to skip the translation step.
|
||||
|
||||
## Basic Usage
|
||||
|
||||
#### Universal Feed Parser
|
||||
|
||||
The most common usage scenario will be to use ```gofeed.Parser``` to parse an arbitrary RSS or Atom feed into the hybrid ```gofeed.Feed``` model. This hybrid model allows you to treat RSS and Atom feeds the same.
|
||||
The most common usage scenario will be to use `gofeed.Parser` to parse an arbitrary RSS or Atom or JSON feed into the hybrid `gofeed.Feed` model. This hybrid model allows you to treat RSS, Atom and JSON feeds the same.
|
||||
|
||||
##### Parse a feed from an URL:
|
||||
|
||||
|
@ -99,9 +103,19 @@ feed, _ := fp.Parse(file)
|
|||
fmt.Println(feed.Title)
|
||||
```
|
||||
|
||||
##### Parse a feed from an URL with a 60s timeout:
|
||||
|
||||
```go
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second)
|
||||
defer cancel()
|
||||
fp := gofeed.NewParser()
|
||||
feed, _ := fp.ParseURLWithContext("http://feeds.twit.tv/twit.xml", ctx)
|
||||
fmt.Println(feed.Title)
|
||||
```
|
||||
|
||||
#### Feed Specific Parsers
|
||||
|
||||
You can easily use the `rss.Parser` and `atom.Parser` directly if you have a usage scenario that requires it:
|
||||
You can easily use the `rss.Parser`, `atom.Parser` or `json.Parser` directly if you have a usage scenario that requires it:
|
||||
|
||||
##### Parse a RSS feed into a `rss.Feed`
|
||||
|
||||
|
@ -127,11 +141,20 @@ atomFeed, _ := fp.Parse(strings.NewReader(feedData))
|
|||
fmt.Println(atomFeed.Subtitle)
|
||||
```
|
||||
|
||||
##### Parse a JSON feed into a `json.Feed`
|
||||
|
||||
```go
|
||||
feedData := `{"version":"1.0", "home_page_url": "https://daringfireball.net"}`
|
||||
fp := json.Parser{}
|
||||
jsonFeed, _ := fp.Parse(strings.NewReader(feedData))
|
||||
fmt.Println(jsonFeed.HomePageURL)
|
||||
```
|
||||
|
||||
## Advanced Usage
|
||||
|
||||
##### Parse a feed while using a custom translator
|
||||
|
||||
The mappings and precedence order that are outlined in the [Default Mappings](#default-mappings) section are provided by the following two structs: `DefaultRSSTranslator` and `DefaultAtomTranslator`. If you have fields that you think should have a different precedence, or if you want to make a translator that is aware of an unsupported extension you can do this by specifying your own RSS or Atom translator when using the `gofeed.Parser`.
|
||||
The mappings and precedence order that are outlined in the [Default Mappings](#default-mappings) section are provided by the following two structs: `DefaultRSSTranslator`, `DefaultAtomTranslator` and `DefaultJSONTranslator`. If you have fields that you think should have a different precedence, or if you want to make a translator that is aware of an unsupported extension you can do this by specifying your own RSS or Atom or JSON translator when using the `gofeed.Parser`.
|
||||
|
||||
Here is a simple example of creating a custom `Translator` that makes the `/rss/channel/itunes:author` field have a higher precedence than the `/rss/channel/managingEditor` field in RSS feeds. We will wrap the existing `DefaultRSSTranslator` since we only want to change the behavior for a single field.
|
||||
|
||||
|
@ -203,44 +226,45 @@ In addition to the generic handling of extensions, `gofeed` also has built in su
|
|||
|
||||
## Default Mappings
|
||||
|
||||
The ```DefaultRSSTranslator``` and the ```DefaultAtomTranslator``` map the following ```rss.Feed``` and ```atom.Feed``` fields to their respective ```gofeed.Feed``` fields. They are listed in order of precedence (highest to lowest):
|
||||
The `DefaultRSSTranslator`, the `DefaultAtomTranslator` and the `DefaultJSONTranslator` map the following `rss.Feed`, `atom.Feed` and `json.Feed` fields to their respective `gofeed.Feed` fields. They are listed in order of precedence (highest to lowest):
|
||||
|
||||
`gofeed.Feed` | RSS | Atom | JSON
|
||||
--- | --- | --- | --
|
||||
Title | /rss/channel/title<br>/rdf:RDF/channel/title<br>/rss/channel/dc:title<br>/rdf:RDF/channel/dc:title | /feed/title | /title
|
||||
Description | /rss/channel/description<br>/rdf:RDF/channel/description<br>/rss/channel/itunes:subtitle | /feed/subtitle<br>/feed/tagline | /description
|
||||
Link | /rss/channel/link<br>/rdf:RDF/channel/link | /feed/link[@rel=”alternate”]/@href<br>/feed/link[not(@rel)]/@href | /home_page_url
|
||||
FeedLink | /rss/channel/atom:link[@rel="self"]/@href<br>/rdf:RDF/channel/atom:link[@rel="self"]/@href | /feed/link[@rel="self"]/@href | /feed_url
|
||||
Updated | /rss/channel/lastBuildDate<br>/rss/channel/dc:date<br>/rdf:RDF/channel/dc:date | /feed/updated<br>/feed/modified | /items[0]/date_modified
|
||||
Published | /rss/channel/pubDate | | /items[0]/date_published
|
||||
Author | /rss/channel/managingEditor<br>/rss/channel/webMaster<br>/rss/channel/dc:author<br>/rdf:RDF/channel/dc:author<br>/rss/channel/dc:creator<br>/rdf:RDF/channel/dc:creator<br>/rss/channel/itunes:author | /feed/author | /author/name
|
||||
Language | /rss/channel/language<br>/rss/channel/dc:language<br>/rdf:RDF/channel/dc:language | /feed/@xml:lang |
|
||||
Image | /rss/channel/image<br>/rdf:RDF/image<br>/rss/channel/itunes:image | /feed/logo | /icon
|
||||
Copyright | /rss/channel/copyright<br>/rss/channel/dc:rights<br>/rdf:RDF/channel/dc:rights | /feed/rights<br>/feed/copyright |
|
||||
Generator | /rss/channel/generator | /feed/generator |
|
||||
Categories | /rss/channel/category<br>/rss/channel/itunes:category<br>/rss/channel/itunes:keywords<br>/rss/channel/dc:subject<br>/rdf:RDF/channel/dc:subject | /feed/category |
|
||||
|
||||
|
||||
`gofeed.Feed` | RSS | Atom
|
||||
--- | --- | ---
|
||||
Title | /rss/channel/title<br>/rdf:RDF/channel/title<br>/rss/channel/dc:title<br>/rdf:RDF/channel/dc:title | /feed/title
|
||||
Description | /rss/channel/description<br>/rdf:RDF/channel/description<br>/rss/channel/itunes:subtitle | /feed/subtitle<br>/feed/tagline
|
||||
Link | /rss/channel/link<br>/rdf:RDF/channel/link | /feed/link[@rel=”alternate”]/@href<br>/feed/link[not(@rel)]/@href
|
||||
FeedLink | /rss/channel/atom:link[@rel="self"]/@href<br>/rdf:RDF/channel/atom:link[@rel="self"]/@href | /feed/link[@rel="self"]/@href
|
||||
Updated | /rss/channel/lastBuildDate<br>/rss/channel/dc:date<br>/rdf:RDF/channel/dc:date | /feed/updated<br>/feed/modified
|
||||
Published | /rss/channel/pubDate |
|
||||
Author | /rss/channel/managingEditor<br>/rss/channel/webMaster<br>/rss/channel/dc:author<br>/rdf:RDF/channel/dc:author<br>/rss/channel/dc:creator<br>/rdf:RDF/channel/dc:creator<br>/rss/channel/itunes:author | /feed/author
|
||||
Language | /rss/channel/language<br>/rss/channel/dc:language<br>/rdf:RDF/channel/dc:language | /feed/@xml:lang
|
||||
Image | /rss/channel/image<br>/rdf:RDF/image<br>/rss/channel/itunes:image | /feed/logo
|
||||
Copyright | /rss/channel/copyright<br>/rss/channel/dc:rights<br>/rdf:RDF/channel/dc:rights | /feed/rights<br>/feed/copyright
|
||||
Generator | /rss/channel/generator | /feed/generator
|
||||
Categories | /rss/channel/category<br>/rss/channel/itunes:category<br>/rss/channel/itunes:keywords<br>/rss/channel/dc:subject<br>/rdf:RDF/channel/dc:subject | /feed/category
|
||||
`gofeed.Item` | RSS | Atom | JSON
|
||||
--- | --- | --- | ---
|
||||
Title | /rss/channel/item/title<br>/rdf:RDF/item/title<br>/rdf:RDF/item/dc:title<br>/rss/channel/item/dc:title | /feed/entry/title | /items/title
|
||||
Description | /rss/channel/item/description<br>/rdf:RDF/item/description<br>/rss/channel/item/dc:description<br>/rdf:RDF/item/dc:description | /feed/entry/summary | /items/summary
|
||||
Content | /rss/channel/item/content:encoded | /feed/entry/content | /items/content_html
|
||||
Link | /rss/channel/item/link<br>/rdf:RDF/item/link | /feed/entry/link[@rel=”alternate”]/@href<br>/feed/entry/link[not(@rel)]/@href | /items/url
|
||||
Updated | /rss/channel/item/dc:date<br>/rdf:RDF/rdf:item/dc:date | /feed/entry/modified<br>/feed/entry/updated | /items/date_modified
|
||||
Published | /rss/channel/item/pubDate<br>/rss/channel/item/dc:date | /feed/entry/published<br>/feed/entry/issued | /items/date_published
|
||||
Author | /rss/channel/item/author<br>/rss/channel/item/dc:author<br>/rdf:RDF/item/dc:author<br>/rss/channel/item/dc:creator<br>/rdf:RDF/item/dc:creator<br>/rss/channel/item/itunes:author | /feed/entry/author | /items/author/name
|
||||
GUID | /rss/channel/item/guid | /feed/entry/id | /items/id
|
||||
Image | /rss/channel/item/itunes:image<br>/rss/channel/item/media:image | | /items/image<br>/items/banner_image
|
||||
Categories | /rss/channel/item/category<br>/rss/channel/item/dc:subject<br>/rss/channel/item/itunes:keywords<br>/rdf:RDF/channel/item/dc:subject | /feed/entry/category | /items/tags
|
||||
Enclosures | /rss/channel/item/enclosure | /feed/entry/link[@rel=”enclosure”] | /items/attachments
|
||||
|
||||
|
||||
`gofeed.Item` | RSS | Atom
|
||||
--- | --- | ---
|
||||
Title | /rss/channel/item/title<br>/rdf:RDF/item/title<br>/rdf:RDF/item/dc:title<br>/rss/channel/item/dc:title | /feed/entry/title
|
||||
Description | /rss/channel/item/description<br>/rdf:RDF/item/description<br>/rss/channel/item/dc:description<br>/rdf:RDF/item/dc:description | /feed/entry/summary
|
||||
Content | /rss/channel/item/content:encoded | /feed/entry/content
|
||||
Link | /rss/channel/item/link<br>/rdf:RDF/item/link | /feed/entry/link[@rel=”alternate”]/@href<br>/feed/entry/link[not(@rel)]/@href
|
||||
Updated | /rss/channel/item/dc:date<br>/rdf:RDF/rdf:item/dc:date | /feed/entry/modified<br>/feed/entry/updated
|
||||
Published | /rss/channel/item/pubDate<br>/rss/channel/item/dc:date | /feed/entry/published<br>/feed/entry/issued
|
||||
Author | /rss/channel/item/author<br>/rss/channel/item/dc:author<br>/rdf:RDF/item/dc:author<br>/rss/channel/item/dc:creator<br>/rdf:RDF/item/dc:creator<br>/rss/channel/item/itunes:author | /feed/entry/author
|
||||
GUID | /rss/channel/item/guid | /feed/entry/id
|
||||
Image | /rss/channel/item/itunes:image<br>/rss/channel/item/media:image |
|
||||
Categories | /rss/channel/item/category<br>/rss/channel/item/dc:subject<br>/rss/channel/item/itunes:keywords<br>/rdf:RDF/channel/item/dc:subject | /feed/entry/category
|
||||
Enclosures | /rss/channel/item/enclosure | /feed/entry/link[@rel=”enclosure”]
|
||||
|
||||
## Dependencies
|
||||
|
||||
* [goxpp](https://github.com/mmcdole/goxpp) - XML Pull Parser
|
||||
* [goquery](https://github.com/PuerkitoBio/goquery) - Go jQuery-like interface
|
||||
* [testify](https://github.com/stretchr/testify) - Unit test enhancements
|
||||
- [goxpp](https://github.com/mmcdole/goxpp) - XML Pull Parser
|
||||
- [goquery](https://github.com/PuerkitoBio/goquery) - Go jQuery-like interface
|
||||
- [testify](https://github.com/stretchr/testify) - Unit test enhancements
|
||||
- [jsoniter](https://github.com/json-iterator/go) - Faster JSON Parsing
|
||||
|
||||
## License
|
||||
|
||||
|
@ -248,8 +272,9 @@ This project is licensed under the [MIT License](https://raw.githubusercontent.c
|
|||
|
||||
## Credits
|
||||
|
||||
* [cristoper](https://github.com/cristoper) for his work on implementing xml:base relative URI handling.
|
||||
* [Mark Pilgrim](https://en.wikipedia.org/wiki/Mark_Pilgrim) and [Kurt McKee](http://kurtmckee.org) for their work on the excellent [Universal Feed Parser](https://github.com/kurtmckee/feedparser) Python library. This library was the inspiration for the `gofeed` library.
|
||||
* [Dan MacTough](http://blog.mact.me) for his work on [node-feedparser](https://github.com/danmactough/node-feedparser). It provided inspiration for the set of fields that should be covered in the hybrid `gofeed.Feed` model.
|
||||
* [Matt Jibson](https://mattjibson.com/) for his date parsing function in the [goread](https://github.com/mjibson/goread) project.
|
||||
* [Jim Teeuwen](https://github.com/jteeuwen) for his method of representing arbitrary feed extensions in the [go-pkg-rss](https://github.com/jteeuwen/go-pkg-rss) library.
|
||||
- [cristoper](https://github.com/cristoper) for his work on implementing xml:base relative URI handling.
|
||||
- [Mark Pilgrim](https://en.wikipedia.org/wiki/Mark_Pilgrim) and [Kurt McKee](http://kurtmckee.org) for their work on the excellent [Universal Feed Parser](https://github.com/kurtmckee/feedparser) Python library. This library was the inspiration for the `gofeed` library.
|
||||
- [Dan MacTough](http://blog.mact.me) for his work on [node-feedparser](https://github.com/danmactough/node-feedparser). It provided inspiration for the set of fields that should be covered in the hybrid `gofeed.Feed` model.
|
||||
- [Matt Jibson](https://mattjibson.com/) for his date parsing function in the [goread](https://github.com/mjibson/goread) project.
|
||||
- [Jim Teeuwen](https://github.com/jteeuwen) for his method of representing arbitrary feed extensions in the [go-pkg-rss](https://github.com/jteeuwen/go-pkg-rss) library.
|
||||
- [Sudhanshu Raheja](https://revolt.ist) for supporting JSON Feed parser
|
||||
|
|
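The README diff documents gofeed's new JSON Feed support in the universal parser. Here is a small sketch in the same style as the README examples, feeding an inline (illustrative) JSON document through gofeed.Parser.

```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/mmcdole/gofeed"
)

func main() {
	// Illustrative inline JSON Feed document.
	feedData := `{
		"version": "https://jsonfeed.org/version/1",
		"title": "Example JSON Feed",
		"home_page_url": "https://example.org",
		"items": [{"id": "1", "url": "https://example.org/1", "title": "First post"}]
	}`

	// The universal parser detects the feed type and translates the result
	// into the hybrid gofeed.Feed model, exactly as it does for RSS and Atom.
	fp := gofeed.NewParser()
	feed, err := fp.Parse(strings.NewReader(feedData))
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(feed.Title)         // "Example JSON Feed"
	fmt.Println(feed.Items[0].Link) // mapped from /items/url
}
```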
32
vendor/github.com/mmcdole/gofeed/detector.go
generated
vendored
|
@ -1,9 +1,11 @@
|
|||
package gofeed
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"strings"
|
||||
|
||||
jsoniter "github.com/json-iterator/go"
|
||||
"github.com/mmcdole/gofeed/internal/shared"
|
||||
xpp "github.com/mmcdole/goxpp"
|
||||
)
|
||||
|
@ -20,13 +22,34 @@ const (
|
|||
FeedTypeAtom
|
||||
// FeedTypeRSS represents an RSS feed
|
||||
FeedTypeRSS
|
||||
// FeedTypeJSON represents a JSON feed
|
||||
FeedTypeJSON
|
||||
)
|
||||
|
||||
// DetectFeedType attempts to determine the type of feed
|
||||
// by looking for specific xml elements unique to the
|
||||
// various feed types.
|
||||
func DetectFeedType(feed io.Reader) FeedType {
|
||||
p := xpp.NewXMLPullParser(feed, false, shared.NewReaderLabel)
|
||||
buffer := new(bytes.Buffer)
|
||||
buffer.ReadFrom(feed)
|
||||
|
||||
// remove leading whitespace (if exists)
|
||||
var firstChar byte
|
||||
for {
|
||||
ch, err := buffer.ReadByte()
|
||||
if err != nil {
|
||||
return FeedTypeUnknown
|
||||
}
|
||||
if ch != ' ' && ch != '\t' {
|
||||
firstChar = ch
|
||||
buffer.UnreadByte()
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if firstChar == '<' {
|
||||
// Check if it's an XML based feed
|
||||
p := xpp.NewXMLPullParser(bytes.NewReader(buffer.Bytes()), false, shared.NewReaderLabel)
|
||||
|
||||
xmlBase := shared.XMLBase{}
|
||||
_, err := xmlBase.FindRoot(p)
|
||||
|
@ -45,4 +68,11 @@ func DetectFeedType(feed io.Reader) FeedType {
|
|||
default:
|
||||
return FeedTypeUnknown
|
||||
}
|
||||
} else if firstChar == '{' {
|
||||
// Check if document is valid JSON
|
||||
if jsoniter.Valid(buffer.Bytes()) {
|
||||
return FeedTypeJSON
|
||||
}
|
||||
}
|
||||
return FeedTypeUnknown
|
||||
}
|
||||
|
|
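DetectFeedType now buffers the input, skips leading spaces and tabs, and branches on the first byte: '<' keeps the existing XML root-element detection, while '{' is reported as JSON when the buffer is valid JSON. A short sketch of calling it; the feed snippets are illustrative.

```go
package main

import (
	"fmt"
	"strings"

	"github.com/mmcdole/gofeed"
)

func main() {
	xmlFeed := `<rss version="2.0"><channel><title>x</title></channel></rss>`
	jsonFeed := `  {"version": "https://jsonfeed.org/version/1", "title": "x"}`

	// '<' as the first non-blank byte takes the XML root-element path.
	fmt.Println(gofeed.DetectFeedType(strings.NewReader(xmlFeed)) == gofeed.FeedTypeRSS)

	// '{' switches to the JSON branch, which only checks that the buffer is valid JSON.
	fmt.Println(gofeed.DetectFeedType(strings.NewReader(jsonFeed)) == gofeed.FeedTypeJSON)
}
```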
6
vendor/github.com/mmcdole/gofeed/extensions/itunes.go
generated
vendored
|
@ -29,7 +29,10 @@ type ITunesItemExtension struct {
|
|||
Summary string `json:"summary,omitempty"`
|
||||
Image string `json:"image,omitempty"`
|
||||
IsClosedCaptioned string `json:"isClosedCaptioned,omitempty"`
|
||||
Episode string `json:"episode,omitempty"`
|
||||
Season string `json:"season,omitempty"`
|
||||
Order string `json:"order,omitempty"`
|
||||
EpisodeType string `json:"episodeType,omitempty"`
|
||||
}
|
||||
|
||||
// ITunesCategory is a category element for itunes feeds.
|
||||
|
@ -76,7 +79,10 @@ func NewITunesItemExtension(extensions map[string][]Extension) *ITunesItemExtens
|
|||
entry.Keywords = parseTextExtension("keywords", extensions)
|
||||
entry.Image = parseImage(extensions)
|
||||
entry.IsClosedCaptioned = parseTextExtension("isClosedCaptioned", extensions)
|
||||
entry.Episode = parseTextExtension("episode", extensions)
|
||||
entry.Season = parseTextExtension("season", extensions)
|
||||
entry.Order = parseTextExtension("order", extensions)
|
||||
entry.EpisodeType = parseTextExtension("episodeType", extensions)
|
||||
return entry
|
||||
}
|
||||
|
||||
|
|
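The itunes.go change adds Episode, Season, Order and EpisodeType to ITunesItemExtension and parses them from the feed. A hedged sketch of reading the new fields after parsing a podcast; it assumes gofeed's usual behaviour of exposing the parsed extension as Item.ITunesExt (nil when the item has no iTunes tags), and the feed URL is a placeholder.

```go
package main

import (
	"fmt"
	"log"

	"github.com/mmcdole/gofeed"
)

func main() {
	fp := gofeed.NewParser()
	feed, err := fp.ParseURL("https://example.org/podcast.xml") // placeholder URL
	if err != nil {
		log.Fatal(err)
	}

	for _, item := range feed.Items {
		// ITunesExt is assumed to carry the parsed itunes:* extension;
		// it is nil for items without iTunes tags.
		if it := item.ITunesExt; it != nil {
			fmt.Printf("%s: season %s, episode %s (%s)\n",
				item.Title, it.Season, it.Episode, it.EpisodeType)
		}
	}
}
```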
20
vendor/github.com/mmcdole/gofeed/feed.go
generated
vendored
|
@ -10,6 +10,8 @@ import (
|
|||
// Feed is the universal Feed type that atom.Feed
|
||||
// and rss.Feed gets translated to. It represents
|
||||
// a web feed.
|
||||
// Sorting with sort.Sort will order the Items by
|
||||
// oldest to newest publish time.
|
||||
type Feed struct {
|
||||
Title string `json:"title,omitempty"`
|
||||
Description string `json:"description,omitempty"`
|
||||
|
@ -82,3 +84,21 @@ type Enclosure struct {
|
|||
Length string `json:"length,omitempty"`
|
||||
Type string `json:"type,omitempty"`
|
||||
}
|
||||
|
||||
// Len returns the length of Items.
|
||||
func (f Feed) Len() int {
|
||||
return len(f.Items)
|
||||
}
|
||||
|
||||
// Less compares PublishedParsed of Items[i], Items[k]
|
||||
// and returns true if Items[i] is less than Items[k].
|
||||
func (f Feed) Less(i, k int) bool {
|
||||
return f.Items[i].PublishedParsed.Before(
|
||||
*f.Items[k].PublishedParsed,
|
||||
)
|
||||
}
|
||||
|
||||
// Swap swaps Items[i] and Items[k].
|
||||
func (f Feed) Swap(i, k int) {
|
||||
f.Items[i], f.Items[k] = f.Items[k], f.Items[i]
|
||||
}
|
||||
|
|
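feed.go now implements sort.Interface on Feed, ordering Items oldest to newest by PublishedParsed. A small sketch follows; note that Less dereferences PublishedParsed, so as an assumption (not shown in the diff) items without a parsed publish date would need filtering before sorting.

```go
package main

import (
	"fmt"
	"log"
	"sort"
	"strings"

	"github.com/mmcdole/gofeed"
)

func main() {
	// Two items, newest first on purpose, both with a parseable pubDate.
	feedData := `<rss version="2.0"><channel><title>t</title>
		<item><title>newer</title><pubDate>Tue, 02 Jan 2007 15:04:05 GMT</pubDate></item>
		<item><title>older</title><pubDate>Mon, 02 Jan 2006 15:04:05 GMT</pubDate></item>
	</channel></rss>`

	fp := gofeed.NewParser()
	feed, err := fp.Parse(strings.NewReader(feedData))
	if err != nil {
		log.Fatal(err)
	}

	// Feed now satisfies sort.Interface: Items end up oldest to newest.
	sort.Sort(feed)

	for _, item := range feed.Items {
		fmt.Println(item.Title, item.Published)
	}
}
```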
14
vendor/github.com/mmcdole/gofeed/go.mod
generated
vendored
|
@ -1,12 +1,14 @@
|
|||
module github.com/mmcdole/gofeed
|
||||
|
||||
go 1.14
|
||||
|
||||
require (
|
||||
github.com/PuerkitoBio/goquery v1.5.0
|
||||
github.com/codegangsta/cli v1.20.0
|
||||
github.com/PuerkitoBio/goquery v1.5.1
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/json-iterator/go v1.1.10
|
||||
github.com/mmcdole/goxpp v0.0.0-20181012175147-0068e33feabf
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
github.com/stretchr/testify v1.2.2
|
||||
golang.org/x/net v0.0.0-20181220203305-927f97764cc3
|
||||
golang.org/x/text v0.3.0
|
||||
github.com/stretchr/testify v1.3.0
|
||||
github.com/urfave/cli v1.22.3
|
||||
golang.org/x/net v0.0.0-20200301022130-244492dfa37a
|
||||
golang.org/x/text v0.3.2
|
||||
)
|
||||
|
43 vendor/github.com/mmcdole/gofeed/go.sum generated vendored
@@ -1,20 +1,45 @@
|
|||
github.com/PuerkitoBio/goquery v1.5.0 h1:uGvmFXOA73IKluu/F84Xd1tt/z07GYm8X49XKHP7EJk=
|
||||
github.com/PuerkitoBio/goquery v1.5.0/go.mod h1:qD2PgZ9lccMbQlc7eEOjaeRlFQON7xY8kdmcsrnKqMg=
|
||||
github.com/andybalholm/cascadia v1.0.0 h1:hOCXnnZ5A+3eVDX8pvgl4kofXv2ELss0bKcqRySc45o=
|
||||
github.com/andybalholm/cascadia v1.0.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
|
||||
github.com/codegangsta/cli v1.20.0 h1:iX1FXEgwzd5+XN6wk5cVHOGQj6Q3Dcp20lUeS4lHNTw=
|
||||
github.com/codegangsta/cli v1.20.0/go.mod h1:/qJNoX69yVSKu5o4jLyXAENLRyk1uhi7zkbQ3slBdOA=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
github.com/PuerkitoBio/goquery v1.5.1 h1:PSPBGne8NIUWw+/7vFBV+kG2J/5MOjbzc7154OaKCSE=
|
||||
github.com/PuerkitoBio/goquery v1.5.1/go.mod h1:GsLWisAFVj4WgDibEWF4pvYnkVQBpKBKeU+7zCJoLcc=
|
||||
github.com/andybalholm/cascadia v1.1.0 h1:BuuO6sSfQNFRu1LppgbD25Hr2vLYW25JvxHs5zzsLTo=
|
||||
github.com/andybalholm/cascadia v1.1.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d h1:U+s90UTSYgptZMwQh2aRr3LuazLJIa+Pg3Kc1ylSYVY=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68=
|
||||
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||
github.com/mmcdole/goxpp v0.0.0-20181012175147-0068e33feabf h1:sWGE2v+hO0Nd4yFU/S/mDBM5plIU8v/Qhfz41hkDIAI=
|
||||
github.com/mmcdole/goxpp v0.0.0-20181012175147-0068e33feabf/go.mod h1:pasqhqstspkosTneA62Nc+2p9SOBBYAPbnmRRWPQ0V8=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 h1:Esafd1046DLDQ0W1YjYsBW+p8U2u7vzgW2SQVmlNazg=
|
||||
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q=
|
||||
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo=
|
||||
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/urfave/cli v1.22.3 h1:FpNT6zq26xNpHZy08emi755QwzLPs6Pukqjlc7RfOMU=
|
||||
github.com/urfave/cli v1.22.3/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20181220203305-927f97764cc3 h1:eH6Eip3UpmR+yM/qI9Ijluzb1bNv/cAU/n+6l8tRSis=
|
||||
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200301022130-244492dfa37a h1:GuSPYbZzB5/dcLNCwLQLsg3obCJtX9IJhpXkvY7kzk0=
|
||||
golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
|
||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
61 vendor/github.com/mmcdole/gofeed/internal/shared/parseutils.go generated vendored
@@ -3,9 +3,8 @@ package shared
import (
	"bytes"
	"errors"
	"fmt"
	"html"
	"regexp"
	"strconv"
	"strings"

	xpp "github.com/mmcdole/goxpp"
@@ -96,60 +95,30 @@ func DecodeEntities(str string) (string, error) {
			break
		}

		// Write and skip everything before it
		buf.Write(data[:idx])
		data = data[idx+1:]
		data = data[idx:]

		if len(data) == 0 {
			return "", TruncatedEntity
		// If there is only the '&' left here
		if len(data) == 1 {
			buf.Write(data)
			return buf.String(), nil
		}

		// Find the end of the entity
		end := bytes.IndexByte(data, ';')
		if end == -1 {
			return "", TruncatedEntity
			// it's not an entitiy. just a plain old '&' possibly with extra bytes
			buf.Write(data)
			return buf.String(), nil
		}

		if data[0] == '#' {
			// Numerical character reference
			var str string
			base := 10

			if len(data) > 1 && data[1] == 'x' {
				str = string(data[2:end])
				base = 16
			// Check if there is a space somewhere within the 'entitiy'.
			// If there is then skip the whole thing since it's not a real entity.
			if strings.Contains(string(data[1:end]), " ") {
				buf.Write(data)
				return buf.String(), nil
			} else {
				str = string(data[1:end])
			}

			i, err := strconv.ParseUint(str, base, 32)
			if err != nil {
				return "", InvalidNumericReference
			}

			buf.WriteRune(rune(i))
		} else {
			// Predefined entity
			name := string(data[:end])

			var c byte
			switch name {
			case "lt":
				c = '<'
			case "gt":
				c = '>'
			case "quot":
				c = '"'
			case "apos":
				c = '\''
			case "amp":
				c = '&'
			default:
				return "", fmt.Errorf("unknown predefined "+
					"entity &%s;", name)
			}

			buf.WriteByte(c)
			buf.WriteString(html.UnescapeString(string(data[0 : end+1])))
		}

		// Skip the entity
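The net effect of the DecodeEntities changes above: a bare `&` (or an `&` whose "entity" contains a space or never terminates) is now written through verbatim instead of failing with TruncatedEntity, and predefined entities are expanded via html.UnescapeString. A rough standalone illustration of that lenient behavior, using only the standard library rather than the vendored internal package:

```go
package main

import (
	"fmt"
	"html"
)

func main() {
	// html.UnescapeString shows the direction the patched code takes:
	// real entities are decoded, while a lone '&' simply passes through
	// instead of being rejected as a truncated entity.
	inputs := []string{
		"Tom &amp; Jerry",    // predefined entity: decoded to "Tom & Jerry"
		"fish & chips",       // bare '&': left untouched, no error
		"caf&#233; &#x263A;", // numeric references: decoded
	}
	for _, in := range inputs {
		fmt.Printf("%q -> %q\n", in, html.UnescapeString(in))
	}
}
```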
62 vendor/github.com/mmcdole/gofeed/json/feed.go generated vendored Normal file
@@ -0,0 +1,62 @@
package json
|
||||
|
||||
import "encoding/json"
|
||||
|
||||
// Feed describes the structure for JSON Feed v1.0
|
||||
// https://www.jsonfeed.org/version/1/
|
||||
type Feed struct {
|
||||
Version string `json:"version"` // version (required, string) is the URL of the version of the format the feed uses
|
||||
Title string `json:"title,omitempty"` // title (required, string) is the name of the feed
|
||||
HomePageURL string `json:"home_page_url,omitempty"` // home_page_url (optional but strongly recommended, string) is the URL of the resource that the feed describes. This resource should be an HTML page
|
||||
FeedURL string `json:"feed_url,omitempty"` // feed_url (optional but strongly recommended, string) is the URL of the feed, and serves as the unique identifier for the feed
|
||||
Description string `json:"description,omitempty"` // description (optional, string)
|
||||
UserComment string `json:"user_comment,omitempty"` // user_comment (optional, string) is a description of the purpose of the feed. This is for the use of people looking at the raw JSON, and should be ignored by feed readers.
|
||||
NextURL string `json:"next_url,omitempty"` // next_url (optional, string) is the URL of a feed that provides the next n items. This allows for pagination
|
||||
Icon string `json:"icon,omitempty"` // icon (optional, string) is the URL of an image for the feed suitable to be used in a timeline. It should be square and relatively large — such as 512 x 512
|
||||
Favicon string `json:"favicon,omitempty"` // favicon (optional, string) is the URL of an image for the feed suitable to be used in a source list. It should be square and relatively small, but not smaller than 64 x 64
|
||||
Author *Author `json:"author,omitempty"` // author (optional, object) specifies the feed author. The author object has several members. These are all optional — but if you provide an author object, then at least one is required:
|
||||
Expired bool `json:"expired,omitempty"` // expired (optional, boolean) says whether or not the feed is finished — that is, whether or not it will ever update again.
|
||||
Items []*Item `json:"items"` // items is an array, and is required
|
||||
// TODO Hubs // hubs (very optional, array of objects) describes endpoints that can be used to subscribe to real-time notifications from the publisher of this feed. Each object has a type and url, both of which are required. See the section “Subscribing to Real-time Notifications” below for details.
|
||||
// TODO Extensions
|
||||
}
|
||||
|
||||
func (f Feed) String() string {
|
||||
json, _ := json.MarshalIndent(f, "", " ")
|
||||
return string(json)
|
||||
}
|
||||
|
||||
// Item defines an item in the feed
|
||||
type Item struct {
|
||||
ID string `json:"id,omitempty"` // id (required, string) is unique for that item for that feed over time. If an id is presented as a number or other type, a JSON Feed reader must coerce it to a string. Ideally, the id is the full URL of the resource described by the item, since URLs make great unique identifiers.
|
||||
URL string `json:"url,omitempty"` // url (optional, string) is the URL of the resource described by the item. It’s the permalink
|
||||
ExternalURL string `json:"external_url,omitempty"` // external_url (very optional, string) is the URL of a page elsewhere. This is especially useful for linkblogs
|
||||
Title string `json:"title,omitempty"` // title (optional, string) is plain text. Microblog items in particular may omit titles.
|
||||
ContentHTML string `json:"content_html,omitempty"` // content_html and content_text are each optional strings — but one or both must be present. This is the HTML or plain text of the item. Important: the only place HTML is allowed in this format is in content_html. A Twitter-like service might use content_text, while a blog might use content_html. Use whichever makes sense for your resource. (It doesn’t even have to be the same for each item in a feed.)
|
||||
ContentText string `json:"content_text,omitempty"` // Same as above
|
||||
Summary string `json:"summary,omitempty"` // summary (optional, string) is a plain text sentence or two describing the item.
|
||||
Image string `json:"image,omitempty"` // image (optional, string) is the URL of the main image for the item. This image may also appear in the content_html
|
||||
BannerImage string `json:"banner_image,omitempty"` // banner_image (optional, string) is the URL of an image to use as a banner.
|
||||
DatePublished string `json:"date_published,omitempty"` // date_published (optional, string) specifies the date in RFC 3339 format. (Example: 2010-02-07T14:04:00-05:00.)
|
||||
DateModified string `json:"date_modified,omitempty"` // date_modified (optional, string) specifies the modification date in RFC 3339 format.
|
||||
Author *Author `json:"author,omitempty"` // author (optional, object) has the same structure as the top-level author. If not specified in an item, then the top-level author, if present, is the author of the item.
|
||||
Tags []string `json:"tags,omitempty"` // tags (optional, array of strings) can have any plain text values you want. Tags tend to be just one word, but they may be anything.
|
||||
Attachments *[]Attachments `json:"attachments,omitempty"` // attachments (optional, array) lists related resources. Podcasts, for instance, would include an attachment that’s an audio or video file. An individual item may have one or more attachments.
|
||||
// TODO Extensions
|
||||
}
|
||||
|
||||
// Author defines the feed author structure. The author object has several members. These are all optional — but if you provide an author object, then at least one is required:
|
||||
type Author struct {
|
||||
Name string `json:"name,omitempty"` // name (optional, string) is the author’s name.
|
||||
URL string `json:"url,omitempty"` // url (optional, string) is the URL of a site owned by the author
|
||||
Avatar string `json:"avatar,omitempty"` // avatar (optional, string) is the URL for an image for the author. It should be square and relatively large — such as 512 x 512
|
||||
}
|
||||
|
||||
// Attachments defines the structure for related sources. Podcasts, for instance, would include an attachment that’s an audio or video file
|
||||
type Attachments struct {
|
||||
URL string `json:"url,omitempty"` // url (required, string) specifies the location of the attachment.
|
||||
MimeType string `json:"mime_type,omitempty"` // mime_type (required, string) specifies the type of the attachment, such as “audio/mpeg.”
|
||||
Title string `json:"title,omitempty"` // title (optional, string) is a name for the attachment. Important: if there are multiple attachments, and two or more have the exact same title (when title is present), then they are considered as alternate representations of the same thing. In this way a podcaster, for instance, might provide an audio recording in different formats.
|
||||
SizeInBytes int64 `json:"size_in_bytes,omitempty"` // size_in_bytes (optional, number) specifies how large the file is.
|
||||
DurationInSeconds int64 `json:"duration_in_seconds,omitempty"` // duration_in_seconds (optional, number) specifies how long it takes to listen to or watch, when played at normal speed.
|
||||
}
|
29 vendor/github.com/mmcdole/gofeed/json/parser.go generated vendored Normal file
@@ -0,0 +1,29 @@
package json

import (
	"bytes"
	"io"

	jsoniter "github.com/json-iterator/go"
)

var (
	j = jsoniter.ConfigCompatibleWithStandardLibrary
)

// Parser is an JSON Feed Parser
type Parser struct{}

// Parse parses an json feed into an json.Feed
func (ap *Parser) Parse(feed io.Reader) (*Feed, error) {
	jsonFeed := &Feed{}

	buffer := new(bytes.Buffer)
	buffer.ReadFrom(feed)

	err := j.Unmarshal(buffer.Bytes(), jsonFeed)
	if err != nil {
		return nil, err
	}
	return jsonFeed, err
}
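Since this new json subpackage plugs into the universal parser below, a JSON Feed document can be handed to gofeed exactly like RSS or Atom. A small sketch with a made-up JSON Feed 1.0 document:

```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/mmcdole/gofeed"
)

// A minimal, hypothetical JSON Feed 1.0 document used only for illustration.
const doc = `{
  "version": "https://jsonfeed.org/version/1",
  "title": "Example feed",
  "items": [
    {"id": "1", "url": "https://example.org/1", "content_text": "Hello"}
  ]
}`

func main() {
	feed, err := gofeed.NewParser().Parse(strings.NewReader(doc))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(feed.Title)            // "Example feed"
	fmt.Println(feed.Items[0].Content) // "Hello" (taken from content_text)
}
```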
35 vendor/github.com/mmcdole/gofeed/parser.go generated vendored
@@ -2,6 +2,7 @@ package gofeed

import (
	"bytes"
	"context"
	"errors"
	"fmt"
	"io"
@@ -9,6 +10,7 @@ import (
	"strings"

	"github.com/mmcdole/gofeed/atom"
	"github.com/mmcdole/gofeed/json"
	"github.com/mmcdole/gofeed/rss"
)

@@ -32,9 +34,11 @@ func (err HTTPError) Error() string {
type Parser struct {
	AtomTranslator Translator
	RSSTranslator  Translator
	JSONTranslator Translator
	Client         *http.Client
	rp             *rss.Parser
	ap             *atom.Parser
	jp             *json.Parser
}

// NewParser creates a universal feed parser.
@@ -42,13 +46,14 @@ func NewParser() *Parser {
	fp := Parser{
		rp: &rss.Parser{},
		ap: &atom.Parser{},
		jp: &json.Parser{},
	}
	return &fp
}

// Parse parses a RSS or Atom feed into
// Parse parses a RSS or Atom or JSON feed into
// the universal gofeed.Feed. It takes an
// io.Reader which should return the xml content.
// io.Reader which should return the xml/json content.
func (f *Parser) Parse(feed io.Reader) (*Feed, error) {
	// Wrap the feed io.Reader in a io.TeeReader
	// so we can capture all the bytes read by the
@@ -68,6 +73,8 @@ func (f *Parser) Parse(feed io.Reader) (*Feed, error) {
		return f.parseAtomFeed(r)
	case FeedTypeRSS:
		return f.parseRSSFeed(r)
	case FeedTypeJSON:
		return f.parseJSONFeed(r)
	}

	return nil, ErrFeedTypeNotDetected
@@ -76,12 +83,20 @@ func (f *Parser) Parse(feed io.Reader) (*Feed, error) {
// ParseURL fetches the contents of a given url and
// attempts to parse the response into the universal feed type.
func (f *Parser) ParseURL(feedURL string) (feed *Feed, err error) {
	return f.ParseURLWithContext(feedURL, context.Background())
}

// ParseURLWithContext fetches contents of a given url and
// attempts to parse the response into the universal feed type.
// Request could be canceled or timeout via given context
func (f *Parser) ParseURLWithContext(feedURL string, ctx context.Context) (feed *Feed, err error) {
	client := f.httpClient()

	req, err := http.NewRequest("GET", feedURL, nil)
	if err != nil {
		return nil, err
	}
	req = req.WithContext(ctx)
	req.Header.Set("User-Agent", "Gofeed/1.0")
	resp, err := client.Do(req)

@@ -131,6 +146,14 @@ func (f *Parser) parseRSSFeed(feed io.Reader) (*Feed, error) {
	return f.rssTrans().Translate(rf)
}

func (f *Parser) parseJSONFeed(feed io.Reader) (*Feed, error) {
	jf, err := f.jp.Parse(feed)
	if err != nil {
		return nil, err
	}
	return f.jsonTrans().Translate(jf)
}

func (f *Parser) atomTrans() Translator {
	if f.AtomTranslator != nil {
		return f.AtomTranslator
@@ -147,6 +170,14 @@ func (f *Parser) rssTrans() Translator {
	return f.RSSTranslator
}

func (f *Parser) jsonTrans() Translator {
	if f.JSONTranslator != nil {
		return f.JSONTranslator
	}
	f.JSONTranslator = &DefaultJSONTranslator{}
	return f.JSONTranslator
}

func (f *Parser) httpClient() *http.Client {
	if f.Client != nil {
		return f.Client
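The new ParseURLWithContext lets callers bound the HTTP fetch with a deadline or cancellation; note the (url, ctx) argument order shown above. A minimal usage sketch with a hypothetical feed URL:

```go
package main

import (
	"context"
	"fmt"
	"log"
	"time"

	"github.com/mmcdole/gofeed"
)

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	fp := gofeed.NewParser()
	// example.org is a placeholder; any RSS, Atom or JSON feed URL works.
	feed, err := fp.ParseURLWithContext("https://example.org/feed.json", ctx)
	if err != nil {
		log.Fatal(err)
	}
	// FeedType reports which parser handled the document ("rss", "atom" or "json").
	fmt.Println(feed.Title, feed.FeedType)
}
```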
268 vendor/github.com/mmcdole/gofeed/translator.go generated vendored
@@ -8,10 +8,11 @@ import (
	"github.com/mmcdole/gofeed/atom"
	ext "github.com/mmcdole/gofeed/extensions"
	"github.com/mmcdole/gofeed/internal/shared"
	"github.com/mmcdole/gofeed/json"
	"github.com/mmcdole/gofeed/rss"
)

// Translator converts a particular feed (atom.Feed or rss.Feed)
// Translator converts a particular feed (atom.Feed or rss.Feed of json.Feed)
// into the generic Feed struct
type Translator interface {
	Translate(feed interface{}) (*Feed, error)
@@ -103,8 +104,8 @@ func (t *DefaultRSSTranslator) translateFeedFeedLink(rss *rss.Feed) (link string
	for _, ex := range atomExtensions {
		if links, ok := ex["link"]; ok {
			for _, l := range links {
				if l.Attrs["Rel"] == "self" {
					link = l.Value
				if l.Attrs["rel"] == "self" {
					link = l.Attrs["href"]
				}
			}
		}
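The second hunk fixes how the RSS translator picks up a feed's self link: the attribute map is keyed with lowercase names, and the URL lives in the href attribute rather than the element value. If I read the change correctly, an RSS channel carrying an atom:link rel="self" should now surface that URL as FeedLink; a small speculative check:

```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/mmcdole/gofeed"
)

// A hypothetical RSS 2.0 document with an atom:link self reference.
const doc = `<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
  <channel>
    <title>Example channel</title>
    <link>https://example.org/</link>
    <atom:link href="https://example.org/feed.xml" rel="self" type="application/rss+xml"/>
  </channel>
</rss>`

func main() {
	feed, err := gofeed.NewParser().Parse(strings.NewReader(doc))
	if err != nil {
		log.Fatal(err)
	}
	// With the corrected lookup this should be the atom:link href,
	// i.e. https://example.org/feed.xml, rather than an empty string.
	fmt.Println(feed.FeedLink)
}
```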
@ -684,3 +685,264 @@ func (t *DefaultAtomTranslator) firstPerson(persons []*atom.Person) (person *ato
|
|||
person = persons[0]
|
||||
return
|
||||
}
|
||||
|
||||
// DefaultJSONTranslator converts an json.Feed struct
|
||||
// into the generic Feed struct.
|
||||
//
|
||||
// This default implementation defines a set of
|
||||
// mapping rules between json.Feed -> Feed
|
||||
// for each of the fields in Feed.
|
||||
type DefaultJSONTranslator struct{}
|
||||
|
||||
// Translate converts an JSON feed into the universal
|
||||
// feed type.
|
||||
func (t *DefaultJSONTranslator) Translate(feed interface{}) (*Feed, error) {
|
||||
json, found := feed.(*json.Feed)
|
||||
if !found {
|
||||
return nil, fmt.Errorf("Feed did not match expected type of *json.Feed")
|
||||
}
|
||||
|
||||
result := &Feed{}
|
||||
result.FeedVersion = json.Version
|
||||
result.Title = t.translateFeedTitle(json)
|
||||
result.Link = t.translateFeedLink(json)
|
||||
result.FeedLink = t.translateFeedFeedLink(json)
|
||||
result.Description = t.translateFeedDescription(json)
|
||||
result.Image = t.translateFeedImage(json)
|
||||
result.Author = t.translateFeedAuthor(json)
|
||||
result.Items = t.translateFeedItems(json)
|
||||
result.Updated = t.translateFeedUpdated(json)
|
||||
result.UpdatedParsed = t.translateFeedUpdatedParsed(json)
|
||||
result.Published = t.translateFeedPublished(json)
|
||||
result.PublishedParsed = t.translateFeedPublishedParsed(json)
|
||||
result.FeedType = "json"
|
||||
// TODO UserComment is missing in global Feed
|
||||
// TODO NextURL is missing in global Feed
|
||||
// TODO Favicon is missing in global Feed
|
||||
// TODO Exipred is missing in global Feed
|
||||
// TODO Hubs is not supported in json.Feed
|
||||
// TODO Extensions is not supported in json.Feed
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateFeedItem(jsonItem *json.Item) (item *Item) {
|
||||
item = &Item{}
|
||||
item.GUID = t.translateItemGUID(jsonItem)
|
||||
item.Link = t.translateItemLink(jsonItem)
|
||||
item.Title = t.translateItemTitle(jsonItem)
|
||||
item.Content = t.translateItemContent(jsonItem)
|
||||
item.Description = t.translateItemDescription(jsonItem)
|
||||
item.Image = t.translateItemImage(jsonItem)
|
||||
item.Published = t.translateItemPublished(jsonItem)
|
||||
item.PublishedParsed = t.translateItemPublishedParsed(jsonItem)
|
||||
item.Updated = t.translateItemUpdated(jsonItem)
|
||||
item.UpdatedParsed = t.translateItemUpdatedParsed(jsonItem)
|
||||
item.Author = t.translateItemAuthor(jsonItem)
|
||||
item.Categories = t.translateItemCategories(jsonItem)
|
||||
item.Enclosures = t.translateItemEnclosures(jsonItem)
|
||||
// TODO ExternalURL is missing in global Feed
|
||||
// TODO BannerImage is missing in global Feed
|
||||
return
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateFeedTitle(json *json.Feed) (title string) {
|
||||
if json.Title != "" {
|
||||
title = json.Title
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateFeedDescription(json *json.Feed) (desc string) {
|
||||
return json.Description
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateFeedLink(json *json.Feed) (link string) {
|
||||
if json.HomePageURL != "" {
|
||||
link = json.HomePageURL
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateFeedFeedLink(json *json.Feed) (link string) {
|
||||
if json.FeedURL != "" {
|
||||
link = json.FeedURL
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateFeedUpdated(json *json.Feed) (updated string) {
|
||||
if len(json.Items) > 0 {
|
||||
updated = json.Items[0].DateModified
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateFeedUpdatedParsed(json *json.Feed) (updated *time.Time) {
|
||||
if len(json.Items) > 0 {
|
||||
updateTime, err := shared.ParseDate(json.Items[0].DateModified)
|
||||
if err == nil {
|
||||
updated = &updateTime
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateFeedPublished(json *json.Feed) (published string) {
|
||||
if len(json.Items) > 0 {
|
||||
published = json.Items[0].DatePublished
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateFeedPublishedParsed(json *json.Feed) (published *time.Time) {
|
||||
if len(json.Items) > 0 {
|
||||
publishTime, err := shared.ParseDate(json.Items[0].DatePublished)
|
||||
if err == nil {
|
||||
published = &publishTime
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateFeedAuthor(json *json.Feed) (author *Person) {
|
||||
if json.Author != nil {
|
||||
name, address := shared.ParseNameAddress(json.Author.Name)
|
||||
author = &Person{}
|
||||
author.Name = name
|
||||
author.Email = address
|
||||
}
|
||||
// Author.URL is missing in global feed
|
||||
// Author.Avatar is missing in global feed
|
||||
return
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateFeedImage(json *json.Feed) (image *Image) {
|
||||
// Using the Icon rather than the image
|
||||
// icon (optional, string) is the URL of an image for the feed suitable to be used in a timeline. It should be square and relatively large — such as 512 x 512
|
||||
if json.Icon != "" {
|
||||
image = &Image{}
|
||||
image.URL = json.Icon
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateFeedItems(json *json.Feed) (items []*Item) {
|
||||
items = []*Item{}
|
||||
for _, i := range json.Items {
|
||||
items = append(items, t.translateFeedItem(i))
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateItemTitle(jsonItem *json.Item) (title string) {
|
||||
if jsonItem.Title != "" {
|
||||
title = jsonItem.Title
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateItemDescription(jsonItem *json.Item) (desc string) {
|
||||
if jsonItem.Summary != "" {
|
||||
desc = jsonItem.Summary
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateItemContent(jsonItem *json.Item) (content string) {
|
||||
if jsonItem.ContentHTML != "" {
|
||||
content = jsonItem.ContentHTML
|
||||
} else if jsonItem.ContentText != "" {
|
||||
content = jsonItem.ContentText
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateItemLink(jsonItem *json.Item) (link string) {
|
||||
return jsonItem.URL
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateItemUpdated(jsonItem *json.Item) (updated string) {
|
||||
if jsonItem.DateModified != "" {
|
||||
updated = jsonItem.DateModified
|
||||
}
|
||||
return updated
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateItemUpdatedParsed(jsonItem *json.Item) (updated *time.Time) {
|
||||
if jsonItem.DateModified != "" {
|
||||
updatedTime, err := shared.ParseDate(jsonItem.DateModified)
|
||||
if err == nil {
|
||||
updated = &updatedTime
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateItemPublished(jsonItem *json.Item) (pubDate string) {
|
||||
if jsonItem.DatePublished != "" {
|
||||
pubDate = jsonItem.DatePublished
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateItemPublishedParsed(jsonItem *json.Item) (pubDate *time.Time) {
|
||||
if jsonItem.DatePublished != "" {
|
||||
publishTime, err := shared.ParseDate(jsonItem.DatePublished)
|
||||
if err == nil {
|
||||
pubDate = &publishTime
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateItemAuthor(jsonItem *json.Item) (author *Person) {
|
||||
if jsonItem.Author != nil {
|
||||
name, address := shared.ParseNameAddress(jsonItem.Author.Name)
|
||||
author = &Person{}
|
||||
author.Name = name
|
||||
author.Email = address
|
||||
}
|
||||
// Author.URL is missing in global feed
|
||||
// Author.Avatar is missing in global feed
|
||||
return
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateItemGUID(jsonItem *json.Item) (guid string) {
|
||||
if jsonItem.ID != "" {
|
||||
guid = jsonItem.ID
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateItemImage(jsonItem *json.Item) (image *Image) {
|
||||
if jsonItem.Image != "" {
|
||||
image = &Image{}
|
||||
image.URL = jsonItem.Image
|
||||
} else if jsonItem.BannerImage != "" {
|
||||
image = &Image{}
|
||||
image.URL = jsonItem.BannerImage
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateItemCategories(jsonItem *json.Item) (categories []string) {
|
||||
if len(jsonItem.Tags) > 0 {
|
||||
categories = jsonItem.Tags
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (t *DefaultJSONTranslator) translateItemEnclosures(jsonItem *json.Item) (enclosures []*Enclosure) {
|
||||
if jsonItem.Attachments != nil {
|
||||
for _, attachment := range *jsonItem.Attachments {
|
||||
e := &Enclosure{}
|
||||
e.URL = attachment.URL
|
||||
e.Type = attachment.MimeType
|
||||
e.Length = fmt.Sprintf("%d", attachment.DurationInSeconds)
|
||||
// Title is not defined in global enclosure
|
||||
// SizeInBytes is not defined in global enclosure
|
||||
enclosures = append(enclosures, e)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
|
37 vendor/github.com/mmcdole/goxpp/README.md generated vendored
@@ -3,7 +3,42 @@
[![Build Status](https://travis-ci.org/mmcdole/goxpp.svg?branch=master)](https://travis-ci.org/mmcdole/goxpp) [![Coverage Status](https://coveralls.io/repos/github/mmcdole/goxpp/badge.svg?branch=master)](https://coveralls.io/github/mmcdole/goxpp?branch=master) [![License](http://img.shields.io/:license-mit-blue.svg)](http://doge.mit-license.org)
[![GoDoc](https://godoc.org/github.com/mmcdole/goxpp?status.svg)](https://godoc.org/github.com/mmcdole/goxpp)

The `goxpp` library is an XML parser library that is loosely based on the [Java XMLPullParser](http://www.xmlpull.org/v1/download/unpacked/doc/quick_intro.html). This library allows you to easily parse arbitary XML content using a pull parser. You can think of `goxpp` as a lightweight wrapper around Go's XML `Decoder` that provides a set of functions that make it easier to parse XML content than using the raw decoder itself.
The `goxpp` library is an XML parser library that is loosely based on the [Java XMLPullParser](http://www.xmlpull.org/v1/download/unpacked/doc/quick_intro.html). This library allows you to easily parse arbitrary XML content using a pull parser. You can think of `goxpp` as a lightweight wrapper around Go's XML `Decoder` that provides a set of functions that make it easier to parse XML content than using the raw decoder itself.

## Overview

To begin parsing a XML document using `goxpp` you must pass it an `io.Reader` object for your document:

```go
file, err := os.Open("path/file.xml")
parser := xpp.NewXMLPullParser(file, false, charset.NewReader)
```

The `goxpp` library decodes documents into a series of token objects:

| Token Name |
|----------------------------------|
| StartDocument |
| EndDocument |
| StartTag |
| EndTag |
| Text |
| Comment |
| ProcessingInstruction |
| Directive |
| IgnorableWhitespace |

You will always start at the `StartDocument` token and can use the following functions to walk through a document:

| Function Name | Description |
|----------------------------------|---------------------------------------|
| Next() | Advance to the next `Text`, `StartTag`, `EndTag`, `EndDocument` token.<br>Note: skips `Comment`, `Directive` and `ProcessingInstruction` |
| NextToken() | Advance to the next token regardless of type. |
| NextText() | Advance to the next `Text` token. |
| Skip() | Skip the next token. |
| DecodeElement(v interface{}) | Decode an entire element from the current tag into a struct.<br>Note: must be at a `StartTag` token |

This project is licensed under the [MIT License](https://raw.githubusercontent.com/mmcdole/goxpp/master/LICENSE)
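Building on the vendored README above, here is a slightly fuller sketch of walking tokens with NextToken(). The field and constant names (p.Name, p.Text, xpp.StartTag, …) follow how gofeed itself drives goxpp, and a nil CharsetReader is assumed to be acceptable for plain UTF-8 input; treat it as an illustration rather than canonical goxpp documentation.

```go
package main

import (
	"fmt"
	"strings"

	xpp "github.com/mmcdole/goxpp"
)

func main() {
	r := strings.NewReader(`<feed><title>Example</title></feed>`)
	// nil CharsetReader: only needed for non-UTF-8 encoding declarations.
	p := xpp.NewXMLPullParser(r, false, nil)

	for {
		tok, err := p.NextToken()
		if err != nil {
			fmt.Println("error:", err)
			return
		}
		switch tok {
		case xpp.StartTag:
			fmt.Println("start:", p.Name)
		case xpp.Text:
			fmt.Printf("text: %q\n", p.Text)
		case xpp.EndTag:
			fmt.Println("end:", p.Name)
		case xpp.EndDocument:
			return
		}
	}
}
```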
1 vendor/github.com/modern-go/concurrent/.gitignore generated vendored Normal file
@@ -0,0 +1 @@
/coverage.txt
14 vendor/github.com/modern-go/concurrent/.travis.yml generated vendored Normal file
@@ -0,0 +1,14 @@
language: go

go:
  - 1.8.x
  - 1.x

before_install:
  - go get -t -v ./...

script:
  - ./test.sh

after_success:
  - bash <(curl -s https://codecov.io/bash)
201 vendor/github.com/modern-go/concurrent/LICENSE generated vendored Normal file
@@ -0,0 +1,201 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
49 vendor/github.com/modern-go/concurrent/README.md generated vendored Normal file
@@ -0,0 +1,49 @@
# concurrent

[![Sourcegraph](https://sourcegraph.com/github.com/modern-go/concurrent/-/badge.svg)](https://sourcegraph.com/github.com/modern-go/concurrent?badge)
[![GoDoc](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)](http://godoc.org/github.com/modern-go/concurrent)
[![Build Status](https://travis-ci.org/modern-go/concurrent.svg?branch=master)](https://travis-ci.org/modern-go/concurrent)
[![codecov](https://codecov.io/gh/modern-go/concurrent/branch/master/graph/badge.svg)](https://codecov.io/gh/modern-go/concurrent)
[![rcard](https://goreportcard.com/badge/github.com/modern-go/concurrent)](https://goreportcard.com/report/github.com/modern-go/concurrent)
[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://raw.githubusercontent.com/modern-go/concurrent/master/LICENSE)

* concurrent.Map: backport sync.Map for go below 1.9
* concurrent.Executor: goroutine with explicit ownership and cancellable

# concurrent.Map

because sync.Map is only available in go 1.9, we can use concurrent.Map to make code portable

```go
m := concurrent.NewMap()
m.Store("hello", "world")
elem, found := m.Load("hello")
// elem will be "world"
// found will be true
```

# concurrent.Executor

```go
executor := concurrent.NewUnboundedExecutor()
executor.Go(func(ctx context.Context) {
	everyMillisecond := time.NewTicker(time.Millisecond)
	for {
		select {
		case <-ctx.Done():
			fmt.Println("goroutine exited")
			return
		case <-everyMillisecond.C:
			// do something
		}
	}
})
time.Sleep(time.Second)
executor.StopAndWaitForever()
fmt.Println("executor stopped")
```

attach goroutine to executor instance, so that we can

* cancel it by stop the executor with Stop/StopAndWait/StopAndWaitForever
* handle panic by callback: the default behavior will no longer crash your application
14 vendor/github.com/modern-go/concurrent/executor.go generated vendored Normal file
@@ -0,0 +1,14 @@
package concurrent

import "context"

// Executor replace go keyword to start a new goroutine
// the goroutine should cancel itself if the context passed in has been cancelled
// the goroutine started by the executor, is owned by the executor
// we can cancel all executors owned by the executor just by stop the executor itself
// however Executor interface does not Stop method, the one starting and owning executor
// should use the concrete type of executor, instead of this interface.
type Executor interface {
	// Go starts a new goroutine controlled by the context
	Go(handler func(ctx context.Context))
}
15 vendor/github.com/modern-go/concurrent/go_above_19.go generated vendored Normal file
@@ -0,0 +1,15 @@
//+build go1.9

package concurrent

import "sync"

// Map is a wrapper for sync.Map introduced in go1.9
type Map struct {
	sync.Map
}

// NewMap creates a thread safe Map
func NewMap() *Map {
	return &Map{}
}
33 vendor/github.com/modern-go/concurrent/go_below_19.go generated vendored Normal file
@@ -0,0 +1,33 @@
//+build !go1.9

package concurrent

import "sync"

// Map implements a thread safe map for go version below 1.9 using mutex
type Map struct {
	lock sync.RWMutex
	data map[interface{}]interface{}
}

// NewMap creates a thread safe map
func NewMap() *Map {
	return &Map{
		data: make(map[interface{}]interface{}, 32),
	}
}

// Load is same as sync.Map Load
func (m *Map) Load(key interface{}) (elem interface{}, found bool) {
	m.lock.RLock()
	elem, found = m.data[key]
	m.lock.RUnlock()
	return
}

// Load is same as sync.Map Store
func (m *Map) Store(key interface{}, elem interface{}) {
	m.lock.Lock()
	m.data[key] = elem
	m.lock.Unlock()
}
13 vendor/github.com/modern-go/concurrent/log.go generated vendored Normal file
@@ -0,0 +1,13 @@
package concurrent

import (
	"os"
	"log"
	"io/ioutil"
)

// ErrorLogger is used to print out error, can be set to writer other than stderr
var ErrorLogger = log.New(os.Stderr, "", 0)

// InfoLogger is used to print informational message, default to off
var InfoLogger = log.New(ioutil.Discard, "", 0)
Some files were not shown because too many files have changed in this diff.