commit b9c7083dba ("first")

9  .drone.yml  Normal file
@@ -0,0 +1,9 @@
pipeline:
  docker:
    image: plugins/docker
    registry: docker.mtfos.xyz
    repo: docker.mtfos.xyz/mtfos/fblook
    dockerfile: Dockerfile
    tags: [latest, "${DRONE_COMMIT}"]
    when:
      branch: release

1  .gitignore  vendored  Normal file
@@ -0,0 +1 @@
config.yml

12  Dockerfile  Normal file
@@ -0,0 +1,12 @@
FROM golang:1.11-alpine3.8 as builder
WORKDIR /go/src/git.trj.tw/golang/fblook
RUN apk add --no-cache make git
COPY . .
RUN make

FROM alpine:latest
RUN apk add --no-cache ca-certificates
WORKDIR /data
COPY --from=builder /go/src/git.trj.tw/golang/fblook/fblook /usr/bin
COPY config.default.yml config.yml
CMD ["/usr/bin/fblook", "-f", "/data/config.yml"]

56  Gopkg.lock  generated  Normal file
@@ -0,0 +1,56 @@
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.


[[projects]]
  digest = "1:a62f6ed230a8cd138a9efbe718e7d0b0294f139266f5f55cd942769a9aac8de2"
  name = "github.com/PuerkitoBio/goquery"
  packages = ["."]
  pruneopts = "UT"
  revision = "dc2ec5c7ca4d9aae063b79b9f581dd3ea6afd2b2"
  version = "v1.4.1"

[[projects]]
  digest = "1:66b3310cf22cdc96c35ef84ede4f7b9b370971c4025f394c89a2638729653b11"
  name = "github.com/andybalholm/cascadia"
  packages = ["."]
  pruneopts = "UT"
  revision = "901648c87902174f774fac311d7f176f8647bdaa"
  version = "v1.0.0"

[[projects]]
  digest = "1:ed615c5430ecabbb0fb7629a182da65ecee6523900ac1ac932520860878ffcad"
  name = "github.com/robfig/cron"
  packages = ["."]
  pruneopts = "UT"
  revision = "b41be1df696709bb6395fe435af20370037c0b4c"
  version = "v1.1"

[[projects]]
  branch = "master"
  digest = "1:afd623c23f862e01d1b29377b691a65e1ac8aa5df6674eb79d19a16ba95947e9"
  name = "golang.org/x/net"
  packages = [
    "html",
    "html/atom",
  ]
  pruneopts = "UT"
  revision = "2f5d2388922f370f4355f327fcf4cfe9f5583908"

[[projects]]
  digest = "1:342378ac4dcb378a5448dd723f0784ae519383532f5e70ade24132c4c8693202"
  name = "gopkg.in/yaml.v2"
  packages = ["."]
  pruneopts = "UT"
  revision = "5420a8b6744d3b0345ab293f6fcba19c978f1183"
  version = "v2.2.1"

[solve-meta]
  analyzer-name = "dep"
  analyzer-version = 1
  input-imports = [
    "github.com/PuerkitoBio/goquery",
    "github.com/robfig/cron",
    "gopkg.in/yaml.v2",
  ]
  solver-name = "gps-cdcl"
  solver-version = 1

30  Gopkg.toml  Normal file
@@ -0,0 +1,30 @@
# Gopkg.toml example
#
# Refer to https://golang.github.io/dep/docs/Gopkg.toml.html
# for detailed Gopkg.toml documentation.
#
# required = ["github.com/user/thing/cmd/thing"]
# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
#
# [[constraint]]
#   name = "github.com/user/project"
#   version = "1.0.0"
#
# [[constraint]]
#   name = "github.com/user/project2"
#   branch = "dev"
#   source = "github.com/myfork/project2"
#
# [[override]]
#   name = "github.com/x/y"
#   version = "2.4.0"
#
# [prune]
#   non-go = false
#   go-tests = true
#   unused-packages = true


[prune]
  go-tests = true
  unused-packages = true

8  Makefile  Normal file
@@ -0,0 +1,8 @@

.PHONY: clean build

build:
	GOOS=linux go build -o fblook .

clean:
	rm -rf fblook && go clean

2  config.default.yml  Normal file
@@ -0,0 +1,2 @@
api_url: ''
api_key: ''

34  main.go  Normal file
@@ -0,0 +1,34 @@
package main

import (
	"flag"
	"log"

	"git.trj.tw/golang/fblook/module/background"
	"git.trj.tw/golang/fblook/module/config"
	"git.trj.tw/golang/fblook/module/options"
)

func init() {
	options.RegFlag()
	flag.Parse()
}

func main() {
	cc := make(chan bool, 0)
	runOptions := options.GetFlag()

	if runOptions.Help {
		flag.Usage()
		return
	}

	err := config.LoadConfig(runOptions.Config)
	if err != nil {
		log.Fatal(err)
	}

	background.SetBackground()

	<-cc
}

14  module/background/background.go  Normal file
@@ -0,0 +1,14 @@
package background

import (
	"github.com/robfig/cron"
)

var c *cron.Cron

// SetBackground -
func SetBackground() {
	c = cron.New()
	c.AddFunc("0 * * * * *", getFacebookPageData)
	c.Start()
}
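
For context (a minimal sketch, not part of the committed code): robfig/cron v1, the version pinned in Gopkg.lock, parses a six-field schedule with a seconds field first, so the spec "0 * * * * *" used above fires at second 0 of every minute, i.e. once per minute.

```go
package main

import (
	"fmt"
	"time"

	"github.com/robfig/cron"
)

func main() {
	c := cron.New()
	// Six-field spec (seconds first): run at second 0 of every minute,
	// the same once-per-minute cadence SetBackground uses above.
	c.AddFunc("0 * * * * *", func() {
		fmt.Println("tick at", time.Now().Format(time.RFC3339))
	})
	c.Start()
	time.Sleep(2 * time.Minute) // keep the process alive long enough to see a couple of ticks
}
```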

231  module/background/facebook.go  Normal file
@@ -0,0 +1,231 @@
package background

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
	"regexp"
	"sort"
	"strconv"
	"time"

	"git.trj.tw/golang/fblook/module/config"
	"github.com/PuerkitoBio/goquery"
)

var idRegex = []*regexp.Regexp{
	regexp.MustCompile(`[\?|&]story_fbid\=(\d+)`),
	regexp.MustCompile(`\/posts\/(\d+)`),
	regexp.MustCompile(`\/photos\/.+?\/(\d+)`),
	regexp.MustCompile(`\/videos\/(\d+)`),
}

// PageData - facebook fan page data
type PageData struct {
	ID   string
	Text string
	Time int32
	Link string
}

type byTime []*PageData

func (pd byTime) Len() int           { return len(pd) }
func (pd byTime) Swap(i, j int)      { pd[i], pd[j] = pd[j], pd[i] }
func (pd byTime) Less(i, j int) bool { return pd[i].Time < pd[j].Time }

func getFacebookPageData() {
	ids, err := getPageIDs()
	if err != nil {
		return
	}
	cc := make(chan bool, 2)
	fmt.Println(ids)
	for _, v := range ids {
		cc <- true
		go lookFacebookPageData(v, cc)
	}
}

func getPageIDs() (ids []string, err error) {
	tmpStruct := struct {
		List []string `json:"list"`
	}{}

	conf := config.GetConf()
	u, err := url.Parse(conf.APIURL)
	if err != nil {
		return
	}
	u, err = u.Parse("/api/private/pages")
	if err != nil {
		return
	}

	req, err := http.NewRequest("GET", u.String(), nil)
	if err != nil {
		return
	}
	req.Header.Set("X-Mtfos-Key", conf.APIKey)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		return
	}

	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return
	}

	err = json.Unmarshal(body, &tmpStruct)
	if err != nil {
		return
	}

	ids = tmpStruct.List

	return
}

func lookFacebookPageData(pageid string, cc chan bool) {
	fmt.Println("start look page ::::: ", pageid)
	defer func() {
		<-cc
	}()
	resp, err := http.Get(fmt.Sprintf("https://facebook.com/%s", pageid))
	if err != nil {
		fmt.Println("get page html err ", err)
		return
	}
	defer resp.Body.Close()

	doc, err := goquery.NewDocumentFromReader(resp.Body)
	if err != nil {
		fmt.Println("parse doc err ", err)
		return
	}
	var pageData []*PageData
	sel := doc.Find("div.userContentWrapper")
	sel.Each(func(idx int, s *goquery.Selection) {
		timeEl := s.Find("abbr")
		time, timeExists := timeEl.Attr("data-utime")
		if !timeExists {
			fmt.Println("time not found")
			return
		}
		link, linkExists := timeEl.Parent().Attr("href")
		if !linkExists {
			fmt.Println("link not found")
			return
		}
		postContent := s.Find("div.userContent")
		text := postContent.Text()
		postID, idExists := postContent.First().Attr("id")

		if !idExists {
			idFlag := false
			for _, v := range idRegex {
				if v.MatchString(link) {
					idFlag = true
					m := v.FindStringSubmatch(link)
					postID = m[1]
				}
			}
			if !idFlag {
				fmt.Println("id not found")
				return
			}
		}
		fmt.Printf("Time: %s / Text: %s / ID: %s \n", time, text, postID)

		timeInt, err := strconv.ParseInt(time, 10, 32)

		if err != nil {
			fmt.Println("time parse err ", err)
			return
		}

		re := regexp.MustCompile(`^\/`)
		pageLink := fmt.Sprintf("https://www.facebook.com/%s", re.ReplaceAllString(link, ""))

		data := &PageData{
			ID:   postID,
			Text: text,
			Time: int32(timeInt),
			Link: pageLink,
		}

		pageData = append(pageData, data)
	})

	if len(pageData) == 0 {
		return
	}

	sort.Sort(sort.Reverse(byTime(pageData)))

	lastData := pageData[0]
	t := int32(time.Now().Unix())

	if (t - 600) < lastData.Time {
		sendToAPI(pageid, *lastData)
	}
}

type apiPage struct {
	ID     string `json:"id"` // pageid
	PostID string `json:"post_id"`
	Link   string `json:"link"`
	Text   string `json:"text"`
}

func sendToAPI(pageid string, pageData PageData) {
	conf := config.GetConf()
	pagesStruct := struct {
		Pages []apiPage `json:"pages"`
	}{}

	jsonStruct := apiPage{}
	jsonStruct.ID = pageid
	jsonStruct.PostID = pageData.ID
	jsonStruct.Link = pageData.Link
	jsonStruct.Text = pageData.Text

	pagesStruct.Pages = make([]apiPage, 1)
	pagesStruct.Pages[0] = jsonStruct

	jsonByte, err := json.Marshal(pagesStruct)
	if err != nil {
		return
	}

	u, err := url.Parse(conf.APIURL)
	if err != nil {
		return
	}
	u, err = u.Parse("/api/private/pageposts")
	if err != nil {
		return
	}

	req, err := http.NewRequest("POST", u.String(), bytes.NewReader(jsonByte))
	if err != nil {
		return
	}
	req.Header.Set("X-Mtfos-Key", conf.APIKey)
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return
	}
	defer resp.Body.Close()
}
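
The post-ID fallback in lookFacebookPageData runs the four idRegex patterns against the post permalink when the content node has no id attribute. A small self-contained sketch of that extraction (the sample link below is invented purely for illustration):

```go
package main

import (
	"fmt"
	"regexp"
)

// Same patterns as idRegex in facebook.go.
var idRegex = []*regexp.Regexp{
	regexp.MustCompile(`[\?|&]story_fbid\=(\d+)`),
	regexp.MustCompile(`\/posts\/(\d+)`),
	regexp.MustCompile(`\/photos\/.+?\/(\d+)`),
	regexp.MustCompile(`\/videos\/(\d+)`),
}

func main() {
	// Hypothetical permalink; the /posts/ pattern captures the numeric ID.
	link := "/somepage/posts/1234567890?comment_tracking=x"
	for _, re := range idRegex {
		if m := re.FindStringSubmatch(link); m != nil {
			fmt.Println("post id:", m[1]) // prints 1234567890
			return
		}
	}
	fmt.Println("id not found")
}
```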

57  module/config/config.go  Normal file
@@ -0,0 +1,57 @@
package config

import (
	"errors"
	"io/ioutil"
	"os"
	"path"

	"git.trj.tw/golang/fblook/module/utils"
	yaml "gopkg.in/yaml.v2"
)

// Config -
type Config struct {
	APIURL string `yaml:"api_url"`
	APIKey string `yaml:"api_key"`
}

var conf *Config

// LoadConfig -
func LoadConfig(p ...string) error {
	var fp string
	if len(p) > 0 && len(p[0]) > 0 {
		fp = p[0]
	} else {
		wd, err := os.Getwd()
		if err != nil {
			return err
		}
		fp = path.Join(wd, "config.yml")
	}
	fp = utils.ParsePath(fp)

	exists := utils.CheckExists(fp, false)
	if !exists {
		return errors.New("config file not exists")
	}

	data, err := ioutil.ReadFile(fp)
	if err != nil {
		return err
	}

	conf = &Config{}
	err = yaml.Unmarshal(data, conf)
	if err != nil {
		return err
	}

	return nil
}

// GetConf -
func GetConf() *Config {
	return conf
}

26  module/options/options.go  Normal file
@@ -0,0 +1,26 @@
package options

import (
	"flag"
)

// Options - flag options
type Options struct {
	Help   bool
	Config string
}

var opts *Options

// RegFlag - register flag
func RegFlag() {
	opts = &Options{}
	flag.StringVar(&opts.Config, "config", "", "config file path (default {PWD}/config.yml)")
	flag.StringVar(&opts.Config, "f", "", "config file path (short) (default {PWD}/config.yml)")
	flag.BoolVar(&opts.Help, "help", false, "show help")
}

// GetFlag -
func GetFlag() *Options {
	return opts
}

127  module/utils/utils.go  Normal file
@@ -0,0 +1,127 @@
package utils

import (
	"math"
	"os"
	"path"
	"reflect"
	"regexp"
	"runtime"
	"strings"
)

// PageObject -
type PageObject struct {
	Page   int `json:"page" cc:"page"`
	Total  int `json:"total" cc:"total"`
	Offset int `json:"offset" cc:"offset"`
	Limit  int `json:"limit" cc:"limit"`
}

// CalcPage -
func CalcPage(count, page, max int) (po PageObject) {
	if count < 0 {
		count = 0
	}
	if page < 1 {
		page = 1
	}
	if max < 1 {
		max = 1
	}

	total := int(math.Ceil(float64(count) / float64(max)))
	if total < 1 {
		total = 1
	}
	if page > total {
		page = total
	}
	offset := (page - 1) * max
	if offset > count {
		offset = count
	}
	limit := max

	po = PageObject{}
	po.Limit = limit
	po.Page = page
	po.Offset = offset
	po.Total = total

	return
}

// ToMap struct to map[string]interface{}
func ToMap(ss interface{}) map[string]interface{} {
	t := reflect.ValueOf(ss)
	if t.Kind() == reflect.Ptr {
		t = t.Elem()
	}

	smap := make(map[string]interface{})
	mtag := regexp.MustCompile(`cc:\"(.+)\"`)

	for i := 0; i < t.NumField(); i++ {
		f := t.Field(i)
		tag := string(t.Type().Field(i).Tag)
		str := mtag.FindStringSubmatch(tag)
		name := t.Type().Field(i).Name
		if len(str) > 1 {
			name = str[1]
		}
		if name != "-" {
			smap[name] = f.Interface()
		}
	}

	return smap
}

// ParsePath - parse file path to absPath
func ParsePath(dst string) string {
	wd, err := os.Getwd()
	if err != nil {
		wd = ""
	}

	if []rune(dst)[0] == '~' {
		home := UserHomeDir()
		if len(home) > 0 {
			dst = strings.Replace(dst, "~", home, -1)
		}
	}

	if path.IsAbs(dst) {
		dst = path.Clean(dst)
		return dst
	}

	str := path.Join(wd, dst)
	str = path.Clean(str)
	return str
}

// UserHomeDir - get user home directory
func UserHomeDir() string {
	env := "HOME"
	if runtime.GOOS == "windows" {
		env = "USERPROFILE"
	} else if runtime.GOOS == "plan9" {
		env = "home"
	}
	return os.Getenv(env)
}

// CheckExists - check file exists
func CheckExists(filePath string, allowDir bool) bool {
	filePath = ParsePath(filePath)
	stat, err := os.Stat(filePath)
	if err != nil && !os.IsExist(err) {
		return false
	}
	if !allowDir && stat.IsDir() {
		return false
	}
	return true
}
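
As a quick illustration of the pagination arithmetic in CalcPage, a sketch (hypothetical usage, not part of the commit): 45 items, page 3, and a page size of 10 give total = ceil(45/10) = 5 and offset = (3 - 1) * 10 = 20.

```go
package main

import (
	"fmt"

	"git.trj.tw/golang/fblook/module/utils"
)

func main() {
	// 45 items, requesting page 3 with 10 items per page.
	po := utils.CalcPage(45, 3, 10)
	// Expected: page=3 total=5 offset=20 limit=10
	fmt.Printf("page=%d total=%d offset=%d limit=%d\n", po.Page, po.Total, po.Offset, po.Limit)
}
```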

1  vendor/github.com/PuerkitoBio/goquery/.gitattributes  generated  vendored  Normal file
@@ -0,0 +1 @@
testdata/* linguist-vendored

16  vendor/github.com/PuerkitoBio/goquery/.gitignore  generated  vendored  Normal file
@@ -0,0 +1,16 @@
# editor temporary files
*.sublime-*
.DS_Store
*.swp
#*.*#
tags

# direnv config
.env*

# test binaries
*.test

# coverage and profile outputs
*.out


15  vendor/github.com/PuerkitoBio/goquery/.travis.yml  generated  vendored  Normal file
@@ -0,0 +1,15 @@
language: go

go:
  - 1.1
  - 1.2.x
  - 1.3.x
  - 1.4.x
  - 1.5.x
  - 1.6.x
  - 1.7.x
  - 1.8.x
  - 1.9.x
  - "1.10.x"
  - tip


12  vendor/github.com/PuerkitoBio/goquery/LICENSE  generated  vendored  Normal file
@@ -0,0 +1,12 @@
Copyright (c) 2012-2016, Martin Angers & Contributors
All rights reserved.

Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.

* Neither the name of the author nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

177  vendor/github.com/PuerkitoBio/goquery/README.md  generated  vendored  Normal file
@@ -0,0 +1,177 @@
# goquery - a little like that j-thing, only in Go

[![build status](https://secure.travis-ci.org/PuerkitoBio/goquery.svg?branch=master)](http://travis-ci.org/PuerkitoBio/goquery) [![GoDoc](https://godoc.org/github.com/PuerkitoBio/goquery?status.png)](http://godoc.org/github.com/PuerkitoBio/goquery) [![Sourcegraph Badge](https://sourcegraph.com/github.com/PuerkitoBio/goquery/-/badge.svg)](https://sourcegraph.com/github.com/PuerkitoBio/goquery?badge)

goquery brings a syntax and a set of features similar to [jQuery][] to the [Go language][go]. It is based on Go's [net/html package][html] and the CSS Selector library [cascadia][]. Since the net/html parser returns nodes, and not a full-featured DOM tree, jQuery's stateful manipulation functions (like height(), css(), detach()) have been left off.

Also, because the net/html parser requires UTF-8 encoding, so does goquery: it is the caller's responsibility to ensure that the source document provides UTF-8 encoded HTML. See the [wiki][] for various options to do this.

Syntax-wise, it is as close as possible to jQuery, with the same function names when possible, and that warm and fuzzy chainable interface. jQuery being the ultra-popular library that it is, I felt that writing a similar HTML-manipulating library was better to follow its API than to start anew (in the same spirit as Go's `fmt` package), even though some of its methods are less than intuitive (looking at you, [index()][index]...).

## Table of Contents

* [Installation](#installation)
* [Changelog](#changelog)
* [API](#api)
* [Examples](#examples)
* [Related Projects](#related-projects)
* [Support](#support)
* [License](#license)

## Installation

Please note that because of the net/html dependency, goquery requires Go1.1+.

    $ go get github.com/PuerkitoBio/goquery

(optional) To run unit tests:

    $ cd $GOPATH/src/github.com/PuerkitoBio/goquery
    $ go test

(optional) To run benchmarks (warning: it runs for a few minutes):

    $ cd $GOPATH/src/github.com/PuerkitoBio/goquery
    $ go test -bench=".*"

## Changelog

**Note that goquery's API is now stable, and will not break.**

* **2018-06-07 (v1.4.1)** : Add `NewDocumentFromReader` examples.
* **2018-03-24 (v1.4.0)** : Deprecate `NewDocument(url)` and `NewDocumentFromResponse(response)`.
* **2018-01-28 (v1.3.0)** : Add `ToEnd` constant to `Slice` until the end of the selection (thanks to @davidjwilkins for raising the issue).
* **2018-01-11 (v1.2.0)** : Add `AddBack*` and deprecate `AndSelf` (thanks to @davidjwilkins).
* **2017-02-12 (v1.1.0)** : Add `SetHtml` and `SetText` (thanks to @glebtv).
* **2016-12-29 (v1.0.2)** : Optimize allocations for `Selection.Text` (thanks to @radovskyb).
* **2016-08-28 (v1.0.1)** : Optimize performance for large documents.
* **2016-07-27 (v1.0.0)** : Tag version 1.0.0.
* **2016-06-15** : Invalid selector strings internally compile to a `Matcher` implementation that never matches any node (instead of a panic). So for example, `doc.Find("~")` returns an empty `*Selection` object.
* **2016-02-02** : Add `NodeName` utility function similar to the DOM's `nodeName` property. It returns the tag name of the first element in a selection, and other relevant values of non-element nodes (see godoc for details). Add `OuterHtml` utility function similar to the DOM's `outerHTML` property (named `OuterHtml` in small caps for consistency with the existing `Html` method on the `Selection`).
* **2015-04-20** : Add `AttrOr` helper method to return the attribute's value or a default value if absent. Thanks to [piotrkowalczuk][piotr].
* **2015-02-04** : Add more manipulation functions - Prepend* - thanks again to [Andrew Stone][thatguystone].
* **2014-11-28** : Add more manipulation functions - ReplaceWith*, Wrap* and Unwrap - thanks again to [Andrew Stone][thatguystone].
* **2014-11-07** : Add manipulation functions (thanks to [Andrew Stone][thatguystone]) and `*Matcher` functions, that receive compiled cascadia selectors instead of selector strings, thus avoiding potential panics thrown by goquery via `cascadia.MustCompile` calls. This results in better performance (selectors can be compiled once and reused) and more idiomatic error handling (you can handle cascadia's compilation errors, instead of recovering from panics, which had been bugging me for a long time). Note that the actual type expected is a `Matcher` interface, that `cascadia.Selector` implements. Other matcher implementations could be used.
* **2014-11-06** : Change import paths of net/html to golang.org/x/net/html (see https://groups.google.com/forum/#!topic/golang-nuts/eD8dh3T9yyA). Make sure to update your code to use the new import path too when you call goquery with `html.Node`s.
* **v0.3.2** : Add `NewDocumentFromReader()` (thanks jweir) which allows creating a goquery document from an io.Reader.
* **v0.3.1** : Add `NewDocumentFromResponse()` (thanks assassingj) which allows creating a goquery document from an http response.
* **v0.3.0** : Add `EachWithBreak()` which allows to break out of an `Each()` loop by returning false. This function was added instead of changing the existing `Each()` to avoid breaking compatibility.
* **v0.2.1** : Make go-getable, now that [go.net/html is Go1.0-compatible][gonet] (thanks to @matrixik for pointing this out).
* **v0.2.0** : Add support for negative indices in Slice(). **BREAKING CHANGE** `Document.Root` is removed, `Document` is now a `Selection` itself (a selection of one, the root element, just like `Document.Root` was before). Add jQuery's Closest() method.
* **v0.1.1** : Add benchmarks to use as baseline for refactorings, refactor Next...() and Prev...() methods to use the new html package's linked list features (Next/PrevSibling, FirstChild). Good performance boost (40+% in some cases).
* **v0.1.0** : Initial release.

## API

goquery exposes two structs, `Document` and `Selection`, and the `Matcher` interface. Unlike jQuery, which is loaded as part of a DOM document, and thus acts on its containing document, goquery doesn't know which HTML document to act upon. So it needs to be told, and that's what the `Document` type is for. It holds the root document node as the initial Selection value to manipulate.

jQuery often has many variants for the same function (no argument, a selector string argument, a jQuery object argument, a DOM element argument, ...). Instead of exposing the same features in goquery as a single method with variadic empty interface arguments, statically-typed signatures are used following this naming convention:

* When the jQuery equivalent can be called with no argument, it has the same name as jQuery for the no argument signature (e.g.: `Prev()`), and the version with a selector string argument is called `XxxFiltered()` (e.g.: `PrevFiltered()`)
* When the jQuery equivalent **requires** one argument, the same name as jQuery is used for the selector string version (e.g.: `Is()`)
* The signatures accepting a jQuery object as argument are defined in goquery as `XxxSelection()` and take a `*Selection` object as argument (e.g.: `FilterSelection()`)
* The signatures accepting a DOM element as argument in jQuery are defined in goquery as `XxxNodes()` and take a variadic argument of type `*html.Node` (e.g.: `FilterNodes()`)
* The signatures accepting a function as argument in jQuery are defined in goquery as `XxxFunction()` and take a function as argument (e.g.: `FilterFunction()`)
* The goquery methods that can be called with a selector string have a corresponding version that take a `Matcher` interface and are defined as `XxxMatcher()` (e.g.: `IsMatcher()`)

Utility functions that are not in jQuery but are useful in Go are implemented as functions (that take a `*Selection` as parameter), to avoid a potential naming clash on the `*Selection`'s methods (reserved for jQuery-equivalent behaviour).

The complete [godoc reference documentation can be found here][doc].

Please note that Cascadia's selectors do not necessarily match all supported selectors of jQuery (Sizzle). See the [cascadia project][cascadia] for details. Invalid selector strings compile to a `Matcher` that fails to match any node. Behaviour of the various functions that take a selector string as argument follows from that fact, e.g. (where `~` is an invalid selector string):

* `Find("~")` returns an empty selection because the selector string doesn't match anything.
* `Add("~")` returns a new selection that holds the same nodes as the original selection, because it didn't add any node (selector string didn't match anything).
* `ParentsFiltered("~")` returns an empty selection because the selector string doesn't match anything.
* `ParentsUntil("~")` returns all parents of the selection because the selector string didn't match any element to stop before the top element.

## Examples

See some tips and tricks in the [wiki][].

Adapted from example_test.go:

```Go
package main

import (
	"fmt"
	"log"
	"net/http"

	"github.com/PuerkitoBio/goquery"
)

func ExampleScrape() {
	// Request the HTML page.
	res, err := http.Get("http://metalsucks.net")
	if err != nil {
		log.Fatal(err)
	}
	defer res.Body.Close()
	if res.StatusCode != 200 {
		log.Fatalf("status code error: %d %s", res.StatusCode, res.Status)
	}

	// Load the HTML document
	doc, err := goquery.NewDocumentFromReader(res.Body)
	if err != nil {
		log.Fatal(err)
	}

	// Find the review items
	doc.Find(".sidebar-reviews article .content-block").Each(func(i int, s *goquery.Selection) {
		// For each item found, get the band and title
		band := s.Find("a").Text()
		title := s.Find("i").Text()
		fmt.Printf("Review %d: %s - %s\n", i, band, title)
	})
}

func main() {
	ExampleScrape()
}
```

## Related Projects

- [Goq][goq], an HTML deserialization and scraping library based on goquery and struct tags.
- [andybalholm/cascadia][cascadia], the CSS selector library used by goquery.
- [suntong/cascadia][cascadiacli], a command-line interface to the cascadia CSS selector library, useful to test selectors.
- [asciimoo/colly](https://github.com/asciimoo/colly), a lightning fast and elegant Scraping Framework
- [gnulnx/goperf](https://github.com/gnulnx/goperf), a website performance test tool that also fetches static assets.

## Support

There are a number of ways you can support the project:

* Use it, star it, build something with it, spread the word!
  - If you do build something open-source or otherwise publicly-visible, let me know so I can add it to the [Related Projects](#related-projects) section!
* Raise issues to improve the project (note: doc typos and clarifications are issues too!)
  - Please search existing issues before opening a new one - it may have already been addressed.
* Pull requests: please discuss new code in an issue first, unless the fix is really trivial.
  - Make sure new code is tested.
  - Be mindful of existing code - PRs that break existing code have a high probability of being declined, unless it fixes a serious issue.

If you desperately want to send money my way, I have a BuyMeACoffee.com page:

<a href="https://www.buymeacoffee.com/mna" target="_blank"><img src="https://www.buymeacoffee.com/assets/img/custom_images/orange_img.png" alt="Buy Me A Coffee" style="height: 41px !important;width: 174px !important;box-shadow: 0px 3px 2px 0px rgba(190, 190, 190, 0.5) !important;-webkit-box-shadow: 0px 3px 2px 0px rgba(190, 190, 190, 0.5) !important;" ></a>

## License

The [BSD 3-Clause license][bsd], the same as the [Go language][golic]. Cascadia's license is [here][caslic].

[jquery]: http://jquery.com/
[go]: http://golang.org/
[cascadia]: https://github.com/andybalholm/cascadia
[cascadiacli]: https://github.com/suntong/cascadia
[bsd]: http://opensource.org/licenses/BSD-3-Clause
[golic]: http://golang.org/LICENSE
[caslic]: https://github.com/andybalholm/cascadia/blob/master/LICENSE
[doc]: http://godoc.org/github.com/PuerkitoBio/goquery
[index]: http://api.jquery.com/index/
[gonet]: https://github.com/golang/net/
[html]: http://godoc.org/golang.org/x/net/html
[wiki]: https://github.com/PuerkitoBio/goquery/wiki/Tips-and-tricks
[thatguystone]: https://github.com/thatguystone
[piotr]: https://github.com/piotrkowalczuk
[goq]: https://github.com/andrewstuart/goq

124  vendor/github.com/PuerkitoBio/goquery/array.go  generated  vendored  Normal file
@@ -0,0 +1,124 @@
package goquery

import (
	"golang.org/x/net/html"
)

const (
	maxUint = ^uint(0)
	maxInt  = int(maxUint >> 1)

	// ToEnd is a special index value that can be used as end index in a call
	// to Slice so that all elements are selected until the end of the Selection.
	// It is equivalent to passing (*Selection).Length().
	ToEnd = maxInt
)

// First reduces the set of matched elements to the first in the set.
// It returns a new Selection object, and an empty Selection object if
// the selection is empty.
func (s *Selection) First() *Selection {
	return s.Eq(0)
}

// Last reduces the set of matched elements to the last in the set.
// It returns a new Selection object, and an empty Selection object if
// the selection is empty.
func (s *Selection) Last() *Selection {
	return s.Eq(-1)
}

// Eq reduces the set of matched elements to the one at the specified index.
// If a negative index is given, it counts backwards starting at the end of the
// set. It returns a new Selection object, and an empty Selection object if the
// index is invalid.
func (s *Selection) Eq(index int) *Selection {
	if index < 0 {
		index += len(s.Nodes)
	}

	if index >= len(s.Nodes) || index < 0 {
		return newEmptySelection(s.document)
	}

	return s.Slice(index, index+1)
}

// Slice reduces the set of matched elements to a subset specified by a range
// of indices. The start index is 0-based and indicates the index of the first
// element to select. The end index is 0-based and indicates the index at which
// the elements stop being selected (the end index is not selected).
//
// The indices may be negative, in which case they represent an offset from the
// end of the selection.
//
// The special value ToEnd may be specified as end index, in which case all elements
// until the end are selected. This works both for a positive and negative start
// index.
func (s *Selection) Slice(start, end int) *Selection {
	if start < 0 {
		start += len(s.Nodes)
	}
	if end == ToEnd {
		end = len(s.Nodes)
	} else if end < 0 {
		end += len(s.Nodes)
	}
	return pushStack(s, s.Nodes[start:end])
}

// Get retrieves the underlying node at the specified index.
// Get without parameter is not implemented, since the node array is available
// on the Selection object.
func (s *Selection) Get(index int) *html.Node {
	if index < 0 {
		index += len(s.Nodes) // Negative index gets from the end
	}
	return s.Nodes[index]
}

// Index returns the position of the first element within the Selection object
// relative to its sibling elements.
func (s *Selection) Index() int {
	if len(s.Nodes) > 0 {
		return newSingleSelection(s.Nodes[0], s.document).PrevAll().Length()
	}
	return -1
}

// IndexSelector returns the position of the first element within the
// Selection object relative to the elements matched by the selector, or -1 if
// not found.
func (s *Selection) IndexSelector(selector string) int {
	if len(s.Nodes) > 0 {
		sel := s.document.Find(selector)
		return indexInSlice(sel.Nodes, s.Nodes[0])
	}
	return -1
}

// IndexMatcher returns the position of the first element within the
// Selection object relative to the elements matched by the matcher, or -1 if
// not found.
func (s *Selection) IndexMatcher(m Matcher) int {
	if len(s.Nodes) > 0 {
		sel := s.document.FindMatcher(m)
		return indexInSlice(sel.Nodes, s.Nodes[0])
	}
	return -1
}

// IndexOfNode returns the position of the specified node within the Selection
// object, or -1 if not found.
func (s *Selection) IndexOfNode(node *html.Node) int {
	return indexInSlice(s.Nodes, node)
}

// IndexOfSelection returns the position of the first node in the specified
// Selection object within this Selection object, or -1 if not found.
func (s *Selection) IndexOfSelection(sel *Selection) int {
	if sel != nil && len(sel.Nodes) > 0 {
		return indexInSlice(s.Nodes, sel.Nodes[0])
	}
	return -1
}

123  vendor/github.com/PuerkitoBio/goquery/doc.go  generated  vendored  Normal file
@@ -0,0 +1,123 @@
// Copyright (c) 2012-2016, Martin Angers & Contributors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation and/or
// other materials provided with the distribution.
// * Neither the name of the author nor the names of its contributors may be used to
// endorse or promote products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
// OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
// AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY
// WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

/*
Package goquery implements features similar to jQuery, including the chainable
syntax, to manipulate and query an HTML document.

It brings a syntax and a set of features similar to jQuery to the Go language.
It is based on Go's net/html package and the CSS Selector library cascadia.
Since the net/html parser returns nodes, and not a full-featured DOM
tree, jQuery's stateful manipulation functions (like height(), css(), detach())
have been left off.

Also, because the net/html parser requires UTF-8 encoding, so does goquery: it is
the caller's responsibility to ensure that the source document provides UTF-8 encoded HTML.
See the repository's wiki for various options on how to do this.

Syntax-wise, it is as close as possible to jQuery, with the same method names when
possible, and that warm and fuzzy chainable interface. jQuery being the
ultra-popular library that it is, writing a similar HTML-manipulating
library was better to follow its API than to start anew (in the same spirit as
Go's fmt package), even though some of its methods are less than intuitive (looking
at you, index()...).

It is hosted on GitHub, along with additional documentation in the README.md
file: https://github.com/puerkitobio/goquery

Please note that because of the net/html dependency, goquery requires Go1.1+.

The various methods are split into files based on the category of behavior.
The three dots (...) indicate that various "overloads" are available.

* array.go : array-like positional manipulation of the selection.
    - Eq()
    - First()
    - Get()
    - Index...()
    - Last()
    - Slice()

* expand.go : methods that expand or augment the selection's set.
    - Add...()
    - AndSelf()
    - Union(), which is an alias for AddSelection()

* filter.go : filtering methods, that reduce the selection's set.
    - End()
    - Filter...()
    - Has...()
    - Intersection(), which is an alias of FilterSelection()
    - Not...()

* iteration.go : methods to loop over the selection's nodes.
    - Each()
    - EachWithBreak()
    - Map()

* manipulation.go : methods for modifying the document
    - After...()
    - Append...()
    - Before...()
    - Clone()
    - Empty()
    - Prepend...()
    - Remove...()
    - ReplaceWith...()
    - Unwrap()
    - Wrap...()
    - WrapAll...()
    - WrapInner...()

* property.go : methods that inspect and get the node's properties values.
    - Attr*(), RemoveAttr(), SetAttr()
    - AddClass(), HasClass(), RemoveClass(), ToggleClass()
    - Html()
    - Length()
    - Size(), which is an alias for Length()
    - Text()

* query.go : methods that query, or reflect, a node's identity.
    - Contains()
    - Is...()

* traversal.go : methods to traverse the HTML document tree.
    - Children...()
    - Contents()
    - Find...()
    - Next...()
    - Parent[s]...()
    - Prev...()
    - Siblings...()

* type.go : definition of the types exposed by goquery.
    - Document
    - Selection
    - Matcher

* utilities.go : definition of helper functions (and not methods on a *Selection)
that are not part of jQuery, but are useful to goquery.
    - NodeName
    - OuterHtml
*/
package goquery

70  vendor/github.com/PuerkitoBio/goquery/expand.go  generated  vendored  Normal file
@@ -0,0 +1,70 @@
package goquery

import "golang.org/x/net/html"

// Add adds the selector string's matching nodes to those in the current
// selection and returns a new Selection object.
// The selector string is run in the context of the document of the current
// Selection object.
func (s *Selection) Add(selector string) *Selection {
	return s.AddNodes(findWithMatcher([]*html.Node{s.document.rootNode}, compileMatcher(selector))...)
}

// AddMatcher adds the matcher's matching nodes to those in the current
// selection and returns a new Selection object.
// The matcher is run in the context of the document of the current
// Selection object.
func (s *Selection) AddMatcher(m Matcher) *Selection {
	return s.AddNodes(findWithMatcher([]*html.Node{s.document.rootNode}, m)...)
}

// AddSelection adds the specified Selection object's nodes to those in the
// current selection and returns a new Selection object.
func (s *Selection) AddSelection(sel *Selection) *Selection {
	if sel == nil {
		return s.AddNodes()
	}
	return s.AddNodes(sel.Nodes...)
}

// Union is an alias for AddSelection.
func (s *Selection) Union(sel *Selection) *Selection {
	return s.AddSelection(sel)
}

// AddNodes adds the specified nodes to those in the
// current selection and returns a new Selection object.
func (s *Selection) AddNodes(nodes ...*html.Node) *Selection {
	return pushStack(s, appendWithoutDuplicates(s.Nodes, nodes, nil))
}

// AndSelf adds the previous set of elements on the stack to the current set.
// It returns a new Selection object containing the current Selection combined
// with the previous one.
// Deprecated: This function has been deprecated and is now an alias for AddBack().
func (s *Selection) AndSelf() *Selection {
	return s.AddBack()
}

// AddBack adds the previous set of elements on the stack to the current set.
// It returns a new Selection object containing the current Selection combined
// with the previous one.
func (s *Selection) AddBack() *Selection {
	return s.AddSelection(s.prevSel)
}

// AddBackFiltered reduces the previous set of elements on the stack to those that
// match the selector string, and adds them to the current set.
// It returns a new Selection object containing the current Selection combined
// with the filtered previous one
func (s *Selection) AddBackFiltered(selector string) *Selection {
	return s.AddSelection(s.prevSel.Filter(selector))
}

// AddBackMatcher reduces the previous set of elements on the stack to those that match
// the matcher, and adds them to the current set.
// It returns a new Selection object containing the current Selection combined
// with the filtered previous one
func (s *Selection) AddBackMatcher(m Matcher) *Selection {
	return s.AddSelection(s.prevSel.FilterMatcher(m))
}
163
vendor/github.com/PuerkitoBio/goquery/filter.go
generated
vendored
Normal file
163
vendor/github.com/PuerkitoBio/goquery/filter.go
generated
vendored
Normal file
@ -0,0 +1,163 @@
|
|||||||
|
package goquery
|
||||||
|
|
||||||
|
import "golang.org/x/net/html"
|
||||||
|
|
||||||
|
// Filter reduces the set of matched elements to those that match the selector string.
|
||||||
|
// It returns a new Selection object for this subset of matching elements.
|
||||||
|
func (s *Selection) Filter(selector string) *Selection {
|
||||||
|
return s.FilterMatcher(compileMatcher(selector))
|
||||||
|
}
|
||||||
|
|
||||||
|
// FilterMatcher reduces the set of matched elements to those that match
|
||||||
|
// the given matcher. It returns a new Selection object for this subset
|
||||||
|
// of matching elements.
|
||||||
|
func (s *Selection) FilterMatcher(m Matcher) *Selection {
|
||||||
|
return pushStack(s, winnow(s, m, true))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Not removes elements from the Selection that match the selector string.
|
||||||
|
// It returns a new Selection object with the matching elements removed.
|
||||||
|
func (s *Selection) Not(selector string) *Selection {
|
||||||
|
return s.NotMatcher(compileMatcher(selector))
|
||||||
|
}
|
||||||
|
|
||||||
|
// NotMatcher removes elements from the Selection that match the given matcher.
|
||||||
|
// It returns a new Selection object with the matching elements removed.
|
||||||
|
func (s *Selection) NotMatcher(m Matcher) *Selection {
|
||||||
|
return pushStack(s, winnow(s, m, false))
|
||||||
|
}
|
||||||
|
|
||||||
|
// FilterFunction reduces the set of matched elements to those that pass the function's test.
|
||||||
|
// It returns a new Selection object for this subset of elements.
|
||||||
|
func (s *Selection) FilterFunction(f func(int, *Selection) bool) *Selection {
|
||||||
|
return pushStack(s, winnowFunction(s, f, true))
|
||||||
|
}
|
||||||
|
|
||||||
|
// NotFunction removes elements from the Selection that pass the function's test.
|
||||||
|
// It returns a new Selection object with the matching elements removed.
|
||||||
|
func (s *Selection) NotFunction(f func(int, *Selection) bool) *Selection {
|
||||||
|
return pushStack(s, winnowFunction(s, f, false))
|
||||||
|
}
|
||||||
|
|
||||||
|
// FilterNodes reduces the set of matched elements to those that match the specified nodes.
|
||||||
|
// It returns a new Selection object for this subset of elements.
func (s *Selection) FilterNodes(nodes ...*html.Node) *Selection {
	return pushStack(s, winnowNodes(s, nodes, true))
}

// NotNodes removes elements from the Selection that match the specified nodes.
// It returns a new Selection object with the matching elements removed.
func (s *Selection) NotNodes(nodes ...*html.Node) *Selection {
	return pushStack(s, winnowNodes(s, nodes, false))
}

// FilterSelection reduces the set of matched elements to those that match a
// node in the specified Selection object.
// It returns a new Selection object for this subset of elements.
func (s *Selection) FilterSelection(sel *Selection) *Selection {
	if sel == nil {
		return pushStack(s, winnowNodes(s, nil, true))
	}
	return pushStack(s, winnowNodes(s, sel.Nodes, true))
}

// NotSelection removes elements from the Selection that match a node in the specified
// Selection object. It returns a new Selection object with the matching elements removed.
func (s *Selection) NotSelection(sel *Selection) *Selection {
	if sel == nil {
		return pushStack(s, winnowNodes(s, nil, false))
	}
	return pushStack(s, winnowNodes(s, sel.Nodes, false))
}

// Intersection is an alias for FilterSelection.
func (s *Selection) Intersection(sel *Selection) *Selection {
	return s.FilterSelection(sel)
}

// Has reduces the set of matched elements to those that have a descendant
// that matches the selector.
// It returns a new Selection object with the matching elements.
func (s *Selection) Has(selector string) *Selection {
	return s.HasSelection(s.document.Find(selector))
}

// HasMatcher reduces the set of matched elements to those that have a descendant
// that matches the matcher.
// It returns a new Selection object with the matching elements.
func (s *Selection) HasMatcher(m Matcher) *Selection {
	return s.HasSelection(s.document.FindMatcher(m))
}

// HasNodes reduces the set of matched elements to those that have a
// descendant that matches one of the nodes.
// It returns a new Selection object with the matching elements.
func (s *Selection) HasNodes(nodes ...*html.Node) *Selection {
	return s.FilterFunction(func(_ int, sel *Selection) bool {
		// Add all nodes that contain one of the specified nodes
		for _, n := range nodes {
			if sel.Contains(n) {
				return true
			}
		}
		return false
	})
}

// HasSelection reduces the set of matched elements to those that have a
// descendant that matches one of the nodes of the specified Selection object.
// It returns a new Selection object with the matching elements.
func (s *Selection) HasSelection(sel *Selection) *Selection {
	if sel == nil {
		return s.HasNodes()
	}
	return s.HasNodes(sel.Nodes...)
}

// End ends the most recent filtering operation in the current chain and
// returns the set of matched elements to its previous state.
func (s *Selection) End() *Selection {
	if s.prevSel != nil {
		return s.prevSel
	}
	return newEmptySelection(s.document)
}

// Filter based on the matcher, and the indicator to keep (Filter) or
// to get rid of (Not) the matching elements.
func winnow(sel *Selection, m Matcher, keep bool) []*html.Node {
	// Optimize if keep is requested
	if keep {
		return m.Filter(sel.Nodes)
	}
	// Use grep
	return grep(sel, func(i int, s *Selection) bool {
		return !m.Match(s.Get(0))
	})
}

// Filter based on an array of nodes, and the indicator to keep (Filter) or
// to get rid of (Not) the matching elements.
func winnowNodes(sel *Selection, nodes []*html.Node, keep bool) []*html.Node {
	if len(nodes)+len(sel.Nodes) < minNodesForSet {
		return grep(sel, func(i int, s *Selection) bool {
			return isInSlice(nodes, s.Get(0)) == keep
		})
	}

	set := make(map[*html.Node]bool)
	for _, n := range nodes {
		set[n] = true
	}
	return grep(sel, func(i int, s *Selection) bool {
		return set[s.Get(0)] == keep
	})
}

// Filter based on a function test, and the indicator to keep (Filter) or
// to get rid of (Not) the matching elements.
func winnowFunction(sel *Selection, f func(int, *Selection) bool, keep bool) []*html.Node {
	return grep(sel, func(i int, s *Selection) bool {
		return f(i, s) == keep
	})
}
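Editor's note: the filtering helpers above (Filter, Not, Has, End) are driven from application code. The following is a minimal, self-contained usage sketch, not part of the vendored file; the HTML snippet, variable names and expected output are invented for illustration.

package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	// Parse a small document; the markup is made up for the example.
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(
		`<ul><li class="a">1</li><li>2</li><li class="a">3</li></ul>`))
	if err != nil {
		log.Fatal(err)
	}
	withA := doc.Find("li").Filter(".a") // keep only <li class="a">
	all := withA.End()                   // back out of the last filter
	fmt.Println(withA.Length(), all.Length()) // expected: 2 3
}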
39
vendor/github.com/PuerkitoBio/goquery/iteration.go
generated
vendored
Normal file
@ -0,0 +1,39 @@
package goquery

// Each iterates over a Selection object, executing a function for each
// matched element. It returns the current Selection object. The function
// f is called for each element in the selection with the index of the
// element in that selection starting at 0, and a *Selection that contains
// only that element.
func (s *Selection) Each(f func(int, *Selection)) *Selection {
	for i, n := range s.Nodes {
		f(i, newSingleSelection(n, s.document))
	}
	return s
}

// EachWithBreak iterates over a Selection object, executing a function for each
// matched element. It is identical to Each except that it is possible to break
// out of the loop by returning false in the callback function. It returns the
// current Selection object.
func (s *Selection) EachWithBreak(f func(int, *Selection) bool) *Selection {
	for i, n := range s.Nodes {
		if !f(i, newSingleSelection(n, s.document)) {
			return s
		}
	}
	return s
}

// Map passes each element in the current matched set through a function,
// producing a slice of string holding the returned values. The function
// f is called for each element in the selection with the index of the
// element in that selection starting at 0, and a *Selection that contains
// only that element.
func (s *Selection) Map(f func(int, *Selection) string) (result []string) {
	for i, n := range s.Nodes {
		result = append(result, f(i, newSingleSelection(n, s.document)))
	}

	return result
}
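Editor's note: a small sketch of the iteration helpers (Each, Map) defined above. It is illustrative only and not part of the vendored file; the markup and expected output are assumptions made for the example.

package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(
		`<p>a</p><p>b</p><p>c</p>`))
	if err != nil {
		log.Fatal(err)
	}
	// Each visits every matched element; Map collects one string per element.
	doc.Find("p").Each(func(i int, s *goquery.Selection) {
		fmt.Println(i, s.Text())
	})
	texts := doc.Find("p").Map(func(i int, s *goquery.Selection) string {
		return s.Text()
	})
	fmt.Println(strings.Join(texts, ",")) // expected: a,b,c
}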
574
vendor/github.com/PuerkitoBio/goquery/manipulation.go
generated
vendored
Normal file
@ -0,0 +1,574 @@
package goquery

import (
	"strings"

	"golang.org/x/net/html"
)

// After applies the selector from the root document and inserts the matched elements
// after the elements in the set of matched elements.
//
// If one of the matched elements in the selection is not currently in the
// document, it's impossible to insert nodes after it, so it will be ignored.
//
// This follows the same rules as Selection.Append.
func (s *Selection) After(selector string) *Selection {
	return s.AfterMatcher(compileMatcher(selector))
}

// AfterMatcher applies the matcher from the root document and inserts the matched elements
// after the elements in the set of matched elements.
//
// If one of the matched elements in the selection is not currently in the
// document, it's impossible to insert nodes after it, so it will be ignored.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AfterMatcher(m Matcher) *Selection {
	return s.AfterNodes(m.MatchAll(s.document.rootNode)...)
}

// AfterSelection inserts the elements in the selection after each element in the set of matched
// elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AfterSelection(sel *Selection) *Selection {
	return s.AfterNodes(sel.Nodes...)
}

// AfterHtml parses the html and inserts it after the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AfterHtml(html string) *Selection {
	return s.AfterNodes(parseHtml(html)...)
}

// AfterNodes inserts the nodes after each element in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AfterNodes(ns ...*html.Node) *Selection {
	return s.manipulateNodes(ns, true, func(sn *html.Node, n *html.Node) {
		if sn.Parent != nil {
			sn.Parent.InsertBefore(n, sn.NextSibling)
		}
	})
}

// Append appends the elements specified by the selector to the end of each element
// in the set of matched elements, following those rules:
//
// 1) The selector is applied to the root document.
//
// 2) Elements that are part of the document will be moved to the new location.
//
// 3) If there are multiple locations to append to, cloned nodes will be
// appended to all target locations except the last one, which will be moved
// as noted in (2).
func (s *Selection) Append(selector string) *Selection {
	return s.AppendMatcher(compileMatcher(selector))
}

// AppendMatcher appends the elements specified by the matcher to the end of each element
// in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AppendMatcher(m Matcher) *Selection {
	return s.AppendNodes(m.MatchAll(s.document.rootNode)...)
}

// AppendSelection appends the elements in the selection to the end of each element
// in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AppendSelection(sel *Selection) *Selection {
	return s.AppendNodes(sel.Nodes...)
}

// AppendHtml parses the html and appends it to the set of matched elements.
func (s *Selection) AppendHtml(html string) *Selection {
	return s.AppendNodes(parseHtml(html)...)
}

// AppendNodes appends the specified nodes to each node in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AppendNodes(ns ...*html.Node) *Selection {
	return s.manipulateNodes(ns, false, func(sn *html.Node, n *html.Node) {
		sn.AppendChild(n)
	})
}

// Before inserts the matched elements before each element in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) Before(selector string) *Selection {
	return s.BeforeMatcher(compileMatcher(selector))
}

// BeforeMatcher inserts the matched elements before each element in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) BeforeMatcher(m Matcher) *Selection {
	return s.BeforeNodes(m.MatchAll(s.document.rootNode)...)
}

// BeforeSelection inserts the elements in the selection before each element in the set of matched
// elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) BeforeSelection(sel *Selection) *Selection {
	return s.BeforeNodes(sel.Nodes...)
}

// BeforeHtml parses the html and inserts it before the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) BeforeHtml(html string) *Selection {
	return s.BeforeNodes(parseHtml(html)...)
}

// BeforeNodes inserts the nodes before each element in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) BeforeNodes(ns ...*html.Node) *Selection {
	return s.manipulateNodes(ns, false, func(sn *html.Node, n *html.Node) {
		if sn.Parent != nil {
			sn.Parent.InsertBefore(n, sn)
		}
	})
}

// Clone creates a deep copy of the set of matched nodes. The new nodes will not be
// attached to the document.
func (s *Selection) Clone() *Selection {
	ns := newEmptySelection(s.document)
	ns.Nodes = cloneNodes(s.Nodes)
	return ns
}

// Empty removes all children nodes from the set of matched elements.
// It returns the children nodes in a new Selection.
func (s *Selection) Empty() *Selection {
	var nodes []*html.Node

	for _, n := range s.Nodes {
		for c := n.FirstChild; c != nil; c = n.FirstChild {
			n.RemoveChild(c)
			nodes = append(nodes, c)
		}
	}

	return pushStack(s, nodes)
}

// Prepend prepends the elements specified by the selector to each element in
// the set of matched elements, following the same rules as Append.
func (s *Selection) Prepend(selector string) *Selection {
	return s.PrependMatcher(compileMatcher(selector))
}

// PrependMatcher prepends the elements specified by the matcher to each
// element in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) PrependMatcher(m Matcher) *Selection {
	return s.PrependNodes(m.MatchAll(s.document.rootNode)...)
}

// PrependSelection prepends the elements in the selection to each element in
// the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) PrependSelection(sel *Selection) *Selection {
	return s.PrependNodes(sel.Nodes...)
}

// PrependHtml parses the html and prepends it to the set of matched elements.
func (s *Selection) PrependHtml(html string) *Selection {
	return s.PrependNodes(parseHtml(html)...)
}

// PrependNodes prepends the specified nodes to each node in the set of
// matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) PrependNodes(ns ...*html.Node) *Selection {
	return s.manipulateNodes(ns, true, func(sn *html.Node, n *html.Node) {
		// sn.FirstChild may be nil, in which case this functions like
		// sn.AppendChild()
		sn.InsertBefore(n, sn.FirstChild)
	})
}

// Remove removes the set of matched elements from the document.
// It returns the same selection, now consisting of nodes not in the document.
func (s *Selection) Remove() *Selection {
	for _, n := range s.Nodes {
		if n.Parent != nil {
			n.Parent.RemoveChild(n)
		}
	}

	return s
}

// RemoveFiltered removes the set of matched elements by selector.
// It returns the Selection of removed nodes.
func (s *Selection) RemoveFiltered(selector string) *Selection {
	return s.RemoveMatcher(compileMatcher(selector))
}

// RemoveMatcher removes the set of matched elements.
// It returns the Selection of removed nodes.
func (s *Selection) RemoveMatcher(m Matcher) *Selection {
	return s.FilterMatcher(m).Remove()
}

// ReplaceWith replaces each element in the set of matched elements with the
// nodes matched by the given selector.
// It returns the removed elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) ReplaceWith(selector string) *Selection {
	return s.ReplaceWithMatcher(compileMatcher(selector))
}

// ReplaceWithMatcher replaces each element in the set of matched elements with
// the nodes matched by the given Matcher.
// It returns the removed elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) ReplaceWithMatcher(m Matcher) *Selection {
	return s.ReplaceWithNodes(m.MatchAll(s.document.rootNode)...)
}

// ReplaceWithSelection replaces each element in the set of matched elements with
// the nodes from the given Selection.
// It returns the removed elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) ReplaceWithSelection(sel *Selection) *Selection {
	return s.ReplaceWithNodes(sel.Nodes...)
}

// ReplaceWithHtml replaces each element in the set of matched elements with
// the parsed HTML.
// It returns the removed elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) ReplaceWithHtml(html string) *Selection {
	return s.ReplaceWithNodes(parseHtml(html)...)
}

// ReplaceWithNodes replaces each element in the set of matched elements with
// the given nodes.
// It returns the removed elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) ReplaceWithNodes(ns ...*html.Node) *Selection {
	s.AfterNodes(ns...)
	return s.Remove()
}

// SetHtml sets the html content of each element in the selection to
// specified html string.
func (s *Selection) SetHtml(html string) *Selection {
	return setHtmlNodes(s, parseHtml(html)...)
}

// SetText sets the content of each element in the selection to specified content.
// The provided text string is escaped.
func (s *Selection) SetText(text string) *Selection {
	return s.SetHtml(html.EscapeString(text))
}

// Unwrap removes the parents of the set of matched elements, leaving the matched
// elements (and their siblings, if any) in their place.
// It returns the original selection.
func (s *Selection) Unwrap() *Selection {
	s.Parent().Each(func(i int, ss *Selection) {
		// For some reason, jquery allows unwrap to remove the <head> element, so
		// allowing it here too. Same for <html>. Why it allows those elements to
		// be unwrapped while not allowing body is a mystery to me.
		if ss.Nodes[0].Data != "body" {
			ss.ReplaceWithSelection(ss.Contents())
		}
	})

	return s
}

// Wrap wraps each element in the set of matched elements inside the first
// element matched by the given selector. The matched child is cloned before
// being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) Wrap(selector string) *Selection {
	return s.WrapMatcher(compileMatcher(selector))
}

// WrapMatcher wraps each element in the set of matched elements inside the
// first element matched by the given matcher. The matched child is cloned
// before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapMatcher(m Matcher) *Selection {
	return s.wrapNodes(m.MatchAll(s.document.rootNode)...)
}

// WrapSelection wraps each element in the set of matched elements inside the
// first element in the given Selection. The element is cloned before being
// inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapSelection(sel *Selection) *Selection {
	return s.wrapNodes(sel.Nodes...)
}

// WrapHtml wraps each element in the set of matched elements inside the inner-
// most child of the given HTML.
//
// It returns the original set of elements.
func (s *Selection) WrapHtml(html string) *Selection {
	return s.wrapNodes(parseHtml(html)...)
}

// WrapNode wraps each element in the set of matched elements inside the inner-
// most child of the given node. The given node is copied before being inserted
// into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapNode(n *html.Node) *Selection {
	return s.wrapNodes(n)
}

func (s *Selection) wrapNodes(ns ...*html.Node) *Selection {
	s.Each(func(i int, ss *Selection) {
		ss.wrapAllNodes(ns...)
	})

	return s
}

// WrapAll wraps a single HTML structure, matched by the given selector, around
// all elements in the set of matched elements. The matched child is cloned
// before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapAll(selector string) *Selection {
	return s.WrapAllMatcher(compileMatcher(selector))
}

// WrapAllMatcher wraps a single HTML structure, matched by the given Matcher,
// around all elements in the set of matched elements. The matched child is
// cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapAllMatcher(m Matcher) *Selection {
	return s.wrapAllNodes(m.MatchAll(s.document.rootNode)...)
}

// WrapAllSelection wraps a single HTML structure, the first node of the given
// Selection, around all elements in the set of matched elements. The matched
// child is cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapAllSelection(sel *Selection) *Selection {
	return s.wrapAllNodes(sel.Nodes...)
}

// WrapAllHtml wraps the given HTML structure around all elements in the set of
// matched elements. The matched child is cloned before being inserted into the
// document.
//
// It returns the original set of elements.
func (s *Selection) WrapAllHtml(html string) *Selection {
	return s.wrapAllNodes(parseHtml(html)...)
}

func (s *Selection) wrapAllNodes(ns ...*html.Node) *Selection {
	if len(ns) > 0 {
		return s.WrapAllNode(ns[0])
	}
	return s
}

// WrapAllNode wraps the given node around the first element in the Selection,
// making all other nodes in the Selection children of the given node. The node
// is cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapAllNode(n *html.Node) *Selection {
	if s.Size() == 0 {
		return s
	}

	wrap := cloneNode(n)

	first := s.Nodes[0]
	if first.Parent != nil {
		first.Parent.InsertBefore(wrap, first)
		first.Parent.RemoveChild(first)
	}

	for c := getFirstChildEl(wrap); c != nil; c = getFirstChildEl(wrap) {
		wrap = c
	}

	newSingleSelection(wrap, s.document).AppendSelection(s)

	return s
}

// WrapInner wraps an HTML structure, matched by the given selector, around the
// content of element in the set of matched elements. The matched child is
// cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapInner(selector string) *Selection {
	return s.WrapInnerMatcher(compileMatcher(selector))
}

// WrapInnerMatcher wraps an HTML structure, matched by the given selector,
// around the content of element in the set of matched elements. The matched
// child is cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapInnerMatcher(m Matcher) *Selection {
	return s.wrapInnerNodes(m.MatchAll(s.document.rootNode)...)
}

// WrapInnerSelection wraps an HTML structure, matched by the given selector,
// around the content of element in the set of matched elements. The matched
// child is cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapInnerSelection(sel *Selection) *Selection {
	return s.wrapInnerNodes(sel.Nodes...)
}

// WrapInnerHtml wraps an HTML structure, matched by the given selector, around
// the content of element in the set of matched elements. The matched child is
// cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapInnerHtml(html string) *Selection {
	return s.wrapInnerNodes(parseHtml(html)...)
}

// WrapInnerNode wraps an HTML structure, matched by the given selector, around
// the content of element in the set of matched elements. The matched child is
// cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapInnerNode(n *html.Node) *Selection {
	return s.wrapInnerNodes(n)
}

func (s *Selection) wrapInnerNodes(ns ...*html.Node) *Selection {
	if len(ns) == 0 {
		return s
	}

	s.Each(func(i int, s *Selection) {
		contents := s.Contents()

		if contents.Size() > 0 {
			contents.wrapAllNodes(ns...)
		} else {
			s.AppendNodes(cloneNode(ns[0]))
		}
	})

	return s
}

func parseHtml(h string) []*html.Node {
	// Errors are only returned when the io.Reader returns any error besides
	// EOF, but strings.Reader never will
	nodes, err := html.ParseFragment(strings.NewReader(h), &html.Node{Type: html.ElementNode})
	if err != nil {
		panic("goquery: failed to parse HTML: " + err.Error())
	}
	return nodes
}

func setHtmlNodes(s *Selection, ns ...*html.Node) *Selection {
	for _, n := range s.Nodes {
		for c := n.FirstChild; c != nil; c = n.FirstChild {
			n.RemoveChild(c)
		}
		for _, c := range ns {
			n.AppendChild(cloneNode(c))
		}
	}
	return s
}

// Get the first child that is an ElementNode
func getFirstChildEl(n *html.Node) *html.Node {
	c := n.FirstChild
	for c != nil && c.Type != html.ElementNode {
		c = c.NextSibling
	}
	return c
}

// Deep copy a slice of nodes.
func cloneNodes(ns []*html.Node) []*html.Node {
	cns := make([]*html.Node, 0, len(ns))

	for _, n := range ns {
		cns = append(cns, cloneNode(n))
	}

	return cns
}

// Deep copy a node. The new node has clones of all the original node's
// children but none of its parents or siblings.
func cloneNode(n *html.Node) *html.Node {
	nn := &html.Node{
		Type:     n.Type,
		DataAtom: n.DataAtom,
		Data:     n.Data,
		Attr:     make([]html.Attribute, len(n.Attr)),
	}

	copy(nn.Attr, n.Attr)
	for c := n.FirstChild; c != nil; c = c.NextSibling {
		nn.AppendChild(cloneNode(c))
	}

	return nn
}

func (s *Selection) manipulateNodes(ns []*html.Node, reverse bool,
	f func(sn *html.Node, n *html.Node)) *Selection {

	lasti := s.Size() - 1

	// net.Html doesn't provide document fragments for insertion, so to get
	// things in the correct order with After() and Prepend(), the callback
	// needs to be called on the reverse of the nodes.
	if reverse {
		for i, j := 0, len(ns)-1; i < j; i, j = i+1, j-1 {
			ns[i], ns[j] = ns[j], ns[i]
		}
	}

	for i, sn := range s.Nodes {
		for _, n := range ns {
			if i != lasti {
				f(sn, cloneNode(n))
			} else {
				if n.Parent != nil {
					n.Parent.RemoveChild(n)
				}
				f(sn, n)
			}
		}
	}

	return s
}
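Editor's note: a brief sketch of the manipulation API above (Remove, AppendHtml, SetText, Html). It is an illustration, not part of the vendored file; the element IDs, markup and expected output are invented.

package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(
		`<div id="box"><span class="old">x</span></div>`))
	if err != nil {
		log.Fatal(err)
	}
	box := doc.Find("#box")
	box.Find(".old").Remove()          // detach the old child from the document
	box.AppendHtml(`<span>new</span>`) // parse and append new markup
	box.Find("span").SetText("hello")  // set escaped text content
	out, _ := box.Html()
	fmt.Println(out) // expected: <span>hello</span>
}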
275
vendor/github.com/PuerkitoBio/goquery/property.go
generated
vendored
Normal file
@ -0,0 +1,275 @@
package goquery

import (
	"bytes"
	"regexp"
	"strings"

	"golang.org/x/net/html"
)

var rxClassTrim = regexp.MustCompile("[\t\r\n]")

// Attr gets the specified attribute's value for the first element in the
// Selection. To get the value for each element individually, use a looping
// construct such as Each or Map method.
func (s *Selection) Attr(attrName string) (val string, exists bool) {
	if len(s.Nodes) == 0 {
		return
	}
	return getAttributeValue(attrName, s.Nodes[0])
}

// AttrOr works like Attr but returns default value if attribute is not present.
func (s *Selection) AttrOr(attrName, defaultValue string) string {
	if len(s.Nodes) == 0 {
		return defaultValue
	}

	val, exists := getAttributeValue(attrName, s.Nodes[0])
	if !exists {
		return defaultValue
	}

	return val
}

// RemoveAttr removes the named attribute from each element in the set of matched elements.
func (s *Selection) RemoveAttr(attrName string) *Selection {
	for _, n := range s.Nodes {
		removeAttr(n, attrName)
	}

	return s
}

// SetAttr sets the given attribute on each element in the set of matched elements.
func (s *Selection) SetAttr(attrName, val string) *Selection {
	for _, n := range s.Nodes {
		attr := getAttributePtr(attrName, n)
		if attr == nil {
			n.Attr = append(n.Attr, html.Attribute{Key: attrName, Val: val})
		} else {
			attr.Val = val
		}
	}

	return s
}

// Text gets the combined text contents of each element in the set of matched
// elements, including their descendants.
func (s *Selection) Text() string {
	var buf bytes.Buffer

	// Slightly optimized vs calling Each: no single selection object created
	var f func(*html.Node)
	f = func(n *html.Node) {
		if n.Type == html.TextNode {
			// Keep newlines and spaces, like jQuery
			buf.WriteString(n.Data)
		}
		if n.FirstChild != nil {
			for c := n.FirstChild; c != nil; c = c.NextSibling {
				f(c)
			}
		}
	}
	for _, n := range s.Nodes {
		f(n)
	}

	return buf.String()
}

// Size is an alias for Length.
func (s *Selection) Size() int {
	return s.Length()
}

// Length returns the number of elements in the Selection object.
func (s *Selection) Length() int {
	return len(s.Nodes)
}

// Html gets the HTML contents of the first element in the set of matched
// elements. It includes text and comment nodes.
func (s *Selection) Html() (ret string, e error) {
	// Since there is no .innerHtml, the HTML content must be re-created from
	// the nodes using html.Render.
	var buf bytes.Buffer

	if len(s.Nodes) > 0 {
		for c := s.Nodes[0].FirstChild; c != nil; c = c.NextSibling {
			e = html.Render(&buf, c)
			if e != nil {
				return
			}
		}
		ret = buf.String()
	}

	return
}

// AddClass adds the given class(es) to each element in the set of matched elements.
// Multiple class names can be specified, separated by a space or via multiple arguments.
func (s *Selection) AddClass(class ...string) *Selection {
	classStr := strings.TrimSpace(strings.Join(class, " "))

	if classStr == "" {
		return s
	}

	tcls := getClassesSlice(classStr)
	for _, n := range s.Nodes {
		curClasses, attr := getClassesAndAttr(n, true)
		for _, newClass := range tcls {
			if !strings.Contains(curClasses, " "+newClass+" ") {
				curClasses += newClass + " "
			}
		}

		setClasses(n, attr, curClasses)
	}

	return s
}

// HasClass determines whether any of the matched elements are assigned the
// given class.
func (s *Selection) HasClass(class string) bool {
	class = " " + class + " "
	for _, n := range s.Nodes {
		classes, _ := getClassesAndAttr(n, false)
		if strings.Contains(classes, class) {
			return true
		}
	}
	return false
}

// RemoveClass removes the given class(es) from each element in the set of matched elements.
// Multiple class names can be specified, separated by a space or via multiple arguments.
// If no class name is provided, all classes are removed.
func (s *Selection) RemoveClass(class ...string) *Selection {
	var rclasses []string

	classStr := strings.TrimSpace(strings.Join(class, " "))
	remove := classStr == ""

	if !remove {
		rclasses = getClassesSlice(classStr)
	}

	for _, n := range s.Nodes {
		if remove {
			removeAttr(n, "class")
		} else {
			classes, attr := getClassesAndAttr(n, true)
			for _, rcl := range rclasses {
				classes = strings.Replace(classes, " "+rcl+" ", " ", -1)
			}

			setClasses(n, attr, classes)
		}
	}

	return s
}

// ToggleClass adds or removes the given class(es) for each element in the set of matched elements.
// Multiple class names can be specified, separated by a space or via multiple arguments.
func (s *Selection) ToggleClass(class ...string) *Selection {
	classStr := strings.TrimSpace(strings.Join(class, " "))

	if classStr == "" {
		return s
	}

	tcls := getClassesSlice(classStr)

	for _, n := range s.Nodes {
		classes, attr := getClassesAndAttr(n, true)
		for _, tcl := range tcls {
			if strings.Contains(classes, " "+tcl+" ") {
				classes = strings.Replace(classes, " "+tcl+" ", " ", -1)
			} else {
				classes += tcl + " "
			}
		}

		setClasses(n, attr, classes)
	}

	return s
}

func getAttributePtr(attrName string, n *html.Node) *html.Attribute {
	if n == nil {
		return nil
	}

	for i, a := range n.Attr {
		if a.Key == attrName {
			return &n.Attr[i]
		}
	}
	return nil
}

// Private function to get the specified attribute's value from a node.
func getAttributeValue(attrName string, n *html.Node) (val string, exists bool) {
	if a := getAttributePtr(attrName, n); a != nil {
		val = a.Val
		exists = true
	}
	return
}

// Get and normalize the "class" attribute from the node.
func getClassesAndAttr(n *html.Node, create bool) (classes string, attr *html.Attribute) {
	// Applies only to element nodes
	if n.Type == html.ElementNode {
		attr = getAttributePtr("class", n)
		if attr == nil && create {
			n.Attr = append(n.Attr, html.Attribute{
				Key: "class",
				Val: "",
			})
			attr = &n.Attr[len(n.Attr)-1]
		}
	}

	if attr == nil {
		classes = " "
	} else {
		classes = rxClassTrim.ReplaceAllString(" "+attr.Val+" ", " ")
	}

	return
}

func getClassesSlice(classes string) []string {
	return strings.Split(rxClassTrim.ReplaceAllString(" "+classes+" ", " "), " ")
}

func removeAttr(n *html.Node, attrName string) {
	for i, a := range n.Attr {
		if a.Key == attrName {
			n.Attr[i], n.Attr[len(n.Attr)-1], n.Attr =
				n.Attr[len(n.Attr)-1], html.Attribute{}, n.Attr[:len(n.Attr)-1]
			return
		}
	}
}

func setClasses(n *html.Node, attr *html.Attribute, classes string) {
	classes = strings.TrimSpace(classes)
	if classes == "" {
		removeAttr(n, "class")
		return
	}

	attr.Val = classes
}
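Editor's note: a short sketch of the attribute and class helpers above (Attr, AttrOr, AddClass, HasClass, Text). Illustrative only, not part of the vendored file; the sample anchor markup is invented.

package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(
		`<a href="https://example.com" class="link">home</a>`))
	if err != nil {
		log.Fatal(err)
	}
	a := doc.Find("a")
	href, ok := a.Attr("href")     // explicit presence check
	rel := a.AttrOr("rel", "none") // fallback when the attribute is missing
	a.AddClass("visited")
	// expected: https://example.com true none true home
	fmt.Println(href, ok, rel, a.HasClass("visited"), a.Text())
}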
49
vendor/github.com/PuerkitoBio/goquery/query.go
generated
vendored
Normal file
@ -0,0 +1,49 @@
package goquery

import "golang.org/x/net/html"

// Is checks the current matched set of elements against a selector and
// returns true if at least one of these elements matches.
func (s *Selection) Is(selector string) bool {
	return s.IsMatcher(compileMatcher(selector))
}

// IsMatcher checks the current matched set of elements against a matcher and
// returns true if at least one of these elements matches.
func (s *Selection) IsMatcher(m Matcher) bool {
	if len(s.Nodes) > 0 {
		if len(s.Nodes) == 1 {
			return m.Match(s.Nodes[0])
		}
		return len(m.Filter(s.Nodes)) > 0
	}

	return false
}

// IsFunction checks the current matched set of elements against a predicate and
// returns true if at least one of these elements matches.
func (s *Selection) IsFunction(f func(int, *Selection) bool) bool {
	return s.FilterFunction(f).Length() > 0
}

// IsSelection checks the current matched set of elements against a Selection object
// and returns true if at least one of these elements matches.
func (s *Selection) IsSelection(sel *Selection) bool {
	return s.FilterSelection(sel).Length() > 0
}

// IsNodes checks the current matched set of elements against the specified nodes
// and returns true if at least one of these elements matches.
func (s *Selection) IsNodes(nodes ...*html.Node) bool {
	return s.FilterNodes(nodes...).Length() > 0
}

// Contains returns true if the specified Node is within,
// at any depth, one of the nodes in the Selection object.
// It is NOT inclusive, to behave like jQuery's implementation, and
// unlike Javascript's .contains, so if the contained
// node is itself in the selection, it returns false.
func (s *Selection) Contains(n *html.Node) bool {
	return sliceContains(s.Nodes, n)
}
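Editor's note: a minimal sketch of the query helpers above (Is, Contains, Get). Illustrative only, not part of the vendored file; the markup is invented, and the comments restate that Contains is deliberately not inclusive.

package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(
		`<div><p id="p1">hi</p></div>`))
	if err != nil {
		log.Fatal(err)
	}
	div := doc.Find("div")
	p := doc.Find("#p1")
	fmt.Println(p.Is("p"))              // true: at least one element matches
	fmt.Println(div.Contains(p.Get(0))) // true: the node is a descendant
	fmt.Println(p.Contains(p.Get(0)))   // false: Contains is not inclusive
}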
698
vendor/github.com/PuerkitoBio/goquery/traversal.go
generated
vendored
Normal file
@ -0,0 +1,698 @@
package goquery

import "golang.org/x/net/html"

type siblingType int

// Sibling type, used internally when iterating over children at the same
// level (siblings) to specify which nodes are requested.
const (
	siblingPrevUntil siblingType = iota - 3
	siblingPrevAll
	siblingPrev
	siblingAll
	siblingNext
	siblingNextAll
	siblingNextUntil
	siblingAllIncludingNonElements
)

// Find gets the descendants of each element in the current set of matched
// elements, filtered by a selector. It returns a new Selection object
// containing these matched elements.
func (s *Selection) Find(selector string) *Selection {
	return pushStack(s, findWithMatcher(s.Nodes, compileMatcher(selector)))
}

// FindMatcher gets the descendants of each element in the current set of matched
// elements, filtered by the matcher. It returns a new Selection object
// containing these matched elements.
func (s *Selection) FindMatcher(m Matcher) *Selection {
	return pushStack(s, findWithMatcher(s.Nodes, m))
}

// FindSelection gets the descendants of each element in the current
// Selection, filtered by a Selection. It returns a new Selection object
// containing these matched elements.
func (s *Selection) FindSelection(sel *Selection) *Selection {
	if sel == nil {
		return pushStack(s, nil)
	}
	return s.FindNodes(sel.Nodes...)
}

// FindNodes gets the descendants of each element in the current
// Selection, filtered by some nodes. It returns a new Selection object
// containing these matched elements.
func (s *Selection) FindNodes(nodes ...*html.Node) *Selection {
	return pushStack(s, mapNodes(nodes, func(i int, n *html.Node) []*html.Node {
		if sliceContains(s.Nodes, n) {
			return []*html.Node{n}
		}
		return nil
	}))
}

// Contents gets the children of each element in the Selection,
// including text and comment nodes. It returns a new Selection object
// containing these elements.
func (s *Selection) Contents() *Selection {
	return pushStack(s, getChildrenNodes(s.Nodes, siblingAllIncludingNonElements))
}

// ContentsFiltered gets the children of each element in the Selection,
// filtered by the specified selector. It returns a new Selection
// object containing these elements. Since selectors only act on Element nodes,
// this function is an alias to ChildrenFiltered unless the selector is empty,
// in which case it is an alias to Contents.
func (s *Selection) ContentsFiltered(selector string) *Selection {
	if selector != "" {
		return s.ChildrenFiltered(selector)
	}
	return s.Contents()
}

// ContentsMatcher gets the children of each element in the Selection,
// filtered by the specified matcher. It returns a new Selection
// object containing these elements. Since matchers only act on Element nodes,
// this function is an alias to ChildrenMatcher.
func (s *Selection) ContentsMatcher(m Matcher) *Selection {
	return s.ChildrenMatcher(m)
}

// Children gets the child elements of each element in the Selection.
// It returns a new Selection object containing these elements.
func (s *Selection) Children() *Selection {
	return pushStack(s, getChildrenNodes(s.Nodes, siblingAll))
}

// ChildrenFiltered gets the child elements of each element in the Selection,
// filtered by the specified selector. It returns a new
// Selection object containing these elements.
func (s *Selection) ChildrenFiltered(selector string) *Selection {
	return filterAndPush(s, getChildrenNodes(s.Nodes, siblingAll), compileMatcher(selector))
}

// ChildrenMatcher gets the child elements of each element in the Selection,
// filtered by the specified matcher. It returns a new
// Selection object containing these elements.
func (s *Selection) ChildrenMatcher(m Matcher) *Selection {
	return filterAndPush(s, getChildrenNodes(s.Nodes, siblingAll), m)
}

// Parent gets the parent of each element in the Selection. It returns a
// new Selection object containing the matched elements.
func (s *Selection) Parent() *Selection {
	return pushStack(s, getParentNodes(s.Nodes))
}

// ParentFiltered gets the parent of each element in the Selection filtered by a
// selector. It returns a new Selection object containing the matched elements.
func (s *Selection) ParentFiltered(selector string) *Selection {
	return filterAndPush(s, getParentNodes(s.Nodes), compileMatcher(selector))
}

// ParentMatcher gets the parent of each element in the Selection filtered by a
// matcher. It returns a new Selection object containing the matched elements.
func (s *Selection) ParentMatcher(m Matcher) *Selection {
	return filterAndPush(s, getParentNodes(s.Nodes), m)
}

// Closest gets the first element that matches the selector by testing the
// element itself and traversing up through its ancestors in the DOM tree.
func (s *Selection) Closest(selector string) *Selection {
	cs := compileMatcher(selector)
	return s.ClosestMatcher(cs)
}

// ClosestMatcher gets the first element that matches the matcher by testing the
// element itself and traversing up through its ancestors in the DOM tree.
func (s *Selection) ClosestMatcher(m Matcher) *Selection {
	return pushStack(s, mapNodes(s.Nodes, func(i int, n *html.Node) []*html.Node {
		// For each node in the selection, test the node itself, then each parent
		// until a match is found.
		for ; n != nil; n = n.Parent {
			if m.Match(n) {
				return []*html.Node{n}
			}
		}
		return nil
	}))
}

// ClosestNodes gets the first element that matches one of the nodes by testing the
// element itself and traversing up through its ancestors in the DOM tree.
func (s *Selection) ClosestNodes(nodes ...*html.Node) *Selection {
	set := make(map[*html.Node]bool)
	for _, n := range nodes {
		set[n] = true
	}
	return pushStack(s, mapNodes(s.Nodes, func(i int, n *html.Node) []*html.Node {
		// For each node in the selection, test the node itself, then each parent
		// until a match is found.
		for ; n != nil; n = n.Parent {
			if set[n] {
				return []*html.Node{n}
			}
		}
		return nil
	}))
}

// ClosestSelection gets the first element that matches one of the nodes in the
// Selection by testing the element itself and traversing up through its ancestors
// in the DOM tree.
func (s *Selection) ClosestSelection(sel *Selection) *Selection {
	if sel == nil {
		return pushStack(s, nil)
	}
	return s.ClosestNodes(sel.Nodes...)
}

// Parents gets the ancestors of each element in the current Selection. It
// returns a new Selection object with the matched elements.
func (s *Selection) Parents() *Selection {
	return pushStack(s, getParentsNodes(s.Nodes, nil, nil))
}

// ParentsFiltered gets the ancestors of each element in the current
// Selection. It returns a new Selection object with the matched elements.
func (s *Selection) ParentsFiltered(selector string) *Selection {
	return filterAndPush(s, getParentsNodes(s.Nodes, nil, nil), compileMatcher(selector))
}

// ParentsMatcher gets the ancestors of each element in the current
// Selection. It returns a new Selection object with the matched elements.
func (s *Selection) ParentsMatcher(m Matcher) *Selection {
	return filterAndPush(s, getParentsNodes(s.Nodes, nil, nil), m)
}

// ParentsUntil gets the ancestors of each element in the Selection, up to but
// not including the element matched by the selector. It returns a new Selection
// object containing the matched elements.
func (s *Selection) ParentsUntil(selector string) *Selection {
	return pushStack(s, getParentsNodes(s.Nodes, compileMatcher(selector), nil))
}

// ParentsUntilMatcher gets the ancestors of each element in the Selection, up to but
// not including the element matched by the matcher. It returns a new Selection
// object containing the matched elements.
func (s *Selection) ParentsUntilMatcher(m Matcher) *Selection {
	return pushStack(s, getParentsNodes(s.Nodes, m, nil))
}

// ParentsUntilSelection gets the ancestors of each element in the Selection,
// up to but not including the elements in the specified Selection. It returns a
// new Selection object containing the matched elements.
func (s *Selection) ParentsUntilSelection(sel *Selection) *Selection {
	if sel == nil {
		return s.Parents()
	}
	return s.ParentsUntilNodes(sel.Nodes...)
}

// ParentsUntilNodes gets the ancestors of each element in the Selection,
// up to but not including the specified nodes. It returns a
// new Selection object containing the matched elements.
func (s *Selection) ParentsUntilNodes(nodes ...*html.Node) *Selection {
	return pushStack(s, getParentsNodes(s.Nodes, nil, nodes))
}

// ParentsFilteredUntil is like ParentsUntil, with the option to filter the
// results based on a selector string. It returns a new Selection
// object containing the matched elements.
func (s *Selection) ParentsFilteredUntil(filterSelector, untilSelector string) *Selection {
	return filterAndPush(s, getParentsNodes(s.Nodes, compileMatcher(untilSelector), nil), compileMatcher(filterSelector))
}

// ParentsFilteredUntilMatcher is like ParentsUntilMatcher, with the option to filter the
// results based on a matcher. It returns a new Selection object containing the matched elements.
func (s *Selection) ParentsFilteredUntilMatcher(filter, until Matcher) *Selection {
	return filterAndPush(s, getParentsNodes(s.Nodes, until, nil), filter)
}

// ParentsFilteredUntilSelection is like ParentsUntilSelection, with the
// option to filter the results based on a selector string. It returns a new
// Selection object containing the matched elements.
func (s *Selection) ParentsFilteredUntilSelection(filterSelector string, sel *Selection) *Selection {
	return s.ParentsMatcherUntilSelection(compileMatcher(filterSelector), sel)
}

// ParentsMatcherUntilSelection is like ParentsUntilSelection, with the
// option to filter the results based on a matcher. It returns a new
// Selection object containing the matched elements.
func (s *Selection) ParentsMatcherUntilSelection(filter Matcher, sel *Selection) *Selection {
	if sel == nil {
		return s.ParentsMatcher(filter)
	}
	return s.ParentsMatcherUntilNodes(filter, sel.Nodes...)
}

// ParentsFilteredUntilNodes is like ParentsUntilNodes, with the
// option to filter the results based on a selector string. It returns a new
// Selection object containing the matched elements.
func (s *Selection) ParentsFilteredUntilNodes(filterSelector string, nodes ...*html.Node) *Selection {
	return filterAndPush(s, getParentsNodes(s.Nodes, nil, nodes), compileMatcher(filterSelector))
}

// ParentsMatcherUntilNodes is like ParentsUntilNodes, with the
// option to filter the results based on a matcher. It returns a new
// Selection object containing the matched elements.
func (s *Selection) ParentsMatcherUntilNodes(filter Matcher, nodes ...*html.Node) *Selection {
	return filterAndPush(s, getParentsNodes(s.Nodes, nil, nodes), filter)
}

// Siblings gets the siblings of each element in the Selection. It returns
// a new Selection object containing the matched elements.
func (s *Selection) Siblings() *Selection {
	return pushStack(s, getSiblingNodes(s.Nodes, siblingAll, nil, nil))
}

// SiblingsFiltered gets the siblings of each element in the Selection
// filtered by a selector. It returns a new Selection object containing the
// matched elements.
func (s *Selection) SiblingsFiltered(selector string) *Selection {
	return filterAndPush(s, getSiblingNodes(s.Nodes, siblingAll, nil, nil), compileMatcher(selector))
}

// SiblingsMatcher gets the siblings of each element in the Selection
// filtered by a matcher. It returns a new Selection object containing the
// matched elements.
func (s *Selection) SiblingsMatcher(m Matcher) *Selection {
	return filterAndPush(s, getSiblingNodes(s.Nodes, siblingAll, nil, nil), m)
}

// Next gets the immediately following sibling of each element in the
// Selection. It returns a new Selection object containing the matched elements.
func (s *Selection) Next() *Selection {
	return pushStack(s, getSiblingNodes(s.Nodes, siblingNext, nil, nil))
}

// NextFiltered gets the immediately following sibling of each element in the
// Selection filtered by a selector. It returns a new Selection object
// containing the matched elements.
func (s *Selection) NextFiltered(selector string) *Selection {
	return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNext, nil, nil), compileMatcher(selector))
}

// NextMatcher gets the immediately following sibling of each element in the
// Selection filtered by a matcher. It returns a new Selection object
// containing the matched elements.
func (s *Selection) NextMatcher(m Matcher) *Selection {
	return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNext, nil, nil), m)
}

// NextAll gets all the following siblings of each element in the
// Selection. It returns a new Selection object containing the matched elements.
func (s *Selection) NextAll() *Selection {
	return pushStack(s, getSiblingNodes(s.Nodes, siblingNextAll, nil, nil))
}

// NextAllFiltered gets all the following siblings of each element in the
// Selection filtered by a selector. It returns a new Selection object
// containing the matched elements.
func (s *Selection) NextAllFiltered(selector string) *Selection {
	return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextAll, nil, nil), compileMatcher(selector))
}

// NextAllMatcher gets all the following siblings of each element in the
// Selection filtered by a matcher. It returns a new Selection object
// containing the matched elements.
func (s *Selection) NextAllMatcher(m Matcher) *Selection {
	return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextAll, nil, nil), m)
}

// Prev gets the immediately preceding sibling of each element in the
// Selection. It returns a new Selection object containing the matched elements.
func (s *Selection) Prev() *Selection {
	return pushStack(s, getSiblingNodes(s.Nodes, siblingPrev, nil, nil))
}

// PrevFiltered gets the immediately preceding sibling of each element in the
// Selection filtered by a selector. It returns a new Selection object
// containing the matched elements.
func (s *Selection) PrevFiltered(selector string) *Selection {
	return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrev, nil, nil), compileMatcher(selector))
}

// PrevMatcher gets the immediately preceding sibling of each element in the
// Selection filtered by a matcher. It returns a new Selection object
// containing the matched elements.
func (s *Selection) PrevMatcher(m Matcher) *Selection {
	return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrev, nil, nil), m)
}

// PrevAll gets all the preceding siblings of each element in the
// Selection. It returns a new Selection object containing the matched elements.
func (s *Selection) PrevAll() *Selection {
	return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevAll, nil, nil))
}

// PrevAllFiltered gets all the preceding siblings of each element in the
// Selection filtered by a selector. It returns a new Selection object
// containing the matched elements.
func (s *Selection) PrevAllFiltered(selector string) *Selection {
	return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevAll, nil, nil), compileMatcher(selector))
}

// PrevAllMatcher gets all the preceding siblings of each element in the
// Selection filtered by a matcher. It returns a new Selection object
// containing the matched elements.
func (s *Selection) PrevAllMatcher(m Matcher) *Selection {
	return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevAll, nil, nil), m)
}

// NextUntil gets all following siblings of each element up to but not
// including the element matched by the selector. It returns a new Selection
// object containing the matched elements.
func (s *Selection) NextUntil(selector string) *Selection {
	return pushStack(s, getSiblingNodes(s.Nodes, siblingNextUntil,
		compileMatcher(selector), nil))
}

// NextUntilMatcher gets all following siblings of each element up to but not
// including the element matched by the matcher. It returns a new Selection
// object containing the matched elements.
func (s *Selection) NextUntilMatcher(m Matcher) *Selection {
	return pushStack(s, getSiblingNodes(s.Nodes, siblingNextUntil,
		m, nil))
}
|
||||||
|
// NextUntilSelection gets all following siblings of each element up to but not
|
||||||
|
// including the element matched by the Selection. It returns a new Selection
|
||||||
|
// object containing the matched elements.
|
||||||
|
func (s *Selection) NextUntilSelection(sel *Selection) *Selection {
|
||||||
|
if sel == nil {
|
||||||
|
return s.NextAll()
|
||||||
|
}
|
||||||
|
return s.NextUntilNodes(sel.Nodes...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// NextUntilNodes gets all following siblings of each element up to but not
|
||||||
|
// including the element matched by the nodes. It returns a new Selection
|
||||||
|
// object containing the matched elements.
|
||||||
|
func (s *Selection) NextUntilNodes(nodes ...*html.Node) *Selection {
|
||||||
|
return pushStack(s, getSiblingNodes(s.Nodes, siblingNextUntil,
|
||||||
|
nil, nodes))
|
||||||
|
}
|
||||||
|
|
||||||
|
// PrevUntil gets all preceding siblings of each element up to but not
|
||||||
|
// including the element matched by the selector. It returns a new Selection
|
||||||
|
// object containing the matched elements.
|
||||||
|
func (s *Selection) PrevUntil(selector string) *Selection {
|
||||||
|
return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
|
||||||
|
compileMatcher(selector), nil))
|
||||||
|
}
|
||||||
|
|
||||||
|
// PrevUntilMatcher gets all preceding siblings of each element up to but not
|
||||||
|
// including the element matched by the matcher. It returns a new Selection
|
||||||
|
// object containing the matched elements.
|
||||||
|
func (s *Selection) PrevUntilMatcher(m Matcher) *Selection {
|
||||||
|
return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
|
||||||
|
m, nil))
|
||||||
|
}
|
||||||
|
|
||||||
|
// PrevUntilSelection gets all preceding siblings of each element up to but not
|
||||||
|
// including the element matched by the Selection. It returns a new Selection
|
||||||
|
// object containing the matched elements.
|
||||||
|
func (s *Selection) PrevUntilSelection(sel *Selection) *Selection {
|
||||||
|
if sel == nil {
|
||||||
|
return s.PrevAll()
|
||||||
|
}
|
||||||
|
return s.PrevUntilNodes(sel.Nodes...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// PrevUntilNodes gets all preceding siblings of each element up to but not
|
||||||
|
// including the element matched by the nodes. It returns a new Selection
|
||||||
|
// object containing the matched elements.
|
||||||
|
func (s *Selection) PrevUntilNodes(nodes ...*html.Node) *Selection {
|
||||||
|
return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
|
||||||
|
nil, nodes))
|
||||||
|
}
|
||||||
|
|
||||||
|
// NextFilteredUntil is like NextUntil, with the option to filter
|
||||||
|
// the results based on a selector string.
|
||||||
|
// It returns a new Selection object containing the matched elements.
|
||||||
|
func (s *Selection) NextFilteredUntil(filterSelector, untilSelector string) *Selection {
|
||||||
|
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil,
|
||||||
|
compileMatcher(untilSelector), nil), compileMatcher(filterSelector))
|
||||||
|
}
|
||||||
|
|
||||||
|
// NextFilteredUntilMatcher is like NextUntilMatcher, with the option to filter
|
||||||
|
// the results based on a matcher.
|
||||||
|
// It returns a new Selection object containing the matched elements.
|
||||||
|
func (s *Selection) NextFilteredUntilMatcher(filter, until Matcher) *Selection {
|
||||||
|
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil,
|
||||||
|
until, nil), filter)
|
||||||
|
}
|
||||||
|
|
||||||
|
// NextFilteredUntilSelection is like NextUntilSelection, with the
|
||||||
|
// option to filter the results based on a selector string. It returns a new
|
||||||
|
// Selection object containing the matched elements.
|
||||||
|
func (s *Selection) NextFilteredUntilSelection(filterSelector string, sel *Selection) *Selection {
|
||||||
|
return s.NextMatcherUntilSelection(compileMatcher(filterSelector), sel)
|
||||||
|
}
|
||||||
|
|
||||||
|
// NextMatcherUntilSelection is like NextUntilSelection, with the
|
||||||
|
// option to filter the results based on a matcher. It returns a new
|
||||||
|
// Selection object containing the matched elements.
|
||||||
|
func (s *Selection) NextMatcherUntilSelection(filter Matcher, sel *Selection) *Selection {
|
||||||
|
if sel == nil {
|
||||||
|
return s.NextMatcher(filter)
|
||||||
|
}
|
||||||
|
return s.NextMatcherUntilNodes(filter, sel.Nodes...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// NextFilteredUntilNodes is like NextUntilNodes, with the
|
||||||
|
// option to filter the results based on a selector string. It returns a new
|
||||||
|
// Selection object containing the matched elements.
|
||||||
|
func (s *Selection) NextFilteredUntilNodes(filterSelector string, nodes ...*html.Node) *Selection {
|
||||||
|
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil,
|
||||||
|
nil, nodes), compileMatcher(filterSelector))
|
||||||
|
}
|
||||||
|
|
||||||
|
// NextMatcherUntilNodes is like NextUntilNodes, with the
|
||||||
|
// option to filter the results based on a matcher. It returns a new
|
||||||
|
// Selection object containing the matched elements.
|
||||||
|
func (s *Selection) NextMatcherUntilNodes(filter Matcher, nodes ...*html.Node) *Selection {
|
||||||
|
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil,
|
||||||
|
nil, nodes), filter)
|
||||||
|
}
|
||||||
|
|
||||||
|
// PrevFilteredUntil is like PrevUntil, with the option to filter
|
||||||
|
// the results based on a selector string.
|
||||||
|
// It returns a new Selection object containing the matched elements.
|
||||||
|
func (s *Selection) PrevFilteredUntil(filterSelector, untilSelector string) *Selection {
|
||||||
|
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
|
||||||
|
compileMatcher(untilSelector), nil), compileMatcher(filterSelector))
|
||||||
|
}
|
||||||
|
|
||||||
|
// PrevFilteredUntilMatcher is like PrevUntilMatcher, with the option to filter
|
||||||
|
// the results based on a matcher.
|
||||||
|
// It returns a new Selection object containing the matched elements.
|
||||||
|
func (s *Selection) PrevFilteredUntilMatcher(filter, until Matcher) *Selection {
|
||||||
|
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
|
||||||
|
until, nil), filter)
|
||||||
|
}
|
||||||
|
|
||||||
|
// PrevFilteredUntilSelection is like PrevUntilSelection, with the
|
||||||
|
// option to filter the results based on a selector string. It returns a new
|
||||||
|
// Selection object containing the matched elements.
|
||||||
|
func (s *Selection) PrevFilteredUntilSelection(filterSelector string, sel *Selection) *Selection {
|
||||||
|
return s.PrevMatcherUntilSelection(compileMatcher(filterSelector), sel)
|
||||||
|
}
|
||||||
|
|
||||||
|
// PrevMatcherUntilSelection is like PrevUntilSelection, with the
|
||||||
|
// option to filter the results based on a matcher. It returns a new
|
||||||
|
// Selection object containing the matched elements.
|
||||||
|
func (s *Selection) PrevMatcherUntilSelection(filter Matcher, sel *Selection) *Selection {
|
||||||
|
if sel == nil {
|
||||||
|
return s.PrevMatcher(filter)
|
||||||
|
}
|
||||||
|
return s.PrevMatcherUntilNodes(filter, sel.Nodes...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// PrevFilteredUntilNodes is like PrevUntilNodes, with the
|
||||||
|
// option to filter the results based on a selector string. It returns a new
|
||||||
|
// Selection object containing the matched elements.
|
||||||
|
func (s *Selection) PrevFilteredUntilNodes(filterSelector string, nodes ...*html.Node) *Selection {
|
||||||
|
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
|
||||||
|
nil, nodes), compileMatcher(filterSelector))
|
||||||
|
}
|
||||||
|
|
||||||
|
// PrevMatcherUntilNodes is like PrevUntilNodes, with the
|
||||||
|
// option to filter the results based on a matcher. It returns a new
|
||||||
|
// Selection object containing the matched elements.
|
||||||
|
func (s *Selection) PrevMatcherUntilNodes(filter Matcher, nodes ...*html.Node) *Selection {
|
||||||
|
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
|
||||||
|
nil, nodes), filter)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Filter and push filters the nodes based on a matcher, and pushes the results
|
||||||
|
// on the stack, with the srcSel as previous selection.
|
||||||
|
func filterAndPush(srcSel *Selection, nodes []*html.Node, m Matcher) *Selection {
|
||||||
|
// Create a temporary Selection with the specified nodes to filter using winnow
|
||||||
|
sel := &Selection{nodes, srcSel.document, nil}
|
||||||
|
// Filter based on matcher and push on stack
|
||||||
|
return pushStack(srcSel, winnow(sel, m, true))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Internal implementation of Find that return raw nodes.
|
||||||
|
func findWithMatcher(nodes []*html.Node, m Matcher) []*html.Node {
|
||||||
|
// Map nodes to find the matches within the children of each node
|
||||||
|
return mapNodes(nodes, func(i int, n *html.Node) (result []*html.Node) {
|
||||||
|
// Go down one level, because jQuery's Find selects only within descendants
|
||||||
|
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||||
|
if c.Type == html.ElementNode {
|
||||||
|
result = append(result, m.MatchAll(c)...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Internal implementation to get all parent nodes, stopping at the specified
|
||||||
|
// node (or nil if no stop).
|
||||||
|
func getParentsNodes(nodes []*html.Node, stopm Matcher, stopNodes []*html.Node) []*html.Node {
|
||||||
|
return mapNodes(nodes, func(i int, n *html.Node) (result []*html.Node) {
|
||||||
|
for p := n.Parent; p != nil; p = p.Parent {
|
||||||
|
sel := newSingleSelection(p, nil)
|
||||||
|
if stopm != nil {
|
||||||
|
if sel.IsMatcher(stopm) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
} else if len(stopNodes) > 0 {
|
||||||
|
if sel.IsNodes(stopNodes...) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if p.Type == html.ElementNode {
|
||||||
|
result = append(result, p)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Internal implementation of sibling nodes that return a raw slice of matches.
|
||||||
|
func getSiblingNodes(nodes []*html.Node, st siblingType, untilm Matcher, untilNodes []*html.Node) []*html.Node {
|
||||||
|
var f func(*html.Node) bool
|
||||||
|
|
||||||
|
// If the requested siblings are ...Until, create the test function to
|
||||||
|
// determine if the until condition is reached (returns true if it is)
|
||||||
|
if st == siblingNextUntil || st == siblingPrevUntil {
|
||||||
|
f = func(n *html.Node) bool {
|
||||||
|
if untilm != nil {
|
||||||
|
// Matcher-based condition
|
||||||
|
sel := newSingleSelection(n, nil)
|
||||||
|
return sel.IsMatcher(untilm)
|
||||||
|
} else if len(untilNodes) > 0 {
|
||||||
|
// Nodes-based condition
|
||||||
|
sel := newSingleSelection(n, nil)
|
||||||
|
return sel.IsNodes(untilNodes...)
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return mapNodes(nodes, func(i int, n *html.Node) []*html.Node {
|
||||||
|
return getChildrenWithSiblingType(n.Parent, st, n, f)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Gets the children nodes of each node in the specified slice of nodes,
|
||||||
|
// based on the sibling type request.
|
||||||
|
func getChildrenNodes(nodes []*html.Node, st siblingType) []*html.Node {
|
||||||
|
return mapNodes(nodes, func(i int, n *html.Node) []*html.Node {
|
||||||
|
return getChildrenWithSiblingType(n, st, nil, nil)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Gets the children of the specified parent, based on the requested sibling
|
||||||
|
// type, skipping a specified node if required.
|
||||||
|
func getChildrenWithSiblingType(parent *html.Node, st siblingType, skipNode *html.Node,
|
||||||
|
untilFunc func(*html.Node) bool) (result []*html.Node) {
|
||||||
|
|
||||||
|
// Create the iterator function
|
||||||
|
var iter = func(cur *html.Node) (ret *html.Node) {
|
||||||
|
// Based on the sibling type requested, iterate the right way
|
||||||
|
for {
|
||||||
|
switch st {
|
||||||
|
case siblingAll, siblingAllIncludingNonElements:
|
||||||
|
if cur == nil {
|
||||||
|
// First iteration, start with first child of parent
|
||||||
|
// Skip node if required
|
||||||
|
if ret = parent.FirstChild; ret == skipNode && skipNode != nil {
|
||||||
|
ret = skipNode.NextSibling
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Skip node if required
|
||||||
|
if ret = cur.NextSibling; ret == skipNode && skipNode != nil {
|
||||||
|
ret = skipNode.NextSibling
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case siblingPrev, siblingPrevAll, siblingPrevUntil:
|
||||||
|
if cur == nil {
|
||||||
|
// Start with previous sibling of the skip node
|
||||||
|
ret = skipNode.PrevSibling
|
||||||
|
} else {
|
||||||
|
ret = cur.PrevSibling
|
||||||
|
}
|
||||||
|
case siblingNext, siblingNextAll, siblingNextUntil:
|
||||||
|
if cur == nil {
|
||||||
|
// Start with next sibling of the skip node
|
||||||
|
ret = skipNode.NextSibling
|
||||||
|
} else {
|
||||||
|
ret = cur.NextSibling
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
panic("Invalid sibling type.")
|
||||||
|
}
|
||||||
|
if ret == nil || ret.Type == html.ElementNode || st == siblingAllIncludingNonElements {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// Not a valid node, try again from this one
|
||||||
|
cur = ret
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for c := iter(nil); c != nil; c = iter(c) {
|
||||||
|
// If this is an ...Until case, test before append (returns true
|
||||||
|
// if the until condition is reached)
|
||||||
|
if st == siblingNextUntil || st == siblingPrevUntil {
|
||||||
|
if untilFunc(c) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
result = append(result, c)
|
||||||
|
if st == siblingNext || st == siblingPrev {
|
||||||
|
// Only one node was requested (immediate next or previous), so exit
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Internal implementation of parent nodes that return a raw slice of Nodes.
|
||||||
|
func getParentNodes(nodes []*html.Node) []*html.Node {
|
||||||
|
return mapNodes(nodes, func(i int, n *html.Node) []*html.Node {
|
||||||
|
if n.Parent != nil && n.Parent.Type == html.ElementNode {
|
||||||
|
return []*html.Node{n.Parent}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Internal map function used by many traversing methods. Takes the source nodes
|
||||||
|
// to iterate on and the mapping function that returns an array of nodes.
|
||||||
|
// Returns an array of nodes mapped by calling the callback function once for
|
||||||
|
// each node in the source nodes.
|
||||||
|
func mapNodes(nodes []*html.Node, f func(int, *html.Node) []*html.Node) (result []*html.Node) {
|
||||||
|
set := make(map[*html.Node]bool)
|
||||||
|
for i, n := range nodes {
|
||||||
|
if vals := f(i, n); len(vals) > 0 {
|
||||||
|
result = appendWithoutDuplicates(result, vals, set)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
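The sibling and parent traversal methods above all funnel into getSiblingNodes/getParentsNodes and differ only in the sibling type and the filter applied. A minimal usage sketch of the public API (the HTML snippet and variable names are illustrative, not part of this commit):

package main

import (
	"fmt"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	// Illustrative input document.
	page := `<ul><li>a</li><li class="mid">b</li><li>c</li><li>d</li></ul>`
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(page))
	if err != nil {
		panic(err)
	}
	// Siblings of the .mid item: a, c and d.
	fmt.Println(doc.Find("li.mid").Siblings().Length()) // 3
	// Following siblings of the first item, stopping before the last one: b, c.
	doc.Find("li").First().NextUntil("li:last-child").Each(func(i int, s *goquery.Selection) {
		fmt.Println(s.Text())
	})
}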
141
vendor/github.com/PuerkitoBio/goquery/type.go
generated
vendored
Normal file
@ -0,0 +1,141 @@
|
|||||||
|
package goquery
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
|
||||||
|
"github.com/andybalholm/cascadia"
|
||||||
|
|
||||||
|
"golang.org/x/net/html"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Document represents an HTML document to be manipulated. Unlike jQuery, which
|
||||||
|
// is loaded as part of a DOM document, and thus acts upon its containing
|
||||||
|
// document, GoQuery doesn't know which HTML document to act upon. So it needs
|
||||||
|
// to be told, and that's what the Document class is for. It holds the root
|
||||||
|
// document node to manipulate, and can make selections on this document.
|
||||||
|
type Document struct {
|
||||||
|
*Selection
|
||||||
|
Url *url.URL
|
||||||
|
rootNode *html.Node
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewDocumentFromNode is a Document constructor that takes a root html Node
|
||||||
|
// as argument.
|
||||||
|
func NewDocumentFromNode(root *html.Node) *Document {
|
||||||
|
return newDocument(root, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewDocument is a Document constructor that takes a string URL as argument.
|
||||||
|
// It loads the specified document, parses it, and stores the root Document
|
||||||
|
// node, ready to be manipulated.
|
||||||
|
//
|
||||||
|
// Deprecated: Use the net/http standard library package to make the request
|
||||||
|
// and validate the response before calling goquery.NewDocumentFromReader
|
||||||
|
// with the response's body.
|
||||||
|
func NewDocument(url string) (*Document, error) {
|
||||||
|
// Load the URL
|
||||||
|
res, e := http.Get(url)
|
||||||
|
if e != nil {
|
||||||
|
return nil, e
|
||||||
|
}
|
||||||
|
return NewDocumentFromResponse(res)
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewDocumentFromReader returns a Document from an io.Reader.
|
||||||
|
// It returns an error as second value if the reader's data cannot be parsed
|
||||||
|
// as html. It does not check if the reader is also an io.Closer, the
|
||||||
|
// provided reader is never closed by this call. It is the responsibility
|
||||||
|
// of the caller to close it if required.
|
||||||
|
func NewDocumentFromReader(r io.Reader) (*Document, error) {
|
||||||
|
root, e := html.Parse(r)
|
||||||
|
if e != nil {
|
||||||
|
return nil, e
|
||||||
|
}
|
||||||
|
return newDocument(root, nil), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewDocumentFromResponse is another Document constructor that takes an http response as argument.
|
||||||
|
// It loads the specified response's document, parses it, and stores the root Document
|
||||||
|
// node, ready to be manipulated. The response's body is closed on return.
|
||||||
|
//
|
||||||
|
// Deprecated: Use goquery.NewDocumentFromReader with the response's body.
|
||||||
|
func NewDocumentFromResponse(res *http.Response) (*Document, error) {
|
||||||
|
if res == nil {
|
||||||
|
return nil, errors.New("Response is nil")
|
||||||
|
}
|
||||||
|
defer res.Body.Close()
|
||||||
|
if res.Request == nil {
|
||||||
|
return nil, errors.New("Response.Request is nil")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse the HTML into nodes
|
||||||
|
root, e := html.Parse(res.Body)
|
||||||
|
if e != nil {
|
||||||
|
return nil, e
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create and fill the document
|
||||||
|
return newDocument(root, res.Request.URL), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// CloneDocument creates a deep-clone of a document.
|
||||||
|
func CloneDocument(doc *Document) *Document {
|
||||||
|
return newDocument(cloneNode(doc.rootNode), doc.Url)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Private constructor, make sure all fields are correctly filled.
|
||||||
|
func newDocument(root *html.Node, url *url.URL) *Document {
|
||||||
|
// Create and fill the document
|
||||||
|
d := &Document{nil, url, root}
|
||||||
|
d.Selection = newSingleSelection(root, d)
|
||||||
|
return d
|
||||||
|
}
|
||||||
|
|
||||||
|
// Selection represents a collection of nodes matching some criteria. The
|
||||||
|
// initial Selection can be created by using Document.Find, and then
|
||||||
|
// manipulated using the jQuery-like chainable syntax and methods.
|
||||||
|
type Selection struct {
|
||||||
|
Nodes []*html.Node
|
||||||
|
document *Document
|
||||||
|
prevSel *Selection
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper constructor to create an empty selection
|
||||||
|
func newEmptySelection(doc *Document) *Selection {
|
||||||
|
return &Selection{nil, doc, nil}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper constructor to create a selection of only one node
|
||||||
|
func newSingleSelection(node *html.Node, doc *Document) *Selection {
|
||||||
|
return &Selection{[]*html.Node{node}, doc, nil}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Matcher is an interface that defines the methods to match
|
||||||
|
// HTML nodes against a compiled selector string. Cascadia's
|
||||||
|
// Selector implements this interface.
|
||||||
|
type Matcher interface {
|
||||||
|
Match(*html.Node) bool
|
||||||
|
MatchAll(*html.Node) []*html.Node
|
||||||
|
Filter([]*html.Node) []*html.Node
|
||||||
|
}
|
||||||
|
|
||||||
|
// compileMatcher compiles the selector string s and returns
|
||||||
|
// the corresponding Matcher. If s is an invalid selector string,
|
||||||
|
// it returns a Matcher that fails all matches.
|
||||||
|
func compileMatcher(s string) Matcher {
|
||||||
|
cs, err := cascadia.Compile(s)
|
||||||
|
if err != nil {
|
||||||
|
return invalidMatcher{}
|
||||||
|
}
|
||||||
|
return cs
|
||||||
|
}
|
||||||
|
|
||||||
|
// invalidMatcher is a Matcher that always fails to match.
|
||||||
|
type invalidMatcher struct{}
|
||||||
|
|
||||||
|
func (invalidMatcher) Match(n *html.Node) bool { return false }
|
||||||
|
func (invalidMatcher) MatchAll(n *html.Node) []*html.Node { return nil }
|
||||||
|
func (invalidMatcher) Filter(ns []*html.Node) []*html.Node { return nil }
|
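type.go defines the Document and Selection types plus the Matcher interface that compileMatcher satisfies via cascadia. A hedged sketch of passing a precompiled cascadia selector as a Matcher instead of a selector string (the document contents are illustrative):

package main

import (
	"fmt"
	"strings"

	"github.com/PuerkitoBio/goquery"
	"github.com/andybalholm/cascadia"
)

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(`<p class="x">hi</p><p>bye</p>`))
	if err != nil {
		panic(err)
	}
	// cascadia.Selector provides Match, MatchAll and Filter, so it can be
	// compiled once and reused as a goquery.Matcher across queries.
	m := cascadia.MustCompile("p.x")
	fmt.Println(doc.FindMatcher(m).Text()) // hi
}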
161
vendor/github.com/PuerkitoBio/goquery/utilities.go
generated
vendored
Normal file
@ -0,0 +1,161 @@
|
|||||||
|
package goquery
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
|
||||||
|
"golang.org/x/net/html"
|
||||||
|
)
|
||||||
|
|
||||||
|
// used to determine if a set (map[*html.Node]bool) should be used
|
||||||
|
// instead of iterating over a slice. The set uses more memory and
|
||||||
|
// is slower than slice iteration for small N.
|
||||||
|
const minNodesForSet = 1000
|
||||||
|
|
||||||
|
var nodeNames = []string{
|
||||||
|
html.ErrorNode: "#error",
|
||||||
|
html.TextNode: "#text",
|
||||||
|
html.DocumentNode: "#document",
|
||||||
|
html.CommentNode: "#comment",
|
||||||
|
}
|
||||||
|
|
||||||
|
// NodeName returns the node name of the first element in the selection.
|
||||||
|
// It tries to behave in a similar way as the DOM's nodeName property
|
||||||
|
// (https://developer.mozilla.org/en-US/docs/Web/API/Node/nodeName).
|
||||||
|
//
|
||||||
|
// Go's net/html package defines the following node types, listed with
|
||||||
|
// the corresponding returned value from this function:
|
||||||
|
//
|
||||||
|
// ErrorNode : #error
|
||||||
|
// TextNode : #text
|
||||||
|
// DocumentNode : #document
|
||||||
|
// ElementNode : the element's tag name
|
||||||
|
// CommentNode : #comment
|
||||||
|
// DoctypeNode : the name of the document type
|
||||||
|
//
|
||||||
|
func NodeName(s *Selection) string {
|
||||||
|
if s.Length() == 0 {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
switch n := s.Get(0); n.Type {
|
||||||
|
case html.ElementNode, html.DoctypeNode:
|
||||||
|
return n.Data
|
||||||
|
default:
|
||||||
|
if n.Type >= 0 && int(n.Type) < len(nodeNames) {
|
||||||
|
return nodeNames[n.Type]
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// OuterHtml returns the outer HTML rendering of the first item in
|
||||||
|
// the selection - that is, the HTML including the first element's
|
||||||
|
// tag and attributes.
|
||||||
|
//
|
||||||
|
// Unlike InnerHtml, this is a function and not a method on the Selection,
|
||||||
|
// because this is not a jQuery method (in javascript-land, this is
|
||||||
|
// a property provided by the DOM).
|
||||||
|
func OuterHtml(s *Selection) (string, error) {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
|
||||||
|
if s.Length() == 0 {
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
|
n := s.Get(0)
|
||||||
|
if err := html.Render(&buf, n); err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return buf.String(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Loop through all container nodes to search for the target node.
|
||||||
|
func sliceContains(container []*html.Node, contained *html.Node) bool {
|
||||||
|
for _, n := range container {
|
||||||
|
if nodeContains(n, contained) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Checks if the contained node is within the container node.
|
||||||
|
func nodeContains(container *html.Node, contained *html.Node) bool {
|
||||||
|
// Check if the parent of the contained node is the container node, traversing
|
||||||
|
// upward until the top is reached, or the container is found.
|
||||||
|
for contained = contained.Parent; contained != nil; contained = contained.Parent {
|
||||||
|
if container == contained {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Checks if the target node is in the slice of nodes.
|
||||||
|
func isInSlice(slice []*html.Node, node *html.Node) bool {
|
||||||
|
return indexInSlice(slice, node) > -1
|
||||||
|
}
|
||||||
|
|
||||||
|
// Returns the index of the target node in the slice, or -1.
|
||||||
|
func indexInSlice(slice []*html.Node, node *html.Node) int {
|
||||||
|
if node != nil {
|
||||||
|
for i, n := range slice {
|
||||||
|
if n == node {
|
||||||
|
return i
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
|
||||||
|
// Appends the new nodes to the target slice, making sure no duplicate is added.
|
||||||
|
// There is no check to the original state of the target slice, so it may still
|
||||||
|
// contain duplicates. The target slice is returned because append() may create
|
||||||
|
// a new underlying array. If targetSet is nil, a local set is created with the
|
||||||
|
// target if len(target) + len(nodes) is greater than minNodesForSet.
|
||||||
|
func appendWithoutDuplicates(target []*html.Node, nodes []*html.Node, targetSet map[*html.Node]bool) []*html.Node {
|
||||||
|
// if there are not that many nodes, don't use the map, faster to just use nested loops
|
||||||
|
// (unless a non-nil targetSet is passed, in which case the caller knows better).
|
||||||
|
if targetSet == nil && len(target)+len(nodes) < minNodesForSet {
|
||||||
|
for _, n := range nodes {
|
||||||
|
if !isInSlice(target, n) {
|
||||||
|
target = append(target, n)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return target
|
||||||
|
}
|
||||||
|
|
||||||
|
// if a targetSet is passed, then assume it is reliable, otherwise create one
|
||||||
|
// and initialize it with the current target contents.
|
||||||
|
if targetSet == nil {
|
||||||
|
targetSet = make(map[*html.Node]bool, len(target))
|
||||||
|
for _, n := range target {
|
||||||
|
targetSet[n] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for _, n := range nodes {
|
||||||
|
if !targetSet[n] {
|
||||||
|
target = append(target, n)
|
||||||
|
targetSet[n] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return target
|
||||||
|
}
|
||||||
|
|
||||||
|
// Loop through a selection, returning only those nodes that pass the predicate
|
||||||
|
// function.
|
||||||
|
func grep(sel *Selection, predicate func(i int, s *Selection) bool) (result []*html.Node) {
|
||||||
|
for i, n := range sel.Nodes {
|
||||||
|
if predicate(i, newSingleSelection(n, sel.document)) {
|
||||||
|
result = append(result, n)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// Creates a new Selection object based on the specified nodes, and keeps the
|
||||||
|
// source Selection object on the stack (linked list).
|
||||||
|
func pushStack(fromSel *Selection, nodes []*html.Node) *Selection {
|
||||||
|
result := &Selection{nodes, fromSel.document, fromSel}
|
||||||
|
return result
|
||||||
|
}
|
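NodeName and OuterHtml are package-level helpers rather than Selection methods, as the comments above explain. A small illustrative sketch (assumed input, not from this commit):

package main

import (
	"fmt"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(`<div id="a"><span>x</span></div>`))
	if err != nil {
		panic(err)
	}
	sel := doc.Find("#a")
	fmt.Println(goquery.NodeName(sel)) // div
	out, err := goquery.OuterHtml(sel)
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // <div id="a"><span>x</span></div>
}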
14
vendor/github.com/andybalholm/cascadia/.travis.yml
generated
vendored
Normal file
@ -0,0 +1,14 @@
language: go

go:
  - 1.3
  - 1.4

install:
  - go get github.com/andybalholm/cascadia

script:
  - go test -v

notifications:
  email: false
24
vendor/github.com/andybalholm/cascadia/LICENSE
generated
vendored
Executable file
@ -0,0 +1,24 @@
Copyright (c) 2011 Andy Balholm. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
7
vendor/github.com/andybalholm/cascadia/README.md
generated
vendored
Normal file
@ -0,0 +1,7 @@
# cascadia

[![](https://travis-ci.org/andybalholm/cascadia.svg)](https://travis-ci.org/andybalholm/cascadia)

The Cascadia package implements CSS selectors for use with the parse trees produced by the html package.

To test CSS selectors without writing Go code, check out [cascadia](https://github.com/suntong/cascadia) the command line tool, a thin wrapper around this package.
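As the README says, cascadia operates directly on x/net/html parse trees, so it can also be used without goquery. A minimal sketch under that assumption (the markup is illustrative):

package main

import (
	"fmt"
	"strings"

	"github.com/andybalholm/cascadia"
	"golang.org/x/net/html"
)

func main() {
	root, err := html.Parse(strings.NewReader(`<ul><li class="x">a</li><li>b</li></ul>`))
	if err != nil {
		panic(err)
	}
	// Compile the selector once, then match against the parsed tree.
	sel := cascadia.MustCompile("li.x")
	for _, n := range sel.MatchAll(root) {
		fmt.Println(n.FirstChild.Data) // a
	}
}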
3
vendor/github.com/andybalholm/cascadia/go.mod
generated
vendored
Normal file
@ -0,0 +1,3 @@
module "github.com/andybalholm/cascadia"

require "golang.org/x/net" v0.0.0-20180218175443-cbe0f9307d01
835
vendor/github.com/andybalholm/cascadia/parser.go
generated
vendored
Normal file
@ -0,0 +1,835 @@
|
|||||||
|
// Package cascadia is an implementation of CSS selectors.
|
||||||
|
package cascadia
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"regexp"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"golang.org/x/net/html"
|
||||||
|
)
|
||||||
|
|
||||||
|
// a parser for CSS selectors
|
||||||
|
type parser struct {
|
||||||
|
s string // the source text
|
||||||
|
i int // the current position
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseEscape parses a backslash escape.
|
||||||
|
func (p *parser) parseEscape() (result string, err error) {
|
||||||
|
if len(p.s) < p.i+2 || p.s[p.i] != '\\' {
|
||||||
|
return "", errors.New("invalid escape sequence")
|
||||||
|
}
|
||||||
|
|
||||||
|
start := p.i + 1
|
||||||
|
c := p.s[start]
|
||||||
|
switch {
|
||||||
|
case c == '\r' || c == '\n' || c == '\f':
|
||||||
|
return "", errors.New("escaped line ending outside string")
|
||||||
|
case hexDigit(c):
|
||||||
|
// unicode escape (hex)
|
||||||
|
var i int
|
||||||
|
for i = start; i < p.i+6 && i < len(p.s) && hexDigit(p.s[i]); i++ {
|
||||||
|
// empty
|
||||||
|
}
|
||||||
|
v, _ := strconv.ParseUint(p.s[start:i], 16, 21)
|
||||||
|
if len(p.s) > i {
|
||||||
|
switch p.s[i] {
|
||||||
|
case '\r':
|
||||||
|
i++
|
||||||
|
if len(p.s) > i && p.s[i] == '\n' {
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
case ' ', '\t', '\n', '\f':
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
p.i = i
|
||||||
|
return string(rune(v)), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return the literal character after the backslash.
|
||||||
|
result = p.s[start : start+1]
|
||||||
|
p.i += 2
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func hexDigit(c byte) bool {
|
||||||
|
return '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F'
|
||||||
|
}
|
||||||
|
|
||||||
|
// nameStart returns whether c can be the first character of an identifier
|
||||||
|
// (not counting an initial hyphen, or an escape sequence).
|
||||||
|
func nameStart(c byte) bool {
|
||||||
|
return 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || c == '_' || c > 127
|
||||||
|
}
|
||||||
|
|
||||||
|
// nameChar returns whether c can be a character within an identifier
|
||||||
|
// (not counting an escape sequence).
|
||||||
|
func nameChar(c byte) bool {
|
||||||
|
return 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || c == '_' || c > 127 ||
|
||||||
|
c == '-' || '0' <= c && c <= '9'
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseIdentifier parses an identifier.
|
||||||
|
func (p *parser) parseIdentifier() (result string, err error) {
|
||||||
|
startingDash := false
|
||||||
|
if len(p.s) > p.i && p.s[p.i] == '-' {
|
||||||
|
startingDash = true
|
||||||
|
p.i++
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(p.s) <= p.i {
|
||||||
|
return "", errors.New("expected identifier, found EOF instead")
|
||||||
|
}
|
||||||
|
|
||||||
|
if c := p.s[p.i]; !(nameStart(c) || c == '\\') {
|
||||||
|
return "", fmt.Errorf("expected identifier, found %c instead", c)
|
||||||
|
}
|
||||||
|
|
||||||
|
result, err = p.parseName()
|
||||||
|
if startingDash && err == nil {
|
||||||
|
result = "-" + result
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseName parses a name (which is like an identifier, but doesn't have
|
||||||
|
// extra restrictions on the first character).
|
||||||
|
func (p *parser) parseName() (result string, err error) {
|
||||||
|
i := p.i
|
||||||
|
loop:
|
||||||
|
for i < len(p.s) {
|
||||||
|
c := p.s[i]
|
||||||
|
switch {
|
||||||
|
case nameChar(c):
|
||||||
|
start := i
|
||||||
|
for i < len(p.s) && nameChar(p.s[i]) {
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
result += p.s[start:i]
|
||||||
|
case c == '\\':
|
||||||
|
p.i = i
|
||||||
|
val, err := p.parseEscape()
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
i = p.i
|
||||||
|
result += val
|
||||||
|
default:
|
||||||
|
break loop
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if result == "" {
|
||||||
|
return "", errors.New("expected name, found EOF instead")
|
||||||
|
}
|
||||||
|
|
||||||
|
p.i = i
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseString parses a single- or double-quoted string.
|
||||||
|
func (p *parser) parseString() (result string, err error) {
|
||||||
|
i := p.i
|
||||||
|
if len(p.s) < i+2 {
|
||||||
|
return "", errors.New("expected string, found EOF instead")
|
||||||
|
}
|
||||||
|
|
||||||
|
quote := p.s[i]
|
||||||
|
i++
|
||||||
|
|
||||||
|
loop:
|
||||||
|
for i < len(p.s) {
|
||||||
|
switch p.s[i] {
|
||||||
|
case '\\':
|
||||||
|
if len(p.s) > i+1 {
|
||||||
|
switch c := p.s[i+1]; c {
|
||||||
|
case '\r':
|
||||||
|
if len(p.s) > i+2 && p.s[i+2] == '\n' {
|
||||||
|
i += 3
|
||||||
|
continue loop
|
||||||
|
}
|
||||||
|
fallthrough
|
||||||
|
case '\n', '\f':
|
||||||
|
i += 2
|
||||||
|
continue loop
|
||||||
|
}
|
||||||
|
}
|
||||||
|
p.i = i
|
||||||
|
val, err := p.parseEscape()
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
i = p.i
|
||||||
|
result += val
|
||||||
|
case quote:
|
||||||
|
break loop
|
||||||
|
case '\r', '\n', '\f':
|
||||||
|
return "", errors.New("unexpected end of line in string")
|
||||||
|
default:
|
||||||
|
start := i
|
||||||
|
for i < len(p.s) {
|
||||||
|
if c := p.s[i]; c == quote || c == '\\' || c == '\r' || c == '\n' || c == '\f' {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
result += p.s[start:i]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if i >= len(p.s) {
|
||||||
|
return "", errors.New("EOF in string")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Consume the final quote.
|
||||||
|
i++
|
||||||
|
|
||||||
|
p.i = i
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseRegex parses a regular expression; the end is defined by encountering an
|
||||||
|
// unmatched closing ')' or ']', which is not consumed.
|
||||||
|
func (p *parser) parseRegex() (rx *regexp.Regexp, err error) {
|
||||||
|
i := p.i
|
||||||
|
if len(p.s) < i+2 {
|
||||||
|
return nil, errors.New("expected regular expression, found EOF instead")
|
||||||
|
}
|
||||||
|
|
||||||
|
// number of open parens or brackets;
|
||||||
|
// when it becomes negative, finished parsing regex
|
||||||
|
open := 0
|
||||||
|
|
||||||
|
loop:
|
||||||
|
for i < len(p.s) {
|
||||||
|
switch p.s[i] {
|
||||||
|
case '(', '[':
|
||||||
|
open++
|
||||||
|
case ')', ']':
|
||||||
|
open--
|
||||||
|
if open < 0 {
|
||||||
|
break loop
|
||||||
|
}
|
||||||
|
}
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
|
||||||
|
if i >= len(p.s) {
|
||||||
|
return nil, errors.New("EOF in regular expression")
|
||||||
|
}
|
||||||
|
rx, err = regexp.Compile(p.s[p.i:i])
|
||||||
|
p.i = i
|
||||||
|
return rx, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// skipWhitespace consumes whitespace characters and comments.
|
||||||
|
// It returns true if there was actually anything to skip.
|
||||||
|
func (p *parser) skipWhitespace() bool {
|
||||||
|
i := p.i
|
||||||
|
for i < len(p.s) {
|
||||||
|
switch p.s[i] {
|
||||||
|
case ' ', '\t', '\r', '\n', '\f':
|
||||||
|
i++
|
||||||
|
continue
|
||||||
|
case '/':
|
||||||
|
if strings.HasPrefix(p.s[i:], "/*") {
|
||||||
|
end := strings.Index(p.s[i+len("/*"):], "*/")
|
||||||
|
if end != -1 {
|
||||||
|
i += end + len("/**/")
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
if i > p.i {
|
||||||
|
p.i = i
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// consumeParenthesis consumes an opening parenthesis and any following
|
||||||
|
// whitespace. It returns true if there was actually a parenthesis to skip.
|
||||||
|
func (p *parser) consumeParenthesis() bool {
|
||||||
|
if p.i < len(p.s) && p.s[p.i] == '(' {
|
||||||
|
p.i++
|
||||||
|
p.skipWhitespace()
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// consumeClosingParenthesis consumes a closing parenthesis and any preceding
|
||||||
|
// whitespace. It returns true if there was actually a parenthesis to skip.
|
||||||
|
func (p *parser) consumeClosingParenthesis() bool {
|
||||||
|
i := p.i
|
||||||
|
p.skipWhitespace()
|
||||||
|
if p.i < len(p.s) && p.s[p.i] == ')' {
|
||||||
|
p.i++
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
p.i = i
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseTypeSelector parses a type selector (one that matches by tag name).
|
||||||
|
func (p *parser) parseTypeSelector() (result Selector, err error) {
|
||||||
|
tag, err := p.parseIdentifier()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return typeSelector(tag), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseIDSelector parses a selector that matches by id attribute.
|
||||||
|
func (p *parser) parseIDSelector() (Selector, error) {
|
||||||
|
if p.i >= len(p.s) {
|
||||||
|
return nil, fmt.Errorf("expected id selector (#id), found EOF instead")
|
||||||
|
}
|
||||||
|
if p.s[p.i] != '#' {
|
||||||
|
return nil, fmt.Errorf("expected id selector (#id), found '%c' instead", p.s[p.i])
|
||||||
|
}
|
||||||
|
|
||||||
|
p.i++
|
||||||
|
id, err := p.parseName()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return attributeEqualsSelector("id", id), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseClassSelector parses a selector that matches by class attribute.
|
||||||
|
func (p *parser) parseClassSelector() (Selector, error) {
|
||||||
|
if p.i >= len(p.s) {
|
||||||
|
return nil, fmt.Errorf("expected class selector (.class), found EOF instead")
|
||||||
|
}
|
||||||
|
if p.s[p.i] != '.' {
|
||||||
|
return nil, fmt.Errorf("expected class selector (.class), found '%c' instead", p.s[p.i])
|
||||||
|
}
|
||||||
|
|
||||||
|
p.i++
|
||||||
|
class, err := p.parseIdentifier()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return attributeIncludesSelector("class", class), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseAttributeSelector parses a selector that matches by attribute value.
|
||||||
|
func (p *parser) parseAttributeSelector() (Selector, error) {
|
||||||
|
if p.i >= len(p.s) {
|
||||||
|
return nil, fmt.Errorf("expected attribute selector ([attribute]), found EOF instead")
|
||||||
|
}
|
||||||
|
if p.s[p.i] != '[' {
|
||||||
|
return nil, fmt.Errorf("expected attribute selector ([attribute]), found '%c' instead", p.s[p.i])
|
||||||
|
}
|
||||||
|
|
||||||
|
p.i++
|
||||||
|
p.skipWhitespace()
|
||||||
|
key, err := p.parseIdentifier()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
p.skipWhitespace()
|
||||||
|
if p.i >= len(p.s) {
|
||||||
|
return nil, errors.New("unexpected EOF in attribute selector")
|
||||||
|
}
|
||||||
|
|
||||||
|
if p.s[p.i] == ']' {
|
||||||
|
p.i++
|
||||||
|
return attributeExistsSelector(key), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if p.i+2 >= len(p.s) {
|
||||||
|
return nil, errors.New("unexpected EOF in attribute selector")
|
||||||
|
}
|
||||||
|
|
||||||
|
op := p.s[p.i : p.i+2]
|
||||||
|
if op[0] == '=' {
|
||||||
|
op = "="
|
||||||
|
} else if op[1] != '=' {
|
||||||
|
return nil, fmt.Errorf(`expected equality operator, found "%s" instead`, op)
|
||||||
|
}
|
||||||
|
p.i += len(op)
|
||||||
|
|
||||||
|
p.skipWhitespace()
|
||||||
|
if p.i >= len(p.s) {
|
||||||
|
return nil, errors.New("unexpected EOF in attribute selector")
|
||||||
|
}
|
||||||
|
var val string
|
||||||
|
var rx *regexp.Regexp
|
||||||
|
if op == "#=" {
|
||||||
|
rx, err = p.parseRegex()
|
||||||
|
} else {
|
||||||
|
switch p.s[p.i] {
|
||||||
|
case '\'', '"':
|
||||||
|
val, err = p.parseString()
|
||||||
|
default:
|
||||||
|
val, err = p.parseIdentifier()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
p.skipWhitespace()
|
||||||
|
if p.i >= len(p.s) {
|
||||||
|
return nil, errors.New("unexpected EOF in attribute selector")
|
||||||
|
}
|
||||||
|
if p.s[p.i] != ']' {
|
||||||
|
return nil, fmt.Errorf("expected ']', found '%c' instead", p.s[p.i])
|
||||||
|
}
|
||||||
|
p.i++
|
||||||
|
|
||||||
|
switch op {
|
||||||
|
case "=":
|
||||||
|
return attributeEqualsSelector(key, val), nil
|
||||||
|
case "!=":
|
||||||
|
return attributeNotEqualSelector(key, val), nil
|
||||||
|
case "~=":
|
||||||
|
return attributeIncludesSelector(key, val), nil
|
||||||
|
case "|=":
|
||||||
|
return attributeDashmatchSelector(key, val), nil
|
||||||
|
case "^=":
|
||||||
|
return attributePrefixSelector(key, val), nil
|
||||||
|
case "$=":
|
||||||
|
return attributeSuffixSelector(key, val), nil
|
||||||
|
case "*=":
|
||||||
|
return attributeSubstringSelector(key, val), nil
|
||||||
|
case "#=":
|
||||||
|
return attributeRegexSelector(key, rx), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, fmt.Errorf("attribute operator %q is not supported", op)
|
||||||
|
}
|
||||||
|
|
||||||
|
var errExpectedParenthesis = errors.New("expected '(' but didn't find it")
|
||||||
|
var errExpectedClosingParenthesis = errors.New("expected ')' but didn't find it")
|
||||||
|
var errUnmatchedParenthesis = errors.New("unmatched '('")
|
||||||
|
|
||||||
|
// parsePseudoclassSelector parses a pseudoclass selector like :not(p).
|
||||||
|
func (p *parser) parsePseudoclassSelector() (Selector, error) {
|
||||||
|
if p.i >= len(p.s) {
|
||||||
|
return nil, fmt.Errorf("expected pseudoclass selector (:pseudoclass), found EOF instead")
|
||||||
|
}
|
||||||
|
if p.s[p.i] != ':' {
|
||||||
|
return nil, fmt.Errorf("expected attribute selector (:pseudoclass), found '%c' instead", p.s[p.i])
|
||||||
|
}
|
||||||
|
|
||||||
|
p.i++
|
||||||
|
name, err := p.parseIdentifier()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
name = toLowerASCII(name)
|
||||||
|
|
||||||
|
switch name {
|
||||||
|
case "not", "has", "haschild":
|
||||||
|
if !p.consumeParenthesis() {
|
||||||
|
return nil, errExpectedParenthesis
|
||||||
|
}
|
||||||
|
sel, parseErr := p.parseSelectorGroup()
|
||||||
|
if parseErr != nil {
|
||||||
|
return nil, parseErr
|
||||||
|
}
|
||||||
|
if !p.consumeClosingParenthesis() {
|
||||||
|
return nil, errExpectedClosingParenthesis
|
||||||
|
}
|
||||||
|
|
||||||
|
switch name {
|
||||||
|
case "not":
|
||||||
|
return negatedSelector(sel), nil
|
||||||
|
case "has":
|
||||||
|
return hasDescendantSelector(sel), nil
|
||||||
|
case "haschild":
|
||||||
|
return hasChildSelector(sel), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
case "contains", "containsown":
|
||||||
|
if !p.consumeParenthesis() {
|
||||||
|
return nil, errExpectedParenthesis
|
||||||
|
}
|
||||||
|
if p.i == len(p.s) {
|
||||||
|
return nil, errUnmatchedParenthesis
|
||||||
|
}
|
||||||
|
var val string
|
||||||
|
switch p.s[p.i] {
|
||||||
|
case '\'', '"':
|
||||||
|
val, err = p.parseString()
|
||||||
|
default:
|
||||||
|
val, err = p.parseIdentifier()
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
val = strings.ToLower(val)
|
||||||
|
p.skipWhitespace()
|
||||||
|
if p.i >= len(p.s) {
|
||||||
|
return nil, errors.New("unexpected EOF in pseudo selector")
|
||||||
|
}
|
||||||
|
if !p.consumeClosingParenthesis() {
|
||||||
|
return nil, errExpectedClosingParenthesis
|
||||||
|
}
|
||||||
|
|
||||||
|
switch name {
|
||||||
|
case "contains":
|
||||||
|
return textSubstrSelector(val), nil
|
||||||
|
case "containsown":
|
||||||
|
return ownTextSubstrSelector(val), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
case "matches", "matchesown":
|
||||||
|
if !p.consumeParenthesis() {
|
||||||
|
return nil, errExpectedParenthesis
|
||||||
|
}
|
||||||
|
rx, err := p.parseRegex()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if p.i >= len(p.s) {
|
||||||
|
return nil, errors.New("unexpected EOF in pseudo selector")
|
||||||
|
}
|
||||||
|
if !p.consumeClosingParenthesis() {
|
||||||
|
return nil, errExpectedClosingParenthesis
|
||||||
|
}
|
||||||
|
|
||||||
|
switch name {
|
||||||
|
case "matches":
|
||||||
|
return textRegexSelector(rx), nil
|
||||||
|
case "matchesown":
|
||||||
|
return ownTextRegexSelector(rx), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
case "nth-child", "nth-last-child", "nth-of-type", "nth-last-of-type":
|
||||||
|
if !p.consumeParenthesis() {
|
||||||
|
return nil, errExpectedParenthesis
|
||||||
|
}
|
||||||
|
a, b, err := p.parseNth()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if !p.consumeClosingParenthesis() {
|
||||||
|
return nil, errExpectedClosingParenthesis
|
||||||
|
}
|
||||||
|
if a == 0 {
|
||||||
|
switch name {
|
||||||
|
case "nth-child":
|
||||||
|
return simpleNthChildSelector(b, false), nil
|
||||||
|
case "nth-of-type":
|
||||||
|
return simpleNthChildSelector(b, true), nil
|
||||||
|
case "nth-last-child":
|
||||||
|
return simpleNthLastChildSelector(b, false), nil
|
||||||
|
case "nth-last-of-type":
|
||||||
|
return simpleNthLastChildSelector(b, true), nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nthChildSelector(a, b,
|
||||||
|
name == "nth-last-child" || name == "nth-last-of-type",
|
||||||
|
name == "nth-of-type" || name == "nth-last-of-type"),
|
||||||
|
nil
|
||||||
|
|
||||||
|
case "first-child":
|
||||||
|
return simpleNthChildSelector(1, false), nil
|
||||||
|
case "last-child":
|
||||||
|
return simpleNthLastChildSelector(1, false), nil
|
||||||
|
case "first-of-type":
|
||||||
|
return simpleNthChildSelector(1, true), nil
|
||||||
|
case "last-of-type":
|
||||||
|
return simpleNthLastChildSelector(1, true), nil
|
||||||
|
case "only-child":
|
||||||
|
return onlyChildSelector(false), nil
|
||||||
|
case "only-of-type":
|
||||||
|
return onlyChildSelector(true), nil
|
||||||
|
case "input":
|
||||||
|
return inputSelector, nil
|
||||||
|
case "empty":
|
||||||
|
return emptyElementSelector, nil
|
||||||
|
case "root":
|
||||||
|
return rootSelector, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, fmt.Errorf("unknown pseudoclass :%s", name)
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseInteger parses a decimal integer.
|
||||||
|
func (p *parser) parseInteger() (int, error) {
|
||||||
|
i := p.i
|
||||||
|
start := i
|
||||||
|
for i < len(p.s) && '0' <= p.s[i] && p.s[i] <= '9' {
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
if i == start {
|
||||||
|
return 0, errors.New("expected integer, but didn't find it")
|
||||||
|
}
|
||||||
|
p.i = i
|
||||||
|
|
||||||
|
val, err := strconv.Atoi(p.s[start:i])
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return val, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseNth parses the argument for :nth-child (normally of the form an+b).
|
||||||
|
func (p *parser) parseNth() (a, b int, err error) {
|
||||||
|
// initial state
|
||||||
|
if p.i >= len(p.s) {
|
||||||
|
goto eof
|
||||||
|
}
|
||||||
|
switch p.s[p.i] {
|
||||||
|
case '-':
|
||||||
|
p.i++
|
||||||
|
goto negativeA
|
||||||
|
case '+':
|
||||||
|
p.i++
|
||||||
|
goto positiveA
|
||||||
|
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||||
|
goto positiveA
|
||||||
|
case 'n', 'N':
|
||||||
|
a = 1
|
||||||
|
p.i++
|
||||||
|
goto readN
|
||||||
|
case 'o', 'O', 'e', 'E':
|
||||||
|
id, nameErr := p.parseName()
|
||||||
|
if nameErr != nil {
|
||||||
|
return 0, 0, nameErr
|
||||||
|
}
|
||||||
|
id = toLowerASCII(id)
|
||||||
|
if id == "odd" {
|
||||||
|
return 2, 1, nil
|
||||||
|
}
|
||||||
|
if id == "even" {
|
||||||
|
return 2, 0, nil
|
||||||
|
}
|
||||||
|
return 0, 0, fmt.Errorf("expected 'odd' or 'even', but found '%s' instead", id)
|
||||||
|
default:
|
||||||
|
goto invalid
|
||||||
|
}
|
||||||
|
|
||||||
|
positiveA:
|
||||||
|
if p.i >= len(p.s) {
|
||||||
|
goto eof
|
||||||
|
}
|
||||||
|
switch p.s[p.i] {
|
||||||
|
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||||
|
a, err = p.parseInteger()
|
||||||
|
if err != nil {
|
||||||
|
return 0, 0, err
|
||||||
|
}
|
||||||
|
goto readA
|
||||||
|
case 'n', 'N':
|
||||||
|
a = 1
|
||||||
|
p.i++
|
||||||
|
goto readN
|
||||||
|
default:
|
||||||
|
goto invalid
|
||||||
|
}
|
||||||
|
|
||||||
|
negativeA:
|
||||||
|
if p.i >= len(p.s) {
|
||||||
|
goto eof
|
||||||
|
}
|
||||||
|
switch p.s[p.i] {
|
||||||
|
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||||
|
a, err = p.parseInteger()
|
||||||
|
if err != nil {
|
||||||
|
return 0, 0, err
|
||||||
|
}
|
||||||
|
a = -a
|
||||||
|
goto readA
|
||||||
|
case 'n', 'N':
|
||||||
|
a = -1
|
||||||
|
p.i++
|
||||||
|
goto readN
|
||||||
|
default:
|
||||||
|
goto invalid
|
||||||
|
}
|
||||||
|
|
||||||
|
readA:
|
||||||
|
if p.i >= len(p.s) {
|
||||||
|
goto eof
|
||||||
|
}
|
||||||
|
switch p.s[p.i] {
|
||||||
|
case 'n', 'N':
|
||||||
|
p.i++
|
||||||
|
goto readN
|
||||||
|
default:
|
||||||
|
// The number we read as a is actually b.
|
||||||
|
return 0, a, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
readN:
|
||||||
|
p.skipWhitespace()
|
||||||
|
if p.i >= len(p.s) {
|
||||||
|
goto eof
|
||||||
|
}
|
||||||
|
switch p.s[p.i] {
|
||||||
|
case '+':
|
||||||
|
p.i++
|
||||||
|
p.skipWhitespace()
|
||||||
|
b, err = p.parseInteger()
|
||||||
|
if err != nil {
|
||||||
|
return 0, 0, err
|
||||||
|
}
|
||||||
|
return a, b, nil
|
||||||
|
case '-':
|
||||||
|
p.i++
|
||||||
|
p.skipWhitespace()
|
||||||
|
b, err = p.parseInteger()
|
||||||
|
if err != nil {
|
||||||
|
return 0, 0, err
|
||||||
|
}
|
||||||
|
return a, -b, nil
|
||||||
|
default:
|
||||||
|
return a, 0, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
eof:
|
||||||
|
return 0, 0, errors.New("unexpected EOF while attempting to parse expression of form an+b")
|
||||||
|
|
||||||
|
invalid:
|
||||||
|
return 0, 0, errors.New("unexpected character while attempting to parse expression of form an+b")
|
||||||
|
}
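// Worked example of the an+b grammar handled above: for the argument "2n+1"
// (as in :nth-child(2n+1)) the parser reads a=2 at positiveA, consumes the
// 'n' at readA, parses "+1" at readN and returns (a, b) = (2, 1); the
// keywords "odd" and "even" short-circuit in the initial state to (2, 1)
// and (2, 0) respectively.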
|
||||||
|
|
||||||
|
// parseSimpleSelectorSequence parses a selector sequence that applies to
|
||||||
|
// a single element.
|
||||||
|
func (p *parser) parseSimpleSelectorSequence() (Selector, error) {
|
||||||
|
var result Selector
|
||||||
|
|
||||||
|
if p.i >= len(p.s) {
|
||||||
|
return nil, errors.New("expected selector, found EOF instead")
|
||||||
|
}
|
||||||
|
|
||||||
|
switch p.s[p.i] {
|
||||||
|
case '*':
|
||||||
|
// It's the universal selector. Just skip over it, since it doesn't affect the meaning.
|
||||||
|
p.i++
|
||||||
|
case '#', '.', '[', ':':
|
||||||
|
// There's no type selector. Wait to process the others in the main loop.
|
||||||
|
default:
|
||||||
|
r, err := p.parseTypeSelector()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
result = r
|
||||||
|
}
|
||||||
|
|
||||||
|
loop:
|
||||||
|
for p.i < len(p.s) {
|
||||||
|
var ns Selector
|
||||||
|
var err error
|
||||||
|
switch p.s[p.i] {
|
||||||
|
case '#':
|
||||||
|
ns, err = p.parseIDSelector()
|
||||||
|
case '.':
|
||||||
|
ns, err = p.parseClassSelector()
|
||||||
|
case '[':
|
||||||
|
ns, err = p.parseAttributeSelector()
|
||||||
|
case ':':
|
||||||
|
ns, err = p.parsePseudoclassSelector()
|
||||||
|
default:
|
||||||
|
break loop
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if result == nil {
|
||||||
|
result = ns
|
||||||
|
} else {
|
||||||
|
result = intersectionSelector(result, ns)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if result == nil {
|
||||||
|
result = func(n *html.Node) bool {
|
||||||
|
return n.Type == html.ElementNode
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseSelector parses a selector that may include combinators.
|
||||||
|
func (p *parser) parseSelector() (result Selector, err error) {
|
||||||
|
p.skipWhitespace()
|
||||||
|
result, err = p.parseSimpleSelectorSequence()
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
for {
|
||||||
|
var combinator byte
|
||||||
|
if p.skipWhitespace() {
|
||||||
|
combinator = ' '
|
||||||
|
}
|
||||||
|
if p.i >= len(p.s) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
switch p.s[p.i] {
|
||||||
|
case '+', '>', '~':
|
||||||
|
combinator = p.s[p.i]
|
||||||
|
p.i++
|
||||||
|
p.skipWhitespace()
|
||||||
|
case ',', ')':
|
||||||
|
// These characters can't begin a selector, but they can legally occur after one.
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if combinator == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c, err := p.parseSimpleSelectorSequence()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
switch combinator {
|
||||||
|
case ' ':
|
||||||
|
result = descendantSelector(result, c)
|
||||||
|
case '>':
|
||||||
|
result = childSelector(result, c)
|
||||||
|
case '+':
|
||||||
|
result = siblingSelector(result, c, true)
|
||||||
|
case '~':
|
||||||
|
result = siblingSelector(result, c, false)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
panic("unreachable")
|
||||||
|
}
|
||||||
|
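// Editor's note (illustrative, not part of the original source): the combinator
// handling above means that, for example, "div > p" effectively compiles to
// childSelector(typeSelector("div"), typeSelector("p")), while "div p", with
// only whitespace between the two sequences, compiles to the descendantSelector
// form instead.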
|
||||||
|
// parseSelectorGroup parses a group of selectors, separated by commas.
|
||||||
|
func (p *parser) parseSelectorGroup() (result Selector, err error) {
|
||||||
|
result, err = p.parseSelector()
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
for p.i < len(p.s) {
|
||||||
|
if p.s[p.i] != ',' {
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
p.i++
|
||||||
|
c, err := p.parseSelector()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
result = unionSelector(result, c)
|
||||||
|
}
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
622
vendor/github.com/andybalholm/cascadia/selector.go
generated
vendored
Normal file
@ -0,0 +1,622 @@
|
|||||||
|
package cascadia
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"golang.org/x/net/html"
|
||||||
|
)
|
||||||
|
|
||||||
|
// the Selector type, and functions for creating them
|
||||||
|
|
||||||
|
// A Selector is a function which tells whether a node matches or not.
|
||||||
|
type Selector func(*html.Node) bool
|
||||||
|
|
||||||
|
// hasChildMatch returns whether n has any child that matches a.
|
||||||
|
func hasChildMatch(n *html.Node, a Selector) bool {
|
||||||
|
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||||
|
if a(c) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// hasDescendantMatch performs a depth-first search of n's descendants,
|
||||||
|
// testing whether any of them match a. It returns true as soon as a match is
|
||||||
|
// found, or false if no match is found.
|
||||||
|
func hasDescendantMatch(n *html.Node, a Selector) bool {
|
||||||
|
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||||
|
if a(c) || (c.Type == html.ElementNode && hasDescendantMatch(c, a)) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Compile parses a selector and returns, if successful, a Selector object
|
||||||
|
// that can be used to match against html.Node objects.
|
||||||
|
func Compile(sel string) (Selector, error) {
|
||||||
|
p := &parser{s: sel}
|
||||||
|
compiled, err := p.parseSelectorGroup()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if p.i < len(sel) {
|
||||||
|
return nil, fmt.Errorf("parsing %q: %d bytes left over", sel, len(sel)-p.i)
|
||||||
|
}
|
||||||
|
|
||||||
|
return compiled, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// MustCompile is like Compile, but panics instead of returning an error.
|
||||||
|
func MustCompile(sel string) Selector {
|
||||||
|
compiled, err := Compile(sel)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return compiled
|
||||||
|
}
|
||||||
|
|
||||||
|
// MatchAll returns a slice of the nodes that match the selector,
|
||||||
|
// from n and its children.
|
||||||
|
func (s Selector) MatchAll(n *html.Node) []*html.Node {
|
||||||
|
return s.matchAllInto(n, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s Selector) matchAllInto(n *html.Node, storage []*html.Node) []*html.Node {
|
||||||
|
if s(n) {
|
||||||
|
storage = append(storage, n)
|
||||||
|
}
|
||||||
|
|
||||||
|
for child := n.FirstChild; child != nil; child = child.NextSibling {
|
||||||
|
storage = s.matchAllInto(child, storage)
|
||||||
|
}
|
||||||
|
|
||||||
|
return storage
|
||||||
|
}
|
||||||
|
|
||||||
|
// Match returns true if the node matches the selector.
|
||||||
|
func (s Selector) Match(n *html.Node) bool {
|
||||||
|
return s(n)
|
||||||
|
}
|
||||||
|
|
||||||
|
// MatchFirst returns the first node that matches s, from n and its children.
|
||||||
|
func (s Selector) MatchFirst(n *html.Node) *html.Node {
|
||||||
|
if s.Match(n) {
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
|
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||||
|
m := s.MatchFirst(c)
|
||||||
|
if m != nil {
|
||||||
|
return m
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Filter returns the nodes in nodes that match the selector.
|
||||||
|
func (s Selector) Filter(nodes []*html.Node) (result []*html.Node) {
|
||||||
|
for _, n := range nodes {
|
||||||
|
if s(n) {
|
||||||
|
result = append(result, n)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
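// Editor's note: exampleMatchAll is an illustrative sketch, not part of the
// original cascadia source. It shows the typical flow: parse an HTML document
// with golang.org/x/net/html, compile a selector, and collect the matches.
func exampleMatchAll() ([]*html.Node, error) {
	doc, err := html.Parse(strings.NewReader(`<ul><li class="x">a</li><li>b</li></ul>`))
	if err != nil {
		return nil, err
	}
	sel, err := Compile("li.x") // MustCompile panics instead of returning the error
	if err != nil {
		return nil, err
	}
	return sel.MatchAll(doc), nil
}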
|
||||||
|
// typeSelector returns a Selector that matches elements with a given tag name.
|
||||||
|
func typeSelector(tag string) Selector {
|
||||||
|
tag = toLowerASCII(tag)
|
||||||
|
return func(n *html.Node) bool {
|
||||||
|
return n.Type == html.ElementNode && n.Data == tag
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// toLowerASCII returns s with all ASCII capital letters lowercased.
|
||||||
|
func toLowerASCII(s string) string {
|
||||||
|
var b []byte
|
||||||
|
for i := 0; i < len(s); i++ {
|
||||||
|
if c := s[i]; 'A' <= c && c <= 'Z' {
|
||||||
|
if b == nil {
|
||||||
|
b = make([]byte, len(s))
|
||||||
|
copy(b, s)
|
||||||
|
}
|
||||||
|
b[i] = s[i] + ('a' - 'A')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if b == nil {
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
|
return string(b)
|
||||||
|
}
|
||||||
|
|
||||||
|
// attributeSelector returns a Selector that matches elements
|
||||||
|
// where the attribute named key satisfies the function f.
|
||||||
|
func attributeSelector(key string, f func(string) bool) Selector {
|
||||||
|
key = toLowerASCII(key)
|
||||||
|
return func(n *html.Node) bool {
|
||||||
|
if n.Type != html.ElementNode {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
for _, a := range n.Attr {
|
||||||
|
if a.Key == key && f(a.Val) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// attributeExistsSelector returns a Selector that matches elements that have
|
||||||
|
// an attribute named key.
|
||||||
|
func attributeExistsSelector(key string) Selector {
|
||||||
|
return attributeSelector(key, func(string) bool { return true })
|
||||||
|
}
|
||||||
|
|
||||||
|
// attributeEqualsSelector returns a Selector that matches elements where
|
||||||
|
// the attribute named key has the value val.
|
||||||
|
func attributeEqualsSelector(key, val string) Selector {
|
||||||
|
return attributeSelector(key,
|
||||||
|
func(s string) bool {
|
||||||
|
return s == val
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// attributeNotEqualSelector returns a Selector that matches elements where
|
||||||
|
// the attribute named key does not have the value val.
|
||||||
|
func attributeNotEqualSelector(key, val string) Selector {
|
||||||
|
key = toLowerASCII(key)
|
||||||
|
return func(n *html.Node) bool {
|
||||||
|
if n.Type != html.ElementNode {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
for _, a := range n.Attr {
|
||||||
|
if a.Key == key && a.Val == val {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// attributeIncludesSelector returns a Selector that matches elements where
|
||||||
|
// the attribute named key is a whitespace-separated list that includes val.
|
||||||
|
func attributeIncludesSelector(key, val string) Selector {
|
||||||
|
return attributeSelector(key,
|
||||||
|
func(s string) bool {
|
||||||
|
for s != "" {
|
||||||
|
i := strings.IndexAny(s, " \t\r\n\f")
|
||||||
|
if i == -1 {
|
||||||
|
return s == val
|
||||||
|
}
|
||||||
|
if s[:i] == val {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
s = s[i+1:]
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// attributeDashmatchSelector returns a Selector that matches elements where
|
||||||
|
// the attribute named key equals val or starts with val plus a hyphen.
|
||||||
|
func attributeDashmatchSelector(key, val string) Selector {
|
||||||
|
return attributeSelector(key,
|
||||||
|
func(s string) bool {
|
||||||
|
if s == val {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if len(s) <= len(val) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if s[:len(val)] == val && s[len(val)] == '-' {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// attributePrefixSelector returns a Selector that matches elements where
|
||||||
|
// the attribute named key starts with val.
|
||||||
|
func attributePrefixSelector(key, val string) Selector {
|
||||||
|
return attributeSelector(key,
|
||||||
|
func(s string) bool {
|
||||||
|
if strings.TrimSpace(s) == "" {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return strings.HasPrefix(s, val)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// attributeSuffixSelector returns a Selector that matches elements where
|
||||||
|
// the attribute named key ends with val.
|
||||||
|
func attributeSuffixSelector(key, val string) Selector {
|
||||||
|
return attributeSelector(key,
|
||||||
|
func(s string) bool {
|
||||||
|
if strings.TrimSpace(s) == "" {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return strings.HasSuffix(s, val)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// attributeSubstringSelector returns a Selector that matches nodes where
|
||||||
|
// the attribute named key contains val.
|
||||||
|
func attributeSubstringSelector(key, val string) Selector {
|
||||||
|
return attributeSelector(key,
|
||||||
|
func(s string) bool {
|
||||||
|
if strings.TrimSpace(s) == "" {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return strings.Contains(s, val)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// attributeRegexSelector returns a Selector that matches nodes where
|
||||||
|
// the attribute named key matches the regular expression rx
|
||||||
|
func attributeRegexSelector(key string, rx *regexp.Regexp) Selector {
|
||||||
|
return attributeSelector(key,
|
||||||
|
func(s string) bool {
|
||||||
|
return rx.MatchString(s)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// intersectionSelector returns a selector that matches nodes that match
|
||||||
|
// both a and b.
|
||||||
|
func intersectionSelector(a, b Selector) Selector {
|
||||||
|
return func(n *html.Node) bool {
|
||||||
|
return a(n) && b(n)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// unionSelector returns a selector that matches elements that match
|
||||||
|
// either a or b.
|
||||||
|
func unionSelector(a, b Selector) Selector {
|
||||||
|
return func(n *html.Node) bool {
|
||||||
|
return a(n) || b(n)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// negatedSelector returns a selector that matches elements that do not match a.
|
||||||
|
func negatedSelector(a Selector) Selector {
|
||||||
|
return func(n *html.Node) bool {
|
||||||
|
if n.Type != html.ElementNode {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return !a(n)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// writeNodeText writes the text contained in n and its descendants to b.
|
||||||
|
func writeNodeText(n *html.Node, b *bytes.Buffer) {
|
||||||
|
switch n.Type {
|
||||||
|
case html.TextNode:
|
||||||
|
b.WriteString(n.Data)
|
||||||
|
case html.ElementNode:
|
||||||
|
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||||
|
writeNodeText(c, b)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// nodeText returns the text contained in n and its descendants.
|
||||||
|
func nodeText(n *html.Node) string {
|
||||||
|
var b bytes.Buffer
|
||||||
|
writeNodeText(n, &b)
|
||||||
|
return b.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// nodeOwnText returns the contents of the text nodes that are direct
|
||||||
|
// children of n.
|
||||||
|
func nodeOwnText(n *html.Node) string {
|
||||||
|
var b bytes.Buffer
|
||||||
|
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||||
|
if c.Type == html.TextNode {
|
||||||
|
b.WriteString(c.Data)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return b.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// textSubstrSelector returns a selector that matches nodes that
|
||||||
|
// contain the given text.
|
||||||
|
func textSubstrSelector(val string) Selector {
|
||||||
|
return func(n *html.Node) bool {
|
||||||
|
text := strings.ToLower(nodeText(n))
|
||||||
|
return strings.Contains(text, val)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ownTextSubstrSelector returns a selector that matches nodes that
|
||||||
|
// directly contain the given text
|
||||||
|
func ownTextSubstrSelector(val string) Selector {
|
||||||
|
return func(n *html.Node) bool {
|
||||||
|
text := strings.ToLower(nodeOwnText(n))
|
||||||
|
return strings.Contains(text, val)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// textRegexSelector returns a selector that matches nodes whose text matches
|
||||||
|
// the specified regular expression
|
||||||
|
func textRegexSelector(rx *regexp.Regexp) Selector {
|
||||||
|
return func(n *html.Node) bool {
|
||||||
|
return rx.MatchString(nodeText(n))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ownTextRegexSelector returns a selector that matches nodes whose text
|
||||||
|
// directly matches the specified regular expression
|
||||||
|
func ownTextRegexSelector(rx *regexp.Regexp) Selector {
|
||||||
|
return func(n *html.Node) bool {
|
||||||
|
return rx.MatchString(nodeOwnText(n))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// hasChildSelector returns a selector that matches elements
|
||||||
|
// with a child that matches a.
|
||||||
|
func hasChildSelector(a Selector) Selector {
|
||||||
|
return func(n *html.Node) bool {
|
||||||
|
if n.Type != html.ElementNode {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return hasChildMatch(n, a)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// hasDescendantSelector returns a selector that matches elements
|
||||||
|
// with any descendant that matches a.
|
||||||
|
func hasDescendantSelector(a Selector) Selector {
|
||||||
|
return func(n *html.Node) bool {
|
||||||
|
if n.Type != html.ElementNode {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return hasDescendantMatch(n, a)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// nthChildSelector returns a selector that implements :nth-child(an+b).
|
||||||
|
// If last is true, implements :nth-last-child instead.
|
||||||
|
// If ofType is true, implements :nth-of-type instead.
|
||||||
|
func nthChildSelector(a, b int, last, ofType bool) Selector {
|
||||||
|
return func(n *html.Node) bool {
|
||||||
|
if n.Type != html.ElementNode {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
parent := n.Parent
|
||||||
|
if parent == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if parent.Type == html.DocumentNode {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
i := -1
|
||||||
|
count := 0
|
||||||
|
for c := parent.FirstChild; c != nil; c = c.NextSibling {
|
||||||
|
if (c.Type != html.ElementNode) || (ofType && c.Data != n.Data) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
count++
|
||||||
|
if c == n {
|
||||||
|
i = count
|
||||||
|
if !last {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if i == -1 {
|
||||||
|
// This shouldn't happen, since n should always be one of its parent's children.
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if last {
|
||||||
|
i = count - i + 1
|
||||||
|
}
|
||||||
|
|
||||||
|
i -= b
|
||||||
|
if a == 0 {
|
||||||
|
return i == 0
|
||||||
|
}
|
||||||
|
|
||||||
|
return i%a == 0 && i/a >= 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
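// Editor's note (illustrative, not part of the original source): with a=2 and
// b=1 (i.e. :nth-child(2n+1), "odd"), the check above reduces to i-1 being a
// non-negative multiple of 2, so the 1st, 3rd, 5th, ... element children of a
// parent match.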
|
||||||
|
// simpleNthChildSelector returns a selector that implements :nth-child(b).
|
||||||
|
// If ofType is true, implements :nth-of-type instead.
|
||||||
|
func simpleNthChildSelector(b int, ofType bool) Selector {
|
||||||
|
return func(n *html.Node) bool {
|
||||||
|
if n.Type != html.ElementNode {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
parent := n.Parent
|
||||||
|
if parent == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if parent.Type == html.DocumentNode {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
count := 0
|
||||||
|
for c := parent.FirstChild; c != nil; c = c.NextSibling {
|
||||||
|
if c.Type != html.ElementNode || (ofType && c.Data != n.Data) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
count++
|
||||||
|
if c == n {
|
||||||
|
return count == b
|
||||||
|
}
|
||||||
|
if count >= b {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// simpleNthLastChildSelector returns a selector that implements
|
||||||
|
// :nth-last-child(b). If ofType is true, implements :nth-last-of-type
|
||||||
|
// instead.
|
||||||
|
func simpleNthLastChildSelector(b int, ofType bool) Selector {
|
||||||
|
return func(n *html.Node) bool {
|
||||||
|
if n.Type != html.ElementNode {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
parent := n.Parent
|
||||||
|
if parent == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if parent.Type == html.DocumentNode {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
count := 0
|
||||||
|
for c := parent.LastChild; c != nil; c = c.PrevSibling {
|
||||||
|
if c.Type != html.ElementNode || (ofType && c.Data != n.Data) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
count++
|
||||||
|
if c == n {
|
||||||
|
return count == b
|
||||||
|
}
|
||||||
|
if count >= b {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// onlyChildSelector returns a selector that implements :only-child.
|
||||||
|
// If ofType is true, it implements :only-of-type instead.
|
||||||
|
func onlyChildSelector(ofType bool) Selector {
|
||||||
|
return func(n *html.Node) bool {
|
||||||
|
if n.Type != html.ElementNode {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
parent := n.Parent
|
||||||
|
if parent == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if parent.Type == html.DocumentNode {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
count := 0
|
||||||
|
for c := parent.FirstChild; c != nil; c = c.NextSibling {
|
||||||
|
if (c.Type != html.ElementNode) || (ofType && c.Data != n.Data) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
count++
|
||||||
|
if count > 1 {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return count == 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// inputSelector is a Selector that matches input, select, textarea and button elements.
|
||||||
|
func inputSelector(n *html.Node) bool {
|
||||||
|
return n.Type == html.ElementNode && (n.Data == "input" || n.Data == "select" || n.Data == "textarea" || n.Data == "button")
|
||||||
|
}
|
||||||
|
|
||||||
|
// emptyElementSelector is a Selector that matches empty elements.
|
||||||
|
func emptyElementSelector(n *html.Node) bool {
|
||||||
|
if n.Type != html.ElementNode {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||||
|
switch c.Type {
|
||||||
|
case html.ElementNode, html.TextNode:
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// descendantSelector returns a Selector that matches an element if
|
||||||
|
// it matches d and has an ancestor that matches a.
|
||||||
|
func descendantSelector(a, d Selector) Selector {
|
||||||
|
return func(n *html.Node) bool {
|
||||||
|
if !d(n) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
for p := n.Parent; p != nil; p = p.Parent {
|
||||||
|
if a(p) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// childSelector returns a Selector that matches an element if
|
||||||
|
// it matches d and its parent matches a.
|
||||||
|
func childSelector(a, d Selector) Selector {
|
||||||
|
return func(n *html.Node) bool {
|
||||||
|
return d(n) && n.Parent != nil && a(n.Parent)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// siblingSelector returns a Selector that matches an element
|
||||||
|
// if it matches s2 and is preceded by an element that matches s1.
|
||||||
|
// If adjacent is true, the sibling must be immediately before the element.
|
||||||
|
func siblingSelector(s1, s2 Selector, adjacent bool) Selector {
|
||||||
|
return func(n *html.Node) bool {
|
||||||
|
if !s2(n) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if adjacent {
|
||||||
|
for n = n.PrevSibling; n != nil; n = n.PrevSibling {
|
||||||
|
if n.Type == html.TextNode || n.Type == html.CommentNode {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return s1(n)
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Walk backwards looking for an element that matches s1.
|
||||||
|
for c := n.PrevSibling; c != nil; c = c.PrevSibling {
|
||||||
|
if s1(c) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// rootSelector implements :root
|
||||||
|
func rootSelector(n *html.Node) bool {
|
||||||
|
if n.Type != html.ElementNode {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if n.Parent == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return n.Parent.Type == html.DocumentNode
|
||||||
|
}
|
22
vendor/github.com/robfig/cron/.gitignore
generated
vendored
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
# Compiled Object files, Static and Dynamic libs (Shared Objects)
|
||||||
|
*.o
|
||||||
|
*.a
|
||||||
|
*.so
|
||||||
|
|
||||||
|
# Folders
|
||||||
|
_obj
|
||||||
|
_test
|
||||||
|
|
||||||
|
# Architecture specific extensions/prefixes
|
||||||
|
*.[568vq]
|
||||||
|
[568vq].out
|
||||||
|
|
||||||
|
*.cgo1.go
|
||||||
|
*.cgo2.c
|
||||||
|
_cgo_defun.c
|
||||||
|
_cgo_gotypes.go
|
||||||
|
_cgo_export.*
|
||||||
|
|
||||||
|
_testmain.go
|
||||||
|
|
||||||
|
*.exe
|
1
vendor/github.com/robfig/cron/.travis.yml
generated
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
language: go
|
21
vendor/github.com/robfig/cron/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
Copyright (C) 2012 Rob Figueiredo
|
||||||
|
All Rights Reserved.
|
||||||
|
|
||||||
|
MIT LICENSE
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||||
|
this software and associated documentation files (the "Software"), to deal in
|
||||||
|
the Software without restriction, including without limitation the rights to
|
||||||
|
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||||
|
the Software, and to permit persons to whom the Software is furnished to do so,
|
||||||
|
subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||||
|
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||||
|
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||||
|
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||||
|
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
6
vendor/github.com/robfig/cron/README.md
generated
vendored
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
[![GoDoc](http://godoc.org/github.com/robfig/cron?status.png)](http://godoc.org/github.com/robfig/cron)
|
||||||
|
[![Build Status](https://travis-ci.org/robfig/cron.svg?branch=master)](https://travis-ci.org/robfig/cron)
|
||||||
|
|
||||||
|
# cron
|
||||||
|
|
||||||
|
Documentation here: https://godoc.org/github.com/robfig/cron
|
27
vendor/github.com/robfig/cron/constantdelay.go
generated
vendored
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
package cron
|
||||||
|
|
||||||
|
import "time"
|
||||||
|
|
||||||
|
// ConstantDelaySchedule represents a simple recurring duty cycle, e.g. "Every 5 minutes".
|
||||||
|
// It does not support jobs more frequent than once a second.
|
||||||
|
type ConstantDelaySchedule struct {
|
||||||
|
Delay time.Duration
|
||||||
|
}
|
||||||
|
|
||||||
|
// Every returns a crontab Schedule that activates once every duration.
|
||||||
|
// Delays of less than a second are not supported (will round up to 1 second).
|
||||||
|
// Any fields less than a Second are truncated.
|
||||||
|
func Every(duration time.Duration) ConstantDelaySchedule {
|
||||||
|
if duration < time.Second {
|
||||||
|
duration = time.Second
|
||||||
|
}
|
||||||
|
return ConstantDelaySchedule{
|
||||||
|
Delay: duration - time.Duration(duration.Nanoseconds())%time.Second,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
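// Editor's note (illustrative, not part of the original source): because of the
// truncation above, Every(61500*time.Millisecond) yields a Delay of exactly
// 61 seconds, and anything below one second is rounded up to one second.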
|
||||||
|
// Next returns the next time this should be run.
|
||||||
|
// This rounds so that the next activation time will be on the second.
|
||||||
|
func (schedule ConstantDelaySchedule) Next(t time.Time) time.Time {
|
||||||
|
return t.Add(schedule.Delay - time.Duration(t.Nanosecond())*time.Nanosecond)
|
||||||
|
}
|
259
vendor/github.com/robfig/cron/cron.go
generated
vendored
Normal file
@ -0,0 +1,259 @@
|
|||||||
|
package cron
|
||||||
|
|
||||||
|
import (
|
||||||
|
"log"
|
||||||
|
"runtime"
|
||||||
|
"sort"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Cron keeps track of any number of entries, invoking the associated func as
|
||||||
|
// specified by the schedule. It may be started, stopped, and the entries may
|
||||||
|
// be inspected while running.
|
||||||
|
type Cron struct {
|
||||||
|
entries []*Entry
|
||||||
|
stop chan struct{}
|
||||||
|
add chan *Entry
|
||||||
|
snapshot chan []*Entry
|
||||||
|
running bool
|
||||||
|
ErrorLog *log.Logger
|
||||||
|
location *time.Location
|
||||||
|
}
|
||||||
|
|
||||||
|
// Job is an interface for submitted cron jobs.
|
||||||
|
type Job interface {
|
||||||
|
Run()
|
||||||
|
}
|
||||||
|
|
||||||
|
// The Schedule describes a job's duty cycle.
|
||||||
|
type Schedule interface {
|
||||||
|
// Return the next activation time, later than the given time.
|
||||||
|
// Next is invoked initially, and then each time the job is run.
|
||||||
|
Next(time.Time) time.Time
|
||||||
|
}
|
||||||
|
|
||||||
|
// Entry consists of a schedule and the func to execute on that schedule.
|
||||||
|
type Entry struct {
|
||||||
|
// The schedule on which this job should be run.
|
||||||
|
Schedule Schedule
|
||||||
|
|
||||||
|
// The next time the job will run. This is the zero time if Cron has not been
|
||||||
|
// started or this entry's schedule is unsatisfiable.
|
||||||
|
Next time.Time
|
||||||
|
|
||||||
|
// The last time this job was run. This is the zero time if the job has never
|
||||||
|
// been run.
|
||||||
|
Prev time.Time
|
||||||
|
|
||||||
|
// The Job to run.
|
||||||
|
Job Job
|
||||||
|
}
|
||||||
|
|
||||||
|
// byTime is a wrapper for sorting the entry array by time
|
||||||
|
// (with zero time at the end).
|
||||||
|
type byTime []*Entry
|
||||||
|
|
||||||
|
func (s byTime) Len() int { return len(s) }
|
||||||
|
func (s byTime) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
|
||||||
|
func (s byTime) Less(i, j int) bool {
|
||||||
|
// Two zero times should return false.
|
||||||
|
// Otherwise, zero is "greater" than any other time.
|
||||||
|
// (To sort it at the end of the list.)
|
||||||
|
if s[i].Next.IsZero() {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if s[j].Next.IsZero() {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return s[i].Next.Before(s[j].Next)
|
||||||
|
}
|
||||||
|
|
||||||
|
// New returns a new Cron job runner, in the Local time zone.
|
||||||
|
func New() *Cron {
|
||||||
|
return NewWithLocation(time.Now().Location())
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewWithLocation returns a new Cron job runner.
|
||||||
|
func NewWithLocation(location *time.Location) *Cron {
|
||||||
|
return &Cron{
|
||||||
|
entries: nil,
|
||||||
|
add: make(chan *Entry),
|
||||||
|
stop: make(chan struct{}),
|
||||||
|
snapshot: make(chan []*Entry),
|
||||||
|
running: false,
|
||||||
|
ErrorLog: nil,
|
||||||
|
location: location,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// A wrapper that turns a func() into a cron.Job
|
||||||
|
type FuncJob func()
|
||||||
|
|
||||||
|
func (f FuncJob) Run() { f() }
|
||||||
|
|
||||||
|
// AddFunc adds a func to the Cron to be run on the given schedule.
|
||||||
|
func (c *Cron) AddFunc(spec string, cmd func()) error {
|
||||||
|
return c.AddJob(spec, FuncJob(cmd))
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddJob adds a Job to the Cron to be run on the given schedule.
|
||||||
|
func (c *Cron) AddJob(spec string, cmd Job) error {
|
||||||
|
schedule, err := Parse(spec)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
c.Schedule(schedule, cmd)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
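// Editor's note: the function below is an illustrative sketch, not part of the
// original cron source. The default parser used by AddFunc expects a spec with
// a leading seconds field, so "0 */5 * * * *" fires at second 0 of every fifth
// minute.
func exampleAddEveryFiveMinutes(c *Cron) error {
	return c.AddFunc("0 */5 * * * *", func() {
		log.Println("five minutes have passed")
	})
}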
|
||||||
|
// Schedule adds a Job to the Cron to be run on the given schedule.
|
||||||
|
func (c *Cron) Schedule(schedule Schedule, cmd Job) {
|
||||||
|
entry := &Entry{
|
||||||
|
Schedule: schedule,
|
||||||
|
Job: cmd,
|
||||||
|
}
|
||||||
|
if !c.running {
|
||||||
|
c.entries = append(c.entries, entry)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.add <- entry
|
||||||
|
}
|
||||||
|
|
||||||
|
// Entries returns a snapshot of the cron entries.
|
||||||
|
func (c *Cron) Entries() []*Entry {
|
||||||
|
if c.running {
|
||||||
|
c.snapshot <- nil
|
||||||
|
x := <-c.snapshot
|
||||||
|
return x
|
||||||
|
}
|
||||||
|
return c.entrySnapshot()
|
||||||
|
}
|
||||||
|
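// Editor's note (descriptive, not part of the original source): the nil send on
// c.snapshot above is a request to the run loop, which answers by sending a
// copy of its entry list back on the same channel (see the snapshot case in
// run below).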
|
||||||
|
// Location gets the time zone location
|
||||||
|
func (c *Cron) Location() *time.Location {
|
||||||
|
return c.location
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start the cron scheduler in its own goroutine, or no-op if already started.
|
||||||
|
func (c *Cron) Start() {
|
||||||
|
if c.running {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
c.running = true
|
||||||
|
go c.run()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Run the cron scheduler, or no-op if already running.
|
||||||
|
func (c *Cron) Run() {
|
||||||
|
if c.running {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
c.running = true
|
||||||
|
c.run()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *Cron) runWithRecovery(j Job) {
|
||||||
|
defer func() {
|
||||||
|
if r := recover(); r != nil {
|
||||||
|
const size = 64 << 10
|
||||||
|
buf := make([]byte, size)
|
||||||
|
buf = buf[:runtime.Stack(buf, false)]
|
||||||
|
c.logf("cron: panic running job: %v\n%s", r, buf)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
j.Run()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Run the scheduler. This is private just due to the need to synchronize
|
||||||
|
// access to the 'running' state variable.
|
||||||
|
func (c *Cron) run() {
|
||||||
|
// Figure out the next activation times for each entry.
|
||||||
|
now := c.now()
|
||||||
|
for _, entry := range c.entries {
|
||||||
|
entry.Next = entry.Schedule.Next(now)
|
||||||
|
}
|
||||||
|
|
||||||
|
for {
|
||||||
|
// Determine the next entry to run.
|
||||||
|
sort.Sort(byTime(c.entries))
|
||||||
|
|
||||||
|
var timer *time.Timer
|
||||||
|
if len(c.entries) == 0 || c.entries[0].Next.IsZero() {
|
||||||
|
// If there are no entries yet, just sleep - it still handles new entries
|
||||||
|
// and stop requests.
|
||||||
|
timer = time.NewTimer(100000 * time.Hour)
|
||||||
|
} else {
|
||||||
|
timer = time.NewTimer(c.entries[0].Next.Sub(now))
|
||||||
|
}
|
||||||
|
|
||||||
|
for {
|
||||||
|
select {
|
||||||
|
case now = <-timer.C:
|
||||||
|
now = now.In(c.location)
|
||||||
|
// Run every entry whose next time was less than now
|
||||||
|
for _, e := range c.entries {
|
||||||
|
if e.Next.After(now) || e.Next.IsZero() {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
go c.runWithRecovery(e.Job)
|
||||||
|
e.Prev = e.Next
|
||||||
|
e.Next = e.Schedule.Next(now)
|
||||||
|
}
|
||||||
|
|
||||||
|
case newEntry := <-c.add:
|
||||||
|
timer.Stop()
|
||||||
|
now = c.now()
|
||||||
|
newEntry.Next = newEntry.Schedule.Next(now)
|
||||||
|
c.entries = append(c.entries, newEntry)
|
||||||
|
|
||||||
|
case <-c.snapshot:
|
||||||
|
c.snapshot <- c.entrySnapshot()
|
||||||
|
continue
|
||||||
|
|
||||||
|
case <-c.stop:
|
||||||
|
timer.Stop()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Logs an error to stderr or to the configured error log
|
||||||
|
func (c *Cron) logf(format string, args ...interface{}) {
|
||||||
|
if c.ErrorLog != nil {
|
||||||
|
c.ErrorLog.Printf(format, args...)
|
||||||
|
} else {
|
||||||
|
log.Printf(format, args...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stop stops the cron scheduler if it is running; otherwise it does nothing.
|
||||||
|
func (c *Cron) Stop() {
|
||||||
|
if !c.running {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
c.stop <- struct{}{}
|
||||||
|
c.running = false
|
||||||
|
}
|
||||||
|
|
||||||
|
// entrySnapshot returns a copy of the current cron entry list.
|
||||||
|
func (c *Cron) entrySnapshot() []*Entry {
|
||||||
|
entries := []*Entry{}
|
||||||
|
for _, e := range c.entries {
|
||||||
|
entries = append(entries, &Entry{
|
||||||
|
Schedule: e.Schedule,
|
||||||
|
Next: e.Next,
|
||||||
|
Prev: e.Prev,
|
||||||
|
Job: e.Job,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return entries
|
||||||
|
}
|
||||||
|
|
||||||
|
// now returns the current time in c's location.
|
||||||
|
func (c *Cron) now() time.Time {
|
||||||
|
return time.Now().In(c.location)
|
||||||
|
}
|
129
vendor/github.com/robfig/cron/doc.go
generated
vendored
Normal file
@ -0,0 +1,129 @@
|
|||||||
|
/*
|
||||||
|
Package cron implements a cron spec parser and job runner.
|
||||||
|
|
||||||
|
Usage
|
||||||
|
|
||||||
|
Callers may register Funcs to be invoked on a given schedule. Cron will run
|
||||||
|
them in their own goroutines.
|
||||||
|
|
||||||
|
c := cron.New()
|
||||||
|
c.AddFunc("0 30 * * * *", func() { fmt.Println("Every hour on the half hour") })
|
||||||
|
c.AddFunc("@hourly", func() { fmt.Println("Every hour") })
|
||||||
|
c.AddFunc("@every 1h30m", func() { fmt.Println("Every hour thirty") })
|
||||||
|
c.Start()
|
||||||
|
..
|
||||||
|
// Funcs are invoked in their own goroutine, asynchronously.
|
||||||
|
...
|
||||||
|
// Funcs may also be added to a running Cron
|
||||||
|
c.AddFunc("@daily", func() { fmt.Println("Every day") })
|
||||||
|
..
|
||||||
|
// Inspect the cron job entries' next and previous run times.
|
||||||
|
inspect(c.Entries())
|
||||||
|
..
|
||||||
|
c.Stop() // Stop the scheduler (does not stop any jobs already running).
|
||||||
|
|
||||||
|
CRON Expression Format
|
||||||
|
|
||||||
|
A cron expression represents a set of times, using 6 space-separated fields.
|
||||||
|
|
||||||
|
Field name | Mandatory? | Allowed values | Allowed special characters
|
||||||
|
---------- | ---------- | -------------- | --------------------------
|
||||||
|
Seconds | Yes | 0-59 | * / , -
|
||||||
|
Minutes | Yes | 0-59 | * / , -
|
||||||
|
Hours | Yes | 0-23 | * / , -
|
||||||
|
Day of month | Yes | 1-31 | * / , - ?
|
||||||
|
Month | Yes | 1-12 or JAN-DEC | * / , -
|
||||||
|
Day of week | Yes | 0-6 or SUN-SAT | * / , - ?
|
||||||
|
|
||||||
|
Note: Month and Day-of-week field values are case insensitive. "SUN", "Sun",
|
||||||
|
and "sun" are equally accepted.
|
||||||
|
|
||||||
|
Special Characters
|
||||||
|
|
||||||
|
Asterisk ( * )
|
||||||
|
|
||||||
|
The asterisk indicates that the cron expression will match for all values of the
|
||||||
|
field; e.g., using an asterisk in the 5th field (month) would indicate every
|
||||||
|
month.
|
||||||
|
|
||||||
|
Slash ( / )
|
||||||
|
|
||||||
|
Slashes are used to describe increments of ranges. For example 3-59/15 in the
|
||||||
|
minutes field would indicate the 3rd minute of the hour and every 15
|
||||||
|
minutes thereafter. The form "*\/..." is equivalent to the form "first-last/...",
|
||||||
|
that is, an increment over the largest possible range of the field. The form
|
||||||
|
"N/..." is accepted as meaning "N-MAX/...", that is, starting at N, use the
|
||||||
|
increment until the end of that specific range. It does not wrap around.
|
||||||
|
|
||||||
|
Comma ( , )
|
||||||
|
|
||||||
|
Commas are used to separate items of a list. For example, using "MON,WED,FRI" in
|
||||||
|
the day-of-week field would mean Mondays, Wednesdays and Fridays.
|
||||||
|
|
||||||
|
Hyphen ( - )
|
||||||
|
|
||||||
|
Hyphens are used to define ranges. For example, 9-17 would indicate every
|
||||||
|
hour between 9am and 5pm inclusive.
|
||||||
|
|
||||||
|
Question mark ( ? )
|
||||||
|
|
||||||
|
Question mark may be used instead of '*' for leaving either day-of-month or
|
||||||
|
day-of-week blank.
|
||||||
|
|
||||||
|
Predefined schedules
|
||||||
|
|
||||||
|
You may use one of several pre-defined schedules in place of a cron expression.
|
||||||
|
|
||||||
|
Entry | Description | Equivalent To
|
||||||
|
----- | ----------- | -------------
|
||||||
|
@yearly (or @annually) | Run once a year, midnight, Jan. 1st | 0 0 0 1 1 *
|
||||||
|
@monthly | Run once a month, midnight, first of month | 0 0 0 1 * *
|
||||||
|
@weekly | Run once a week, midnight between Sat/Sun | 0 0 0 * * 0
|
||||||
|
@daily (or @midnight) | Run once a day, midnight | 0 0 0 * * *
|
||||||
|
@hourly | Run once an hour, beginning of hour | 0 0 * * * *
|
||||||
|
|
||||||
|
Intervals
|
||||||
|
|
||||||
|
You may also schedule a job to execute at fixed intervals, starting at the time it's added
|
||||||
|
or cron is run. This is supported by formatting the cron spec like this:
|
||||||
|
|
||||||
|
@every <duration>
|
||||||
|
|
||||||
|
where "duration" is a string accepted by time.ParseDuration
|
||||||
|
(http://golang.org/pkg/time/#ParseDuration).
|
||||||
|
|
||||||
|
For example, "@every 1h30m10s" would indicate a schedule that activates after
|
||||||
|
1 hour, 30 minutes, 10 seconds, and then every interval after that.
|
||||||
|
|
||||||
|
Note: The interval does not take the job runtime into account. For example,
|
||||||
|
if a job takes 3 minutes to run, and it is scheduled to run every 5 minutes,
|
||||||
|
it will have only 2 minutes of idle time between each run.
|
||||||
|
|
||||||
|
Time zones
|
||||||
|
|
||||||
|
All interpretation and scheduling is done in the machine's local time zone (as
|
||||||
|
provided by the Go time package, http://www.golang.org/pkg/time).
|
||||||
|
|
||||||
|
Be aware that jobs scheduled during daylight-savings leap-ahead transitions will
|
||||||
|
not be run!
|
||||||
|
|
||||||
|
Thread safety
|
||||||
|
|
||||||
|
Since the Cron service runs concurrently with the calling code, some amount of
|
||||||
|
care must be taken to ensure proper synchronization.
|
||||||
|
|
||||||
|
All cron methods are designed to be correctly synchronized as long as the caller
|
||||||
|
ensures that invocations have a clear happens-before ordering between them.
|
||||||
|
|
||||||
|
Implementation
|
||||||
|
|
||||||
|
Cron entries are stored in an array, sorted by their next activation time. Cron
|
||||||
|
sleeps until the next job is due to be run.
|
||||||
|
|
||||||
|
Upon waking:
|
||||||
|
- it runs each entry that is active on that second
|
||||||
|
- it calculates the next run times for the jobs that were run
|
||||||
|
- it re-sorts the array of entries by next activation time.
|
||||||
|
- it goes to sleep until the soonest job.
|
||||||
|
*/
|
||||||
|
package cron
|
380
vendor/github.com/robfig/cron/parser.go
generated
vendored
Normal file
@ -0,0 +1,380 @@
|
|||||||
|
package cron
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"math"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Configuration options for creating a parser. Most options specify which
|
||||||
|
// fields should be included, while others enable features. If a field is not
|
||||||
|
included, the parser will assume a default value. These options do not change
|
||||||
|
// the order fields are parsed in.
|
||||||
|
type ParseOption int
|
||||||
|
|
||||||
|
const (
|
||||||
|
Second ParseOption = 1 << iota // Seconds field, default 0
|
||||||
|
Minute // Minutes field, default 0
|
||||||
|
Hour // Hours field, default 0
|
||||||
|
Dom // Day of month field, default *
|
||||||
|
Month // Month field, default *
|
||||||
|
Dow // Day of week field, default *
|
||||||
|
DowOptional // Optional day of week field, default *
|
||||||
|
Descriptor // Allow descriptors such as @monthly, @weekly, etc.
|
||||||
|
)
|
||||||
|
|
||||||
|
var places = []ParseOption{
|
||||||
|
Second,
|
||||||
|
Minute,
|
||||||
|
Hour,
|
||||||
|
Dom,
|
||||||
|
Month,
|
||||||
|
Dow,
|
||||||
|
}
|
||||||
|
|
||||||
|
var defaults = []string{
|
||||||
|
"0",
|
||||||
|
"0",
|
||||||
|
"0",
|
||||||
|
"*",
|
||||||
|
"*",
|
||||||
|
"*",
|
||||||
|
}
|
||||||
|
|
||||||
|
// A custom Parser that can be configured.
|
||||||
|
type Parser struct {
|
||||||
|
options ParseOption
|
||||||
|
optionals int
|
||||||
|
}
|
||||||
|
|
||||||
|
// Creates a custom Parser with custom options.
|
||||||
|
//
|
||||||
|
// // Standard parser without descriptors
|
||||||
|
// specParser := NewParser(Minute | Hour | Dom | Month | Dow)
|
||||||
|
// sched, err := specParser.Parse("0 0 15 */3 *")
|
||||||
|
//
|
||||||
|
// // Same as above, just excludes time fields
|
||||||
|
// subsParser := NewParser(Dom | Month | Dow)
|
||||||
|
// sched, err := subsParser.Parse("15 */3 *")
|
||||||
|
//
|
||||||
|
// // Same as above, just makes Dow optional
|
||||||
|
// subsParser := NewParser(Dom | Month | DowOptional)
|
||||||
|
// sched, err := subsParser.Parse("15 */3")
|
||||||
|
//
|
||||||
|
func NewParser(options ParseOption) Parser {
|
||||||
|
optionals := 0
|
||||||
|
if options&DowOptional > 0 {
|
||||||
|
options |= Dow
|
||||||
|
optionals++
|
||||||
|
}
|
||||||
|
return Parser{options, optionals}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse returns a new crontab schedule representing the given spec.
|
||||||
|
// It returns a descriptive error if the spec is not valid.
|
||||||
|
// It accepts crontab specs and features configured by NewParser.
|
||||||
|
func (p Parser) Parse(spec string) (Schedule, error) {
|
||||||
|
if len(spec) == 0 {
|
||||||
|
return nil, fmt.Errorf("Empty spec string")
|
||||||
|
}
|
||||||
|
if spec[0] == '@' && p.options&Descriptor > 0 {
|
||||||
|
return parseDescriptor(spec)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Figure out how many fields we need
|
||||||
|
max := 0
|
||||||
|
for _, place := range places {
|
||||||
|
if p.options&place > 0 {
|
||||||
|
max++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
min := max - p.optionals
|
||||||
|
|
||||||
|
// Split fields on whitespace
|
||||||
|
fields := strings.Fields(spec)
|
||||||
|
|
||||||
|
// Validate number of fields
|
||||||
|
if count := len(fields); count < min || count > max {
|
||||||
|
if min == max {
|
||||||
|
return nil, fmt.Errorf("Expected exactly %d fields, found %d: %s", min, count, spec)
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("Expected %d to %d fields, found %d: %s", min, max, count, spec)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fill in missing fields
|
||||||
|
fields = expandFields(fields, p.options)
|
||||||
|
|
||||||
|
var err error
|
||||||
|
field := func(field string, r bounds) uint64 {
|
||||||
|
if err != nil {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
var bits uint64
|
||||||
|
bits, err = getField(field, r)
|
||||||
|
return bits
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
second = field(fields[0], seconds)
|
||||||
|
minute = field(fields[1], minutes)
|
||||||
|
hour = field(fields[2], hours)
|
||||||
|
dayofmonth = field(fields[3], dom)
|
||||||
|
month = field(fields[4], months)
|
||||||
|
dayofweek = field(fields[5], dow)
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &SpecSchedule{
|
||||||
|
Second: second,
|
||||||
|
Minute: minute,
|
||||||
|
Hour: hour,
|
||||||
|
Dom: dayofmonth,
|
||||||
|
Month: month,
|
||||||
|
Dow: dayofweek,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func expandFields(fields []string, options ParseOption) []string {
|
||||||
|
n := 0
|
||||||
|
count := len(fields)
|
||||||
|
expFields := make([]string, len(places))
|
||||||
|
copy(expFields, defaults)
|
||||||
|
for i, place := range places {
|
||||||
|
if options&place > 0 {
|
||||||
|
expFields[i] = fields[n]
|
||||||
|
n++
|
||||||
|
}
|
||||||
|
if n == count {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return expFields
|
||||||
|
}
|
||||||
|
|
||||||
|
var standardParser = NewParser(
|
||||||
|
Minute | Hour | Dom | Month | Dow | Descriptor,
|
||||||
|
)
|
||||||
|
|
||||||
|
// ParseStandard returns a new crontab schedule representing the given standardSpec
|
||||||
|
// (https://en.wikipedia.org/wiki/Cron). It differs from Parse in that it requires
|
||||||
|
// the spec to always contain 5 fields: minute, hour, day of month, month and day of week,
|
||||||
|
// in that order. It returns a descriptive error if the spec is not valid.
|
||||||
|
//
|
||||||
|
// It accepts
|
||||||
|
// - Standard crontab specs, e.g. "* * * * ?"
|
||||||
|
// - Descriptors, e.g. "@midnight", "@every 1h30m"
|
||||||
|
func ParseStandard(standardSpec string) (Schedule, error) {
|
||||||
|
return standardParser.Parse(standardSpec)
|
||||||
|
}
|
||||||
|
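// Editor's note (illustrative, not part of the original source): with the
// standard five-field parser above, ParseStandard("30 8 * * 1-5") describes
// 08:30:00 on Monday through Friday; the seconds field is filled in with its
// default of 0 by expandFields.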
|
||||||
|
var defaultParser = NewParser(
|
||||||
|
Second | Minute | Hour | Dom | Month | DowOptional | Descriptor,
|
||||||
|
)
|
||||||
|
|
||||||
|
// Parse returns a new crontab schedule representing the given spec.
|
||||||
|
// It returns a descriptive error if the spec is not valid.
|
||||||
|
//
|
||||||
|
// It accepts
|
||||||
|
// - Full crontab specs, e.g. "* * * * * ?"
|
||||||
|
// - Descriptors, e.g. "@midnight", "@every 1h30m"
|
||||||
|
func Parse(spec string) (Schedule, error) {
|
||||||
|
return defaultParser.Parse(spec)
|
||||||
|
}
|
||||||
|
|
||||||
|
// getField returns an Int with the bits set representing all of the times that
|
||||||
|
// the field represents, or an error if the field value cannot be parsed. A "field" is a comma-separated
|
||||||
|
// list of "ranges".
|
||||||
|
func getField(field string, r bounds) (uint64, error) {
|
||||||
|
var bits uint64
|
||||||
|
ranges := strings.FieldsFunc(field, func(r rune) bool { return r == ',' })
|
||||||
|
for _, expr := range ranges {
|
||||||
|
bit, err := getRange(expr, r)
|
||||||
|
if err != nil {
|
||||||
|
return bits, err
|
||||||
|
}
|
||||||
|
bits |= bit
|
||||||
|
}
|
||||||
|
return bits, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// getRange returns the bits indicated by the given expression:
|
||||||
|
// number | number "-" number [ "/" number ]
|
||||||
|
// or an error if the range cannot be parsed.
|
||||||
|
func getRange(expr string, r bounds) (uint64, error) {
|
||||||
|
var (
|
||||||
|
start, end, step uint
|
||||||
|
rangeAndStep = strings.Split(expr, "/")
|
||||||
|
lowAndHigh = strings.Split(rangeAndStep[0], "-")
|
||||||
|
singleDigit = len(lowAndHigh) == 1
|
||||||
|
err error
|
||||||
|
)
|
||||||
|
|
||||||
|
var extra uint64
|
||||||
|
if lowAndHigh[0] == "*" || lowAndHigh[0] == "?" {
|
||||||
|
start = r.min
|
||||||
|
end = r.max
|
||||||
|
extra = starBit
|
||||||
|
} else {
|
||||||
|
start, err = parseIntOrName(lowAndHigh[0], r.names)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
switch len(lowAndHigh) {
|
||||||
|
case 1:
|
||||||
|
end = start
|
||||||
|
case 2:
|
||||||
|
end, err = parseIntOrName(lowAndHigh[1], r.names)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return 0, fmt.Errorf("Too many hyphens: %s", expr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
switch len(rangeAndStep) {
|
||||||
|
case 1:
|
||||||
|
step = 1
|
||||||
|
case 2:
|
||||||
|
step, err = mustParseInt(rangeAndStep[1])
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Special handling: "N/step" means "N-max/step".
|
||||||
|
if singleDigit {
|
||||||
|
end = r.max
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return 0, fmt.Errorf("Too many slashes: %s", expr)
|
||||||
|
}
|
||||||
|
|
||||||
|
if start < r.min {
|
||||||
|
return 0, fmt.Errorf("Beginning of range (%d) below minimum (%d): %s", start, r.min, expr)
|
||||||
|
}
|
||||||
|
if end > r.max {
|
||||||
|
return 0, fmt.Errorf("End of range (%d) above maximum (%d): %s", end, r.max, expr)
|
||||||
|
}
|
||||||
|
if start > end {
|
||||||
|
return 0, fmt.Errorf("Beginning of range (%d) beyond end of range (%d): %s", start, end, expr)
|
||||||
|
}
|
||||||
|
if step == 0 {
|
||||||
|
return 0, fmt.Errorf("Step of range should be a positive number: %s", expr)
|
||||||
|
}
|
||||||
|
|
||||||
|
return getBits(start, end, step) | extra, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseIntOrName returns the (possibly-named) integer contained in expr.
|
||||||
|
func parseIntOrName(expr string, names map[string]uint) (uint, error) {
|
||||||
|
if names != nil {
|
||||||
|
if namedInt, ok := names[strings.ToLower(expr)]; ok {
|
||||||
|
return namedInt, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return mustParseInt(expr)
|
||||||
|
}
|
||||||
|
|
||||||
|
// mustParseInt parses the given expression as an int or returns an error.
|
||||||
|
func mustParseInt(expr string) (uint, error) {
|
||||||
|
num, err := strconv.Atoi(expr)
|
||||||
|
if err != nil {
|
||||||
|
return 0, fmt.Errorf("Failed to parse int from %s: %s", expr, err)
|
||||||
|
}
|
||||||
|
if num < 0 {
|
||||||
|
return 0, fmt.Errorf("Negative number (%d) not allowed: %s", num, expr)
|
||||||
|
}
|
||||||
|
|
||||||
|
return uint(num), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// getBits sets all bits in the range [min, max], modulo the given step size.
|
||||||
|
func getBits(min, max, step uint) uint64 {
|
||||||
|
var bits uint64
|
||||||
|
|
||||||
|
// If step is 1, use shifts.
|
||||||
|
if step == 1 {
|
||||||
|
return ^(math.MaxUint64 << (max + 1)) & (math.MaxUint64 << min)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Else, use a simple loop.
|
||||||
|
for i := min; i <= max; i += step {
|
||||||
|
bits |= 1 << i
|
||||||
|
}
|
||||||
|
return bits
|
||||||
|
}
|
||||||
|
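// Editor's note (illustrative, not part of the original source): for the
// minutes field "*/15", getRange calls getBits(0, 59, 15), which sets bits
// 0, 15, 30 and 45 (the star bit is then added separately by getRange).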
|
||||||
|
// all returns all bits within the given bounds. (plus the star bit)
|
||||||
|
func all(r bounds) uint64 {
|
||||||
|
return getBits(r.min, r.max, 1) | starBit
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseDescriptor returns a predefined schedule for the expression, or error if none matches.
|
||||||
|
func parseDescriptor(descriptor string) (Schedule, error) {
|
||||||
|
switch descriptor {
|
||||||
|
case "@yearly", "@annually":
|
||||||
|
return &SpecSchedule{
|
||||||
|
Second: 1 << seconds.min,
|
||||||
|
Minute: 1 << minutes.min,
|
||||||
|
Hour: 1 << hours.min,
|
||||||
|
Dom: 1 << dom.min,
|
||||||
|
Month: 1 << months.min,
|
||||||
|
Dow: all(dow),
|
||||||
|
}, nil
|
||||||
|
|
||||||
|
case "@monthly":
|
||||||
|
return &SpecSchedule{
|
||||||
|
Second: 1 << seconds.min,
|
||||||
|
Minute: 1 << minutes.min,
|
||||||
|
Hour: 1 << hours.min,
|
||||||
|
Dom: 1 << dom.min,
|
||||||
|
Month: all(months),
|
||||||
|
Dow: all(dow),
|
||||||
|
}, nil
|
||||||
|
|
||||||
|
case "@weekly":
|
||||||
|
return &SpecSchedule{
|
||||||
|
Second: 1 << seconds.min,
|
||||||
|
Minute: 1 << minutes.min,
|
||||||
|
Hour: 1 << hours.min,
|
||||||
|
Dom: all(dom),
|
||||||
|
Month: all(months),
|
||||||
|
Dow: 1 << dow.min,
|
||||||
|
}, nil
|
||||||
|
|
||||||
|
case "@daily", "@midnight":
|
||||||
|
return &SpecSchedule{
|
||||||
|
Second: 1 << seconds.min,
|
||||||
|
Minute: 1 << minutes.min,
|
||||||
|
Hour: 1 << hours.min,
|
||||||
|
Dom: all(dom),
|
||||||
|
Month: all(months),
|
||||||
|
Dow: all(dow),
|
||||||
|
}, nil
|
||||||
|
|
||||||
|
case "@hourly":
|
||||||
|
return &SpecSchedule{
|
||||||
|
Second: 1 << seconds.min,
|
||||||
|
Minute: 1 << minutes.min,
|
||||||
|
Hour: all(hours),
|
||||||
|
Dom: all(dom),
|
||||||
|
Month: all(months),
|
||||||
|
Dow: all(dow),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
const every = "@every "
|
||||||
|
if strings.HasPrefix(descriptor, every) {
|
||||||
|
duration, err := time.ParseDuration(descriptor[len(every):])
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("Failed to parse duration %s: %s", descriptor, err)
|
||||||
|
}
|
||||||
|
return Every(duration), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, fmt.Errorf("Unrecognized descriptor: %s", descriptor)
|
||||||
|
}
|
158
vendor/github.com/robfig/cron/spec.go
generated
vendored
Normal file
@ -0,0 +1,158 @@
|
|||||||
|
package cron
|
||||||
|
|
||||||
|
import "time"
|
||||||
|
|
||||||
|
// SpecSchedule specifies a duty cycle (to the second granularity), based on a
|
||||||
|
// traditional crontab specification. It is computed initially and stored as bit sets.
|
||||||
|
type SpecSchedule struct {
|
||||||
|
Second, Minute, Hour, Dom, Month, Dow uint64
|
||||||
|
}
|
||||||
|
|
||||||
|
// bounds provides a range of acceptable values (plus a map of name to value).
|
||||||
|
type bounds struct {
|
||||||
|
min, max uint
|
||||||
|
names map[string]uint
|
||||||
|
}
|
||||||
|
|
||||||
|
// The bounds for each field.
|
||||||
|
var (
|
||||||
|
seconds = bounds{0, 59, nil}
|
||||||
|
minutes = bounds{0, 59, nil}
|
||||||
|
hours = bounds{0, 23, nil}
|
||||||
|
dom = bounds{1, 31, nil}
|
||||||
|
months = bounds{1, 12, map[string]uint{
|
||||||
|
"jan": 1,
|
||||||
|
"feb": 2,
|
||||||
|
"mar": 3,
|
||||||
|
"apr": 4,
|
||||||
|
"may": 5,
|
||||||
|
"jun": 6,
|
||||||
|
"jul": 7,
|
||||||
|
"aug": 8,
|
||||||
|
"sep": 9,
|
||||||
|
"oct": 10,
|
||||||
|
"nov": 11,
|
||||||
|
"dec": 12,
|
||||||
|
}}
|
||||||
|
dow = bounds{0, 6, map[string]uint{
|
||||||
|
"sun": 0,
|
||||||
|
"mon": 1,
|
||||||
|
"tue": 2,
|
||||||
|
"wed": 3,
|
||||||
|
"thu": 4,
|
||||||
|
"fri": 5,
|
||||||
|
"sat": 6,
|
||||||
|
}}
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
// Set the top bit if a star was included in the expression.
|
||||||
|
starBit = 1 << 63
|
||||||
|
)
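Membership tests against these masks are plain bit probes of the form 1<<uint(value)&mask, exactly as used in Next and dayMatches below. A small standalone sketch with a hand-built month mask (the mask here is hypothetical, not one the package would produce for any particular spec):

package main

import (
	"fmt"
	"time"
)

func main() {
	// Month bits follow the bounds above: months occupy bits 1 through 12.
	var months uint64
	for _, m := range []time.Month{time.March, time.June, time.September, time.December} {
		months |= 1 << uint(m)
	}

	t := time.Date(2018, time.June, 15, 0, 0, 0, 0, time.UTC)
	// The same probe Next() uses to decide whether to advance the month.
	fmt.Println(1<<uint(t.Month())&months != 0) // true
}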
|
||||||
|
|
||||||
|
// Next returns the next time this schedule is activated, greater than the given
|
||||||
|
// time. If no time can be found to satisfy the schedule, return the zero time.
|
||||||
|
func (s *SpecSchedule) Next(t time.Time) time.Time {
|
||||||
|
// General approach:
|
||||||
|
// For Month, Day, Hour, Minute, Second:
|
||||||
|
// Check if the time value matches. If yes, continue to the next field.
|
||||||
|
// If the field doesn't match the schedule, then increment the field until it matches.
|
||||||
|
// While incrementing the field, a wrap-around brings it back to the beginning
|
||||||
|
// of the field list (since it is necessary to re-verify previous field
|
||||||
|
// values)
|
||||||
|
|
||||||
|
// Start at the earliest possible time (the upcoming second).
|
||||||
|
t = t.Add(1*time.Second - time.Duration(t.Nanosecond())*time.Nanosecond)
|
||||||
|
|
||||||
|
// This flag indicates whether a field has been incremented.
|
||||||
|
added := false
|
||||||
|
|
||||||
|
// If no time is found within five years, return zero.
|
||||||
|
yearLimit := t.Year() + 5
|
||||||
|
|
||||||
|
WRAP:
|
||||||
|
if t.Year() > yearLimit {
|
||||||
|
return time.Time{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the first applicable month.
|
||||||
|
// If it's this month, then do nothing.
|
||||||
|
for 1<<uint(t.Month())&s.Month == 0 {
|
||||||
|
// If we have to add a month, reset the other parts to 0.
|
||||||
|
if !added {
|
||||||
|
added = true
|
||||||
|
// Otherwise, set the date at the beginning (since the current time is irrelevant).
|
||||||
|
t = time.Date(t.Year(), t.Month(), 1, 0, 0, 0, 0, t.Location())
|
||||||
|
}
|
||||||
|
t = t.AddDate(0, 1, 0)
|
||||||
|
|
||||||
|
// Wrapped around.
|
||||||
|
if t.Month() == time.January {
|
||||||
|
goto WRAP
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Now get a day in that month.
|
||||||
|
for !dayMatches(s, t) {
|
||||||
|
if !added {
|
||||||
|
added = true
|
||||||
|
t = time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location())
|
||||||
|
}
|
||||||
|
t = t.AddDate(0, 0, 1)
|
||||||
|
|
||||||
|
if t.Day() == 1 {
|
||||||
|
goto WRAP
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for 1<<uint(t.Hour())&s.Hour == 0 {
|
||||||
|
if !added {
|
||||||
|
added = true
|
||||||
|
t = time.Date(t.Year(), t.Month(), t.Day(), t.Hour(), 0, 0, 0, t.Location())
|
||||||
|
}
|
||||||
|
t = t.Add(1 * time.Hour)
|
||||||
|
|
||||||
|
if t.Hour() == 0 {
|
||||||
|
goto WRAP
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for 1<<uint(t.Minute())&s.Minute == 0 {
|
||||||
|
if !added {
|
||||||
|
added = true
|
||||||
|
t = t.Truncate(time.Minute)
|
||||||
|
}
|
||||||
|
t = t.Add(1 * time.Minute)
|
||||||
|
|
||||||
|
if t.Minute() == 0 {
|
||||||
|
goto WRAP
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for 1<<uint(t.Second())&s.Second == 0 {
|
||||||
|
if !added {
|
||||||
|
added = true
|
||||||
|
t = t.Truncate(time.Second)
|
||||||
|
}
|
||||||
|
t = t.Add(1 * time.Second)
|
||||||
|
|
||||||
|
if t.Second() == 0 {
|
||||||
|
goto WRAP
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return t
|
||||||
|
}
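The two moves Next relies on, rounding up to the next whole second and then truncating a field once before incrementing it, can be shown in isolation with the standard library alone. The 15-minute condition below is a stand-in for "the bit for this minute is set"; it is not the package's API.

package main

import (
	"fmt"
	"time"
)

func main() {
	t := time.Date(2018, time.June, 15, 10, 7, 3, 500000000, time.UTC)

	// Start at the earliest possible time: the upcoming second.
	t = t.Add(1*time.Second - time.Duration(t.Nanosecond())*time.Nanosecond)
	fmt.Println(t) // 2018-06-15 10:07:04 +0000 UTC

	// One field loop in the style of the minute handling above.
	added := false
	for t.Minute()%15 != 0 {
		if !added {
			added = true
			t = t.Truncate(time.Minute) // zero the smaller fields exactly once
		}
		t = t.Add(1 * time.Minute)
	}
	fmt.Println(t) // 2018-06-15 10:15:00 +0000 UTC
}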
|
||||||
|
|
||||||
|
// dayMatches returns true if the schedule's day-of-week and day-of-month
|
||||||
|
// restrictions are satisfied by the given time.
|
||||||
|
func dayMatches(s *SpecSchedule, t time.Time) bool {
|
||||||
|
var (
|
||||||
|
domMatch bool = 1<<uint(t.Day())&s.Dom > 0
|
||||||
|
dowMatch bool = 1<<uint(t.Weekday())&s.Dow > 0
|
||||||
|
)
|
||||||
|
if s.Dom&starBit > 0 || s.Dow&starBit > 0 {
|
||||||
|
return domMatch && dowMatch
|
||||||
|
}
|
||||||
|
return domMatch || dowMatch
|
||||||
|
}
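In other words: if either day field was written as "*" (its star bit is set), both day conditions must hold, and the starred one holds trivially; if both fields are restricted, matching either one is enough, which is the traditional crontab behaviour. A tiny sketch of just that decision with hypothetical inputs:

package main

import "fmt"

// dayOK reproduces only the decision in dayMatches, with the star bits and
// the two per-field matches passed in directly as booleans.
func dayOK(domStar, dowStar, domMatch, dowMatch bool) bool {
	if domStar || dowStar {
		return domMatch && dowMatch
	}
	return domMatch || dowMatch
}

func main() {
	// Dom restricted to the 1st, Dow restricted to Monday: the 1st of the
	// month fires even when it is not a Monday.
	fmt.Println(dayOK(false, false, true, false)) // true
	// Dow written as "*": only the day-of-month restriction decides.
	fmt.Println(dayOK(false, true, false, true)) // false
}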
|
3
vendor/golang.org/x/net/AUTHORS
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
# This source code refers to The Go Authors for copyright purposes.
|
||||||
|
# The master list of authors is in the main Go distribution,
|
||||||
|
# visible at http://tip.golang.org/AUTHORS.
|
3
vendor/golang.org/x/net/CONTRIBUTORS
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
# This source code was written by the Go contributors.
|
||||||
|
# The master list of contributors is in the main Go distribution,
|
||||||
|
# visible at http://tip.golang.org/CONTRIBUTORS.
|
27
vendor/golang.org/x/net/LICENSE
generated
vendored
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
Copyright (c) 2009 The Go Authors. All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without
|
||||||
|
modification, are permitted provided that the following conditions are
|
||||||
|
met:
|
||||||
|
|
||||||
|
* Redistributions of source code must retain the above copyright
|
||||||
|
notice, this list of conditions and the following disclaimer.
|
||||||
|
* Redistributions in binary form must reproduce the above
|
||||||
|
copyright notice, this list of conditions and the following disclaimer
|
||||||
|
in the documentation and/or other materials provided with the
|
||||||
|
distribution.
|
||||||
|
* Neither the name of Google Inc. nor the names of its
|
||||||
|
contributors may be used to endorse or promote products derived from
|
||||||
|
this software without specific prior written permission.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
22
vendor/golang.org/x/net/PATENTS
generated
vendored
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
Additional IP Rights Grant (Patents)
|
||||||
|
|
||||||
|
"This implementation" means the copyrightable works distributed by
|
||||||
|
Google as part of the Go project.
|
||||||
|
|
||||||
|
Google hereby grants to You a perpetual, worldwide, non-exclusive,
|
||||||
|
no-charge, royalty-free, irrevocable (except as stated in this section)
|
||||||
|
patent license to make, have made, use, offer to sell, sell, import,
|
||||||
|
transfer and otherwise run, modify and propagate the contents of this
|
||||||
|
implementation of Go, where such license applies only to those patent
|
||||||
|
claims, both currently owned or controlled by Google and acquired in
|
||||||
|
the future, licensable by Google that are necessarily infringed by this
|
||||||
|
implementation of Go. This grant does not include claims that would be
|
||||||
|
infringed only as a consequence of further modification of this
|
||||||
|
implementation. If you or your agent or exclusive licensee institute or
|
||||||
|
order or agree to the institution of patent litigation against any
|
||||||
|
entity (including a cross-claim or counterclaim in a lawsuit) alleging
|
||||||
|
that this implementation of Go or any code incorporated within this
|
||||||
|
implementation of Go constitutes direct or contributory patent
|
||||||
|
infringement, or inducement of patent infringement, then any patent
|
||||||
|
rights granted to you under this License for this implementation of Go
|
||||||
|
shall terminate as of the date such litigation is filed.
|
78
vendor/golang.org/x/net/html/atom/atom.go
generated
vendored
Normal file
@ -0,0 +1,78 @@
|
|||||||
|
// Copyright 2012 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// Package atom provides integer codes (also known as atoms) for a fixed set of
|
||||||
|
// frequently occurring HTML strings: tag names and attribute keys such as "p"
|
||||||
|
// and "id".
|
||||||
|
//
|
||||||
|
// Sharing an atom's name between all elements with the same tag can result in
|
||||||
|
// fewer string allocations when tokenizing and parsing HTML. Integer
|
||||||
|
// comparisons are also generally faster than string comparisons.
|
||||||
|
//
|
||||||
|
// The value of an atom's particular code is not guaranteed to stay the same
|
||||||
|
// between versions of this package. Neither is any ordering guaranteed:
|
||||||
|
// whether atom.H1 < atom.H2 may also change. The codes are not guaranteed to
|
||||||
|
// be dense. The only guarantees are that e.g. looking up "div" will yield
|
||||||
|
// atom.Div, calling atom.Div.String will return "div", and atom.Div != 0.
|
||||||
|
package atom // import "golang.org/x/net/html/atom"
|
||||||
|
|
||||||
|
// Atom is an integer code for a string. The zero value maps to "".
|
||||||
|
type Atom uint32
|
||||||
|
|
||||||
|
// String returns the atom's name.
|
||||||
|
func (a Atom) String() string {
|
||||||
|
start := uint32(a >> 8)
|
||||||
|
n := uint32(a & 0xff)
|
||||||
|
if start+n > uint32(len(atomText)) {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
return atomText[start : start+n]
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a Atom) string() string {
|
||||||
|
return atomText[a>>8 : a>>8+a&0xff]
|
||||||
|
}
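An Atom value therefore packs two numbers: the byte offset of the name inside atomText in the high bits and the name's length in the low 8 bits. Decoding one of the constants from the generated table.go later in this commit (Accept Atom = 0x1a06) by hand:

package main

import "fmt"

func main() {
	// From the generated table below: Accept Atom = 0x1a06.
	const accept = 0x1a06
	offset := accept >> 8   // 0x1a = 26
	length := accept & 0xff // 6
	fmt.Println(offset, length) // 26 6
	// atomText begins "abbradiogrouparamainavalueaccept-charset...", so
	// atomText[26:32] is "accept".
}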
|
||||||
|
|
||||||
|
// fnv computes the FNV hash with an arbitrary starting value h.
|
||||||
|
func fnv(h uint32, s []byte) uint32 {
|
||||||
|
for i := range s {
|
||||||
|
h ^= uint32(s[i])
|
||||||
|
h *= 16777619
|
||||||
|
}
|
||||||
|
return h
|
||||||
|
}
|
||||||
|
|
||||||
|
func match(s string, t []byte) bool {
|
||||||
|
for i, c := range t {
|
||||||
|
if s[i] != c {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Lookup returns the atom whose name is s. It returns zero if there is no
|
||||||
|
// such atom. The lookup is case sensitive.
|
||||||
|
func Lookup(s []byte) Atom {
|
||||||
|
if len(s) == 0 || len(s) > maxAtomLen {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
h := fnv(hash0, s)
|
||||||
|
if a := table[h&uint32(len(table)-1)]; int(a&0xff) == len(s) && match(a.string(), s) {
|
||||||
|
return a
|
||||||
|
}
|
||||||
|
if a := table[(h>>16)&uint32(len(table)-1)]; int(a&0xff) == len(s) && match(a.string(), s) {
|
||||||
|
return a
|
||||||
|
}
|
||||||
|
return 0
|
||||||
|
}
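Usage follows the guarantees stated in the package comment: looking up "div" yields atom.Div, its String method returns "div", and unknown names come back as the zero Atom. A minimal sketch, assuming the vendored import path:

package main

import (
	"fmt"

	"golang.org/x/net/html/atom"
)

func main() {
	a := atom.Lookup([]byte("div"))
	fmt.Println(a == atom.Div, a.String()) // true div

	// Names outside the fixed set map to the zero Atom.
	fmt.Println(atom.Lookup([]byte("not-an-atom")) == 0) // true
}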
|
||||||
|
|
||||||
|
// String returns a string whose contents are equal to s. In that sense, it is
|
||||||
|
// equivalent to string(s) but may be more efficient.
|
||||||
|
func String(s []byte) string {
|
||||||
|
if a := Lookup(s); a != 0 {
|
||||||
|
return a.String()
|
||||||
|
}
|
||||||
|
return string(s)
|
||||||
|
}
|
712
vendor/golang.org/x/net/html/atom/gen.go
generated
vendored
Normal file
@ -0,0 +1,712 @@
|
|||||||
|
// Copyright 2012 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build ignore
|
||||||
|
|
||||||
|
//go:generate go run gen.go
|
||||||
|
//go:generate go run gen.go -test
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"go/format"
|
||||||
|
"io/ioutil"
|
||||||
|
"math/rand"
|
||||||
|
"os"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// identifier converts s to a Go exported identifier.
|
||||||
|
// It converts "div" to "Div" and "accept-charset" to "AcceptCharset".
|
||||||
|
func identifier(s string) string {
|
||||||
|
b := make([]byte, 0, len(s))
|
||||||
|
cap := true
|
||||||
|
for _, c := range s {
|
||||||
|
if c == '-' {
|
||||||
|
cap = true
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if cap && 'a' <= c && c <= 'z' {
|
||||||
|
c -= 'a' - 'A'
|
||||||
|
}
|
||||||
|
cap = false
|
||||||
|
b = append(b, byte(c))
|
||||||
|
}
|
||||||
|
return string(b)
|
||||||
|
}
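A quick check of what identifier produces; the sketch below copies the helper so it can run on its own, outside the generator:

package main

import "fmt"

// identifier copies the generator helper above: "-" is dropped and the first
// letter of each hyphen-separated part is upper-cased.
func identifier(s string) string {
	b := make([]byte, 0, len(s))
	cap := true
	for _, c := range s {
		if c == '-' {
			cap = true
			continue
		}
		if cap && 'a' <= c && c <= 'z' {
			c -= 'a' - 'A'
		}
		cap = false
		b = append(b, byte(c))
	}
	return string(b)
}

func main() {
	fmt.Println(identifier("div"))            // Div
	fmt.Println(identifier("accept-charset")) // AcceptCharset
	fmt.Println(identifier("annotation-xml")) // AnnotationXml
}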
|
||||||
|
|
||||||
|
var test = flag.Bool("test", false, "generate table_test.go")
|
||||||
|
|
||||||
|
func genFile(name string, buf *bytes.Buffer) {
|
||||||
|
b, err := format.Source(buf.Bytes())
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintln(os.Stderr, err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
if err := ioutil.WriteFile(name, b, 0644); err != nil {
|
||||||
|
fmt.Fprintln(os.Stderr, err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
flag.Parse()
|
||||||
|
|
||||||
|
var all []string
|
||||||
|
all = append(all, elements...)
|
||||||
|
all = append(all, attributes...)
|
||||||
|
all = append(all, eventHandlers...)
|
||||||
|
all = append(all, extra...)
|
||||||
|
sort.Strings(all)
|
||||||
|
|
||||||
|
// uniq - lists have dups
|
||||||
|
w := 0
|
||||||
|
for _, s := range all {
|
||||||
|
if w == 0 || all[w-1] != s {
|
||||||
|
all[w] = s
|
||||||
|
w++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
all = all[:w]
|
||||||
|
|
||||||
|
if *test {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
fmt.Fprintln(&buf, "// Code generated by go generate gen.go; DO NOT EDIT.\n")
|
||||||
|
fmt.Fprintln(&buf, "//go:generate go run gen.go -test\n")
|
||||||
|
fmt.Fprintln(&buf, "package atom\n")
|
||||||
|
fmt.Fprintln(&buf, "var testAtomList = []string{")
|
||||||
|
for _, s := range all {
|
||||||
|
fmt.Fprintf(&buf, "\t%q,\n", s)
|
||||||
|
}
|
||||||
|
fmt.Fprintln(&buf, "}")
|
||||||
|
|
||||||
|
genFile("table_test.go", &buf)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find hash that minimizes table size.
|
||||||
|
var best *table
|
||||||
|
for i := 0; i < 1000000; i++ {
|
||||||
|
if best != nil && 1<<(best.k-1) < len(all) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
h := rand.Uint32()
|
||||||
|
for k := uint(0); k <= 16; k++ {
|
||||||
|
if best != nil && k >= best.k {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
var t table
|
||||||
|
if t.init(h, k, all) {
|
||||||
|
best = &t
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if best == nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "failed to construct string table\n")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Lay out strings, using overlaps when possible.
|
||||||
|
layout := append([]string{}, all...)
|
||||||
|
|
||||||
|
// Remove strings that are substrings of other strings
|
||||||
|
for changed := true; changed; {
|
||||||
|
changed = false
|
||||||
|
for i, s := range layout {
|
||||||
|
if s == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for j, t := range layout {
|
||||||
|
if i != j && t != "" && strings.Contains(s, t) {
|
||||||
|
changed = true
|
||||||
|
layout[j] = ""
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Join strings where one suffix matches another prefix.
|
||||||
|
for {
|
||||||
|
// Find best i, j, k such that layout[i][len-k:] == layout[j][:k],
|
||||||
|
// maximizing overlap length k.
|
||||||
|
besti := -1
|
||||||
|
bestj := -1
|
||||||
|
bestk := 0
|
||||||
|
for i, s := range layout {
|
||||||
|
if s == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for j, t := range layout {
|
||||||
|
if i == j {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for k := bestk + 1; k <= len(s) && k <= len(t); k++ {
|
||||||
|
if s[len(s)-k:] == t[:k] {
|
||||||
|
besti = i
|
||||||
|
bestj = j
|
||||||
|
bestk = k
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if bestk > 0 {
|
||||||
|
layout[besti] += layout[bestj][bestk:]
|
||||||
|
layout[bestj] = ""
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
text := strings.Join(layout, "")
|
||||||
|
|
||||||
|
atom := map[string]uint32{}
|
||||||
|
for _, s := range all {
|
||||||
|
off := strings.Index(text, s)
|
||||||
|
if off < 0 {
|
||||||
|
panic("lost string " + s)
|
||||||
|
}
|
||||||
|
atom[s] = uint32(off<<8 | len(s))
|
||||||
|
}
|
||||||
|
|
||||||
|
var buf bytes.Buffer
|
||||||
|
// Generate the Go code.
|
||||||
|
fmt.Fprintln(&buf, "// Code generated by go generate gen.go; DO NOT EDIT.\n")
|
||||||
|
fmt.Fprintln(&buf, "//go:generate go run gen.go\n")
|
||||||
|
fmt.Fprintln(&buf, "package atom\n\nconst (")
|
||||||
|
|
||||||
|
// compute max len
|
||||||
|
maxLen := 0
|
||||||
|
for _, s := range all {
|
||||||
|
if maxLen < len(s) {
|
||||||
|
maxLen = len(s)
|
||||||
|
}
|
||||||
|
fmt.Fprintf(&buf, "\t%s Atom = %#x\n", identifier(s), atom[s])
|
||||||
|
}
|
||||||
|
fmt.Fprintln(&buf, ")\n")
|
||||||
|
|
||||||
|
fmt.Fprintf(&buf, "const hash0 = %#x\n\n", best.h0)
|
||||||
|
fmt.Fprintf(&buf, "const maxAtomLen = %d\n\n", maxLen)
|
||||||
|
|
||||||
|
fmt.Fprintf(&buf, "var table = [1<<%d]Atom{\n", best.k)
|
||||||
|
for i, s := range best.tab {
|
||||||
|
if s == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
fmt.Fprintf(&buf, "\t%#x: %#x, // %s\n", i, atom[s], s)
|
||||||
|
}
|
||||||
|
fmt.Fprintf(&buf, "}\n")
|
||||||
|
datasize := (1 << best.k) * 4
|
||||||
|
|
||||||
|
fmt.Fprintln(&buf, "const atomText =")
|
||||||
|
textsize := len(text)
|
||||||
|
for len(text) > 60 {
|
||||||
|
fmt.Fprintf(&buf, "\t%q +\n", text[:60])
|
||||||
|
text = text[60:]
|
||||||
|
}
|
||||||
|
fmt.Fprintf(&buf, "\t%q\n\n", text)
|
||||||
|
|
||||||
|
genFile("table.go", &buf)
|
||||||
|
|
||||||
|
fmt.Fprintf(os.Stdout, "%d atoms; %d string bytes + %d tables = %d total data\n", len(all), textsize, datasize, textsize+datasize)
|
||||||
|
}
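The layout phase first drops names that are substrings of others, then greedily merges the pair with the longest suffix/prefix overlap until no overlap remains; the result is the single atomText constant emitted near the end of this commit, which begins "abbradiogrouparamain..." because "abbr", "radiogroup", "param" and "main" each share a character with their neighbour. The overlap test itself, extracted as a standalone sketch:

package main

import "fmt"

// overlap returns the longest k such that a suffix of s equals a prefix of t,
// the same condition the generator maximizes when joining strings.
func overlap(s, t string) int {
	best := 0
	for k := 1; k <= len(s) && k <= len(t); k++ {
		if s[len(s)-k:] == t[:k] {
			best = k
		}
	}
	return best
}

func main() {
	fmt.Println(overlap("abbr", "radiogroup"))   // 1 ("r")
	fmt.Println(overlap("onmouseout", "output")) // 3 ("out")
	fmt.Println(overlap("param", "main"))        // 1 ("m")
}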
|
||||||
|
|
||||||
|
type byLen []string
|
||||||
|
|
||||||
|
func (x byLen) Less(i, j int) bool { return len(x[i]) > len(x[j]) }
|
||||||
|
func (x byLen) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
|
||||||
|
func (x byLen) Len() int { return len(x) }
|
||||||
|
|
||||||
|
// fnv computes the FNV hash with an arbitrary starting value h.
|
||||||
|
func fnv(h uint32, s string) uint32 {
|
||||||
|
for i := 0; i < len(s); i++ {
|
||||||
|
h ^= uint32(s[i])
|
||||||
|
h *= 16777619
|
||||||
|
}
|
||||||
|
return h
|
||||||
|
}
|
||||||
|
|
||||||
|
// A table represents an attempt at constructing the lookup table.
|
||||||
|
// The lookup table uses cuckoo hashing, meaning that each string
|
||||||
|
// can be found in one of two positions.
|
||||||
|
type table struct {
|
||||||
|
h0 uint32
|
||||||
|
k uint
|
||||||
|
mask uint32
|
||||||
|
tab []string
|
||||||
|
}
|
||||||
|
|
||||||
|
// hash returns the two hashes for s.
|
||||||
|
func (t *table) hash(s string) (h1, h2 uint32) {
|
||||||
|
h := fnv(t.h0, s)
|
||||||
|
h1 = h & t.mask
|
||||||
|
h2 = (h >> 16) & t.mask
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// init initializes the table with the given parameters.
|
||||||
|
// h0 is the initial hash value,
|
||||||
|
// k is the number of bits of hash value to use, and
|
||||||
|
// x is the list of strings to store in the table.
|
||||||
|
// init returns false if the table cannot be constructed.
|
||||||
|
func (t *table) init(h0 uint32, k uint, x []string) bool {
|
||||||
|
t.h0 = h0
|
||||||
|
t.k = k
|
||||||
|
t.tab = make([]string, 1<<k)
|
||||||
|
t.mask = 1<<k - 1
|
||||||
|
for _, s := range x {
|
||||||
|
if !t.insert(s) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// insert inserts s in the table.
|
||||||
|
func (t *table) insert(s string) bool {
|
||||||
|
h1, h2 := t.hash(s)
|
||||||
|
if t.tab[h1] == "" {
|
||||||
|
t.tab[h1] = s
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if t.tab[h2] == "" {
|
||||||
|
t.tab[h2] = s
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if t.push(h1, 0) {
|
||||||
|
t.tab[h1] = s
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if t.push(h2, 0) {
|
||||||
|
t.tab[h2] = s
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// push attempts to push aside the entry in slot i.
|
||||||
|
func (t *table) push(i uint32, depth int) bool {
|
||||||
|
if depth > len(t.tab) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
s := t.tab[i]
|
||||||
|
h1, h2 := t.hash(s)
|
||||||
|
j := h1 + h2 - i
|
||||||
|
if t.tab[j] != "" && !t.push(j, depth+1) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
t.tab[j] = s
|
||||||
|
return true
|
||||||
|
}
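insert and push together form a small cuckoo hash: every name has exactly two candidate slots (h&mask and (h>>16)&mask), and a full slot is resolved by pushing its occupant to the occupant's other slot. The only non-obvious line is j := h1 + h2 - i, which picks whichever of the two slots i is not; a tiny sketch with arbitrary slot numbers:

package main

import "fmt"

func main() {
	// If i is one of an entry's two candidate slots h1 and h2,
	// then h1 + h2 - i is always the other one.
	var h1, h2 uint32 = 5, 12
	for _, i := range []uint32{h1, h2} {
		fmt.Println(i, "->", h1+h2-i) // 5 -> 12, then 12 -> 5
	}
}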
|
||||||
|
|
||||||
|
// The lists of element names and attribute keys were taken from
|
||||||
|
// https://html.spec.whatwg.org/multipage/indices.html#index
|
||||||
|
// as of the "HTML Living Standard - Last Updated 16 April 2018" version.
|
||||||
|
|
||||||
|
// "command", "keygen" and "menuitem" have been removed from the spec,
|
||||||
|
// but are kept here for backwards compatibility.
|
||||||
|
var elements = []string{
|
||||||
|
"a",
|
||||||
|
"abbr",
|
||||||
|
"address",
|
||||||
|
"area",
|
||||||
|
"article",
|
||||||
|
"aside",
|
||||||
|
"audio",
|
||||||
|
"b",
|
||||||
|
"base",
|
||||||
|
"bdi",
|
||||||
|
"bdo",
|
||||||
|
"blockquote",
|
||||||
|
"body",
|
||||||
|
"br",
|
||||||
|
"button",
|
||||||
|
"canvas",
|
||||||
|
"caption",
|
||||||
|
"cite",
|
||||||
|
"code",
|
||||||
|
"col",
|
||||||
|
"colgroup",
|
||||||
|
"command",
|
||||||
|
"data",
|
||||||
|
"datalist",
|
||||||
|
"dd",
|
||||||
|
"del",
|
||||||
|
"details",
|
||||||
|
"dfn",
|
||||||
|
"dialog",
|
||||||
|
"div",
|
||||||
|
"dl",
|
||||||
|
"dt",
|
||||||
|
"em",
|
||||||
|
"embed",
|
||||||
|
"fieldset",
|
||||||
|
"figcaption",
|
||||||
|
"figure",
|
||||||
|
"footer",
|
||||||
|
"form",
|
||||||
|
"h1",
|
||||||
|
"h2",
|
||||||
|
"h3",
|
||||||
|
"h4",
|
||||||
|
"h5",
|
||||||
|
"h6",
|
||||||
|
"head",
|
||||||
|
"header",
|
||||||
|
"hgroup",
|
||||||
|
"hr",
|
||||||
|
"html",
|
||||||
|
"i",
|
||||||
|
"iframe",
|
||||||
|
"img",
|
||||||
|
"input",
|
||||||
|
"ins",
|
||||||
|
"kbd",
|
||||||
|
"keygen",
|
||||||
|
"label",
|
||||||
|
"legend",
|
||||||
|
"li",
|
||||||
|
"link",
|
||||||
|
"main",
|
||||||
|
"map",
|
||||||
|
"mark",
|
||||||
|
"menu",
|
||||||
|
"menuitem",
|
||||||
|
"meta",
|
||||||
|
"meter",
|
||||||
|
"nav",
|
||||||
|
"noscript",
|
||||||
|
"object",
|
||||||
|
"ol",
|
||||||
|
"optgroup",
|
||||||
|
"option",
|
||||||
|
"output",
|
||||||
|
"p",
|
||||||
|
"param",
|
||||||
|
"picture",
|
||||||
|
"pre",
|
||||||
|
"progress",
|
||||||
|
"q",
|
||||||
|
"rp",
|
||||||
|
"rt",
|
||||||
|
"ruby",
|
||||||
|
"s",
|
||||||
|
"samp",
|
||||||
|
"script",
|
||||||
|
"section",
|
||||||
|
"select",
|
||||||
|
"slot",
|
||||||
|
"small",
|
||||||
|
"source",
|
||||||
|
"span",
|
||||||
|
"strong",
|
||||||
|
"style",
|
||||||
|
"sub",
|
||||||
|
"summary",
|
||||||
|
"sup",
|
||||||
|
"table",
|
||||||
|
"tbody",
|
||||||
|
"td",
|
||||||
|
"template",
|
||||||
|
"textarea",
|
||||||
|
"tfoot",
|
||||||
|
"th",
|
||||||
|
"thead",
|
||||||
|
"time",
|
||||||
|
"title",
|
||||||
|
"tr",
|
||||||
|
"track",
|
||||||
|
"u",
|
||||||
|
"ul",
|
||||||
|
"var",
|
||||||
|
"video",
|
||||||
|
"wbr",
|
||||||
|
}
|
||||||
|
|
||||||
|
// https://html.spec.whatwg.org/multipage/indices.html#attributes-3
|
||||||
|
//
|
||||||
|
// "challenge", "command", "contextmenu", "dropzone", "icon", "keytype", "mediagroup",
|
||||||
|
// "radiogroup", "spellcheck", "scoped", "seamless", "sortable" and "sorted" have been removed from the spec,
|
||||||
|
// but are kept here for backwards compatibility.
|
||||||
|
var attributes = []string{
|
||||||
|
"abbr",
|
||||||
|
"accept",
|
||||||
|
"accept-charset",
|
||||||
|
"accesskey",
|
||||||
|
"action",
|
||||||
|
"allowfullscreen",
|
||||||
|
"allowpaymentrequest",
|
||||||
|
"allowusermedia",
|
||||||
|
"alt",
|
||||||
|
"as",
|
||||||
|
"async",
|
||||||
|
"autocomplete",
|
||||||
|
"autofocus",
|
||||||
|
"autoplay",
|
||||||
|
"challenge",
|
||||||
|
"charset",
|
||||||
|
"checked",
|
||||||
|
"cite",
|
||||||
|
"class",
|
||||||
|
"color",
|
||||||
|
"cols",
|
||||||
|
"colspan",
|
||||||
|
"command",
|
||||||
|
"content",
|
||||||
|
"contenteditable",
|
||||||
|
"contextmenu",
|
||||||
|
"controls",
|
||||||
|
"coords",
|
||||||
|
"crossorigin",
|
||||||
|
"data",
|
||||||
|
"datetime",
|
||||||
|
"default",
|
||||||
|
"defer",
|
||||||
|
"dir",
|
||||||
|
"dirname",
|
||||||
|
"disabled",
|
||||||
|
"download",
|
||||||
|
"draggable",
|
||||||
|
"dropzone",
|
||||||
|
"enctype",
|
||||||
|
"for",
|
||||||
|
"form",
|
||||||
|
"formaction",
|
||||||
|
"formenctype",
|
||||||
|
"formmethod",
|
||||||
|
"formnovalidate",
|
||||||
|
"formtarget",
|
||||||
|
"headers",
|
||||||
|
"height",
|
||||||
|
"hidden",
|
||||||
|
"high",
|
||||||
|
"href",
|
||||||
|
"hreflang",
|
||||||
|
"http-equiv",
|
||||||
|
"icon",
|
||||||
|
"id",
|
||||||
|
"inputmode",
|
||||||
|
"integrity",
|
||||||
|
"is",
|
||||||
|
"ismap",
|
||||||
|
"itemid",
|
||||||
|
"itemprop",
|
||||||
|
"itemref",
|
||||||
|
"itemscope",
|
||||||
|
"itemtype",
|
||||||
|
"keytype",
|
||||||
|
"kind",
|
||||||
|
"label",
|
||||||
|
"lang",
|
||||||
|
"list",
|
||||||
|
"loop",
|
||||||
|
"low",
|
||||||
|
"manifest",
|
||||||
|
"max",
|
||||||
|
"maxlength",
|
||||||
|
"media",
|
||||||
|
"mediagroup",
|
||||||
|
"method",
|
||||||
|
"min",
|
||||||
|
"minlength",
|
||||||
|
"multiple",
|
||||||
|
"muted",
|
||||||
|
"name",
|
||||||
|
"nomodule",
|
||||||
|
"nonce",
|
||||||
|
"novalidate",
|
||||||
|
"open",
|
||||||
|
"optimum",
|
||||||
|
"pattern",
|
||||||
|
"ping",
|
||||||
|
"placeholder",
|
||||||
|
"playsinline",
|
||||||
|
"poster",
|
||||||
|
"preload",
|
||||||
|
"radiogroup",
|
||||||
|
"readonly",
|
||||||
|
"referrerpolicy",
|
||||||
|
"rel",
|
||||||
|
"required",
|
||||||
|
"reversed",
|
||||||
|
"rows",
|
||||||
|
"rowspan",
|
||||||
|
"sandbox",
|
||||||
|
"spellcheck",
|
||||||
|
"scope",
|
||||||
|
"scoped",
|
||||||
|
"seamless",
|
||||||
|
"selected",
|
||||||
|
"shape",
|
||||||
|
"size",
|
||||||
|
"sizes",
|
||||||
|
"sortable",
|
||||||
|
"sorted",
|
||||||
|
"slot",
|
||||||
|
"span",
|
||||||
|
"spellcheck",
|
||||||
|
"src",
|
||||||
|
"srcdoc",
|
||||||
|
"srclang",
|
||||||
|
"srcset",
|
||||||
|
"start",
|
||||||
|
"step",
|
||||||
|
"style",
|
||||||
|
"tabindex",
|
||||||
|
"target",
|
||||||
|
"title",
|
||||||
|
"translate",
|
||||||
|
"type",
|
||||||
|
"typemustmatch",
|
||||||
|
"updateviacache",
|
||||||
|
"usemap",
|
||||||
|
"value",
|
||||||
|
"width",
|
||||||
|
"workertype",
|
||||||
|
"wrap",
|
||||||
|
}
|
||||||
|
|
||||||
|
// "onautocomplete", "onautocompleteerror", "onmousewheel",
|
||||||
|
// "onshow" and "onsort" have been removed from the spec,
|
||||||
|
// but are kept here for backwards compatibility.
|
||||||
|
var eventHandlers = []string{
|
||||||
|
"onabort",
|
||||||
|
"onautocomplete",
|
||||||
|
"onautocompleteerror",
|
||||||
|
"onauxclick",
|
||||||
|
"onafterprint",
|
||||||
|
"onbeforeprint",
|
||||||
|
"onbeforeunload",
|
||||||
|
"onblur",
|
||||||
|
"oncancel",
|
||||||
|
"oncanplay",
|
||||||
|
"oncanplaythrough",
|
||||||
|
"onchange",
|
||||||
|
"onclick",
|
||||||
|
"onclose",
|
||||||
|
"oncontextmenu",
|
||||||
|
"oncopy",
|
||||||
|
"oncuechange",
|
||||||
|
"oncut",
|
||||||
|
"ondblclick",
|
||||||
|
"ondrag",
|
||||||
|
"ondragend",
|
||||||
|
"ondragenter",
|
||||||
|
"ondragexit",
|
||||||
|
"ondragleave",
|
||||||
|
"ondragover",
|
||||||
|
"ondragstart",
|
||||||
|
"ondrop",
|
||||||
|
"ondurationchange",
|
||||||
|
"onemptied",
|
||||||
|
"onended",
|
||||||
|
"onerror",
|
||||||
|
"onfocus",
|
||||||
|
"onhashchange",
|
||||||
|
"oninput",
|
||||||
|
"oninvalid",
|
||||||
|
"onkeydown",
|
||||||
|
"onkeypress",
|
||||||
|
"onkeyup",
|
||||||
|
"onlanguagechange",
|
||||||
|
"onload",
|
||||||
|
"onloadeddata",
|
||||||
|
"onloadedmetadata",
|
||||||
|
"onloadend",
|
||||||
|
"onloadstart",
|
||||||
|
"onmessage",
|
||||||
|
"onmessageerror",
|
||||||
|
"onmousedown",
|
||||||
|
"onmouseenter",
|
||||||
|
"onmouseleave",
|
||||||
|
"onmousemove",
|
||||||
|
"onmouseout",
|
||||||
|
"onmouseover",
|
||||||
|
"onmouseup",
|
||||||
|
"onmousewheel",
|
||||||
|
"onwheel",
|
||||||
|
"onoffline",
|
||||||
|
"ononline",
|
||||||
|
"onpagehide",
|
||||||
|
"onpageshow",
|
||||||
|
"onpaste",
|
||||||
|
"onpause",
|
||||||
|
"onplay",
|
||||||
|
"onplaying",
|
||||||
|
"onpopstate",
|
||||||
|
"onprogress",
|
||||||
|
"onratechange",
|
||||||
|
"onreset",
|
||||||
|
"onresize",
|
||||||
|
"onrejectionhandled",
|
||||||
|
"onscroll",
|
||||||
|
"onsecuritypolicyviolation",
|
||||||
|
"onseeked",
|
||||||
|
"onseeking",
|
||||||
|
"onselect",
|
||||||
|
"onshow",
|
||||||
|
"onsort",
|
||||||
|
"onstalled",
|
||||||
|
"onstorage",
|
||||||
|
"onsubmit",
|
||||||
|
"onsuspend",
|
||||||
|
"ontimeupdate",
|
||||||
|
"ontoggle",
|
||||||
|
"onunhandledrejection",
|
||||||
|
"onunload",
|
||||||
|
"onvolumechange",
|
||||||
|
"onwaiting",
|
||||||
|
}
|
||||||
|
|
||||||
|
// extra are ad-hoc values not covered by any of the lists above.
|
||||||
|
var extra = []string{
|
||||||
|
"acronym",
|
||||||
|
"align",
|
||||||
|
"annotation",
|
||||||
|
"annotation-xml",
|
||||||
|
"applet",
|
||||||
|
"basefont",
|
||||||
|
"bgsound",
|
||||||
|
"big",
|
||||||
|
"blink",
|
||||||
|
"center",
|
||||||
|
"color",
|
||||||
|
"desc",
|
||||||
|
"face",
|
||||||
|
"font",
|
||||||
|
"foreignObject", // HTML is case-insensitive, but SVG-embedded-in-HTML is case-sensitive.
|
||||||
|
"foreignobject",
|
||||||
|
"frame",
|
||||||
|
"frameset",
|
||||||
|
"image",
|
||||||
|
"isindex",
|
||||||
|
"listing",
|
||||||
|
"malignmark",
|
||||||
|
"marquee",
|
||||||
|
"math",
|
||||||
|
"mglyph",
|
||||||
|
"mi",
|
||||||
|
"mn",
|
||||||
|
"mo",
|
||||||
|
"ms",
|
||||||
|
"mtext",
|
||||||
|
"nobr",
|
||||||
|
"noembed",
|
||||||
|
"noframes",
|
||||||
|
"plaintext",
|
||||||
|
"prompt",
|
||||||
|
"public",
|
||||||
|
"rb",
|
||||||
|
"rtc",
|
||||||
|
"spacer",
|
||||||
|
"strike",
|
||||||
|
"svg",
|
||||||
|
"system",
|
||||||
|
"tt",
|
||||||
|
"xmp",
|
||||||
|
}
|
783
vendor/golang.org/x/net/html/atom/table.go
generated
vendored
Normal file
@ -0,0 +1,783 @@
|
|||||||
|
// Code generated by go generate gen.go; DO NOT EDIT.
|
||||||
|
|
||||||
|
//go:generate go run gen.go
|
||||||
|
|
||||||
|
package atom
|
||||||
|
|
||||||
|
const (
|
||||||
|
A Atom = 0x1
|
||||||
|
Abbr Atom = 0x4
|
||||||
|
Accept Atom = 0x1a06
|
||||||
|
AcceptCharset Atom = 0x1a0e
|
||||||
|
Accesskey Atom = 0x2c09
|
||||||
|
Acronym Atom = 0xaa07
|
||||||
|
Action Atom = 0x27206
|
||||||
|
Address Atom = 0x6f307
|
||||||
|
Align Atom = 0xb105
|
||||||
|
Allowfullscreen Atom = 0x2080f
|
||||||
|
Allowpaymentrequest Atom = 0xc113
|
||||||
|
Allowusermedia Atom = 0xdd0e
|
||||||
|
Alt Atom = 0xf303
|
||||||
|
Annotation Atom = 0x1c90a
|
||||||
|
AnnotationXml Atom = 0x1c90e
|
||||||
|
Applet Atom = 0x31906
|
||||||
|
Area Atom = 0x35604
|
||||||
|
Article Atom = 0x3fc07
|
||||||
|
As Atom = 0x3c02
|
||||||
|
Aside Atom = 0x10705
|
||||||
|
Async Atom = 0xff05
|
||||||
|
Audio Atom = 0x11505
|
||||||
|
Autocomplete Atom = 0x2780c
|
||||||
|
Autofocus Atom = 0x12109
|
||||||
|
Autoplay Atom = 0x13c08
|
||||||
|
B Atom = 0x101
|
||||||
|
Base Atom = 0x3b04
|
||||||
|
Basefont Atom = 0x3b08
|
||||||
|
Bdi Atom = 0xba03
|
||||||
|
Bdo Atom = 0x14b03
|
||||||
|
Bgsound Atom = 0x15e07
|
||||||
|
Big Atom = 0x17003
|
||||||
|
Blink Atom = 0x17305
|
||||||
|
Blockquote Atom = 0x1870a
|
||||||
|
Body Atom = 0x2804
|
||||||
|
Br Atom = 0x202
|
||||||
|
Button Atom = 0x19106
|
||||||
|
Canvas Atom = 0x10306
|
||||||
|
Caption Atom = 0x23107
|
||||||
|
Center Atom = 0x22006
|
||||||
|
Challenge Atom = 0x29b09
|
||||||
|
Charset Atom = 0x2107
|
||||||
|
Checked Atom = 0x47907
|
||||||
|
Cite Atom = 0x19c04
|
||||||
|
Class Atom = 0x56405
|
||||||
|
Code Atom = 0x5c504
|
||||||
|
Col Atom = 0x1ab03
|
||||||
|
Colgroup Atom = 0x1ab08
|
||||||
|
Color Atom = 0x1bf05
|
||||||
|
Cols Atom = 0x1c404
|
||||||
|
Colspan Atom = 0x1c407
|
||||||
|
Command Atom = 0x1d707
|
||||||
|
Content Atom = 0x58b07
|
||||||
|
Contenteditable Atom = 0x58b0f
|
||||||
|
Contextmenu Atom = 0x3800b
|
||||||
|
Controls Atom = 0x1de08
|
||||||
|
Coords Atom = 0x1ea06
|
||||||
|
Crossorigin Atom = 0x1fb0b
|
||||||
|
Data Atom = 0x4a504
|
||||||
|
Datalist Atom = 0x4a508
|
||||||
|
Datetime Atom = 0x2b808
|
||||||
|
Dd Atom = 0x2d702
|
||||||
|
Default Atom = 0x10a07
|
||||||
|
Defer Atom = 0x5c705
|
||||||
|
Del Atom = 0x45203
|
||||||
|
Desc Atom = 0x56104
|
||||||
|
Details Atom = 0x7207
|
||||||
|
Dfn Atom = 0x8703
|
||||||
|
Dialog Atom = 0xbb06
|
||||||
|
Dir Atom = 0x9303
|
||||||
|
Dirname Atom = 0x9307
|
||||||
|
Disabled Atom = 0x16408
|
||||||
|
Div Atom = 0x16b03
|
||||||
|
Dl Atom = 0x5e602
|
||||||
|
Download Atom = 0x46308
|
||||||
|
Draggable Atom = 0x17a09
|
||||||
|
Dropzone Atom = 0x40508
|
||||||
|
Dt Atom = 0x64b02
|
||||||
|
Em Atom = 0x6e02
|
||||||
|
Embed Atom = 0x6e05
|
||||||
|
Enctype Atom = 0x28d07
|
||||||
|
Face Atom = 0x21e04
|
||||||
|
Fieldset Atom = 0x22608
|
||||||
|
Figcaption Atom = 0x22e0a
|
||||||
|
Figure Atom = 0x24806
|
||||||
|
Font Atom = 0x3f04
|
||||||
|
Footer Atom = 0xf606
|
||||||
|
For Atom = 0x25403
|
||||||
|
ForeignObject Atom = 0x2540d
|
||||||
|
Foreignobject Atom = 0x2610d
|
||||||
|
Form Atom = 0x26e04
|
||||||
|
Formaction Atom = 0x26e0a
|
||||||
|
Formenctype Atom = 0x2890b
|
||||||
|
Formmethod Atom = 0x2a40a
|
||||||
|
Formnovalidate Atom = 0x2ae0e
|
||||||
|
Formtarget Atom = 0x2c00a
|
||||||
|
Frame Atom = 0x8b05
|
||||||
|
Frameset Atom = 0x8b08
|
||||||
|
H1 Atom = 0x15c02
|
||||||
|
H2 Atom = 0x2de02
|
||||||
|
H3 Atom = 0x30d02
|
||||||
|
H4 Atom = 0x34502
|
||||||
|
H5 Atom = 0x34f02
|
||||||
|
H6 Atom = 0x64d02
|
||||||
|
Head Atom = 0x33104
|
||||||
|
Header Atom = 0x33106
|
||||||
|
Headers Atom = 0x33107
|
||||||
|
Height Atom = 0x5206
|
||||||
|
Hgroup Atom = 0x2ca06
|
||||||
|
Hidden Atom = 0x2d506
|
||||||
|
High Atom = 0x2db04
|
||||||
|
Hr Atom = 0x15702
|
||||||
|
Href Atom = 0x2e004
|
||||||
|
Hreflang Atom = 0x2e008
|
||||||
|
Html Atom = 0x5604
|
||||||
|
HttpEquiv Atom = 0x2e80a
|
||||||
|
I Atom = 0x601
|
||||||
|
Icon Atom = 0x58a04
|
||||||
|
Id Atom = 0x10902
|
||||||
|
Iframe Atom = 0x2fc06
|
||||||
|
Image Atom = 0x30205
|
||||||
|
Img Atom = 0x30703
|
||||||
|
Input Atom = 0x44b05
|
||||||
|
Inputmode Atom = 0x44b09
|
||||||
|
Ins Atom = 0x20403
|
||||||
|
Integrity Atom = 0x23f09
|
||||||
|
Is Atom = 0x16502
|
||||||
|
Isindex Atom = 0x30f07
|
||||||
|
Ismap Atom = 0x31605
|
||||||
|
Itemid Atom = 0x38b06
|
||||||
|
Itemprop Atom = 0x19d08
|
||||||
|
Itemref Atom = 0x3cd07
|
||||||
|
Itemscope Atom = 0x67109
|
||||||
|
Itemtype Atom = 0x31f08
|
||||||
|
Kbd Atom = 0xb903
|
||||||
|
Keygen Atom = 0x3206
|
||||||
|
Keytype Atom = 0xd607
|
||||||
|
Kind Atom = 0x17704
|
||||||
|
Label Atom = 0x5905
|
||||||
|
Lang Atom = 0x2e404
|
||||||
|
Legend Atom = 0x18106
|
||||||
|
Li Atom = 0xb202
|
||||||
|
Link Atom = 0x17404
|
||||||
|
List Atom = 0x4a904
|
||||||
|
Listing Atom = 0x4a907
|
||||||
|
Loop Atom = 0x5d04
|
||||||
|
Low Atom = 0xc303
|
||||||
|
Main Atom = 0x1004
|
||||||
|
Malignmark Atom = 0xb00a
|
||||||
|
Manifest Atom = 0x6d708
|
||||||
|
Map Atom = 0x31803
|
||||||
|
Mark Atom = 0xb604
|
||||||
|
Marquee Atom = 0x32707
|
||||||
|
Math Atom = 0x32e04
|
||||||
|
Max Atom = 0x33d03
|
||||||
|
Maxlength Atom = 0x33d09
|
||||||
|
Media Atom = 0xe605
|
||||||
|
Mediagroup Atom = 0xe60a
|
||||||
|
Menu Atom = 0x38704
|
||||||
|
Menuitem Atom = 0x38708
|
||||||
|
Meta Atom = 0x4b804
|
||||||
|
Meter Atom = 0x9805
|
||||||
|
Method Atom = 0x2a806
|
||||||
|
Mglyph Atom = 0x30806
|
||||||
|
Mi Atom = 0x34702
|
||||||
|
Min Atom = 0x34703
|
||||||
|
Minlength Atom = 0x34709
|
||||||
|
Mn Atom = 0x2b102
|
||||||
|
Mo Atom = 0xa402
|
||||||
|
Ms Atom = 0x67402
|
||||||
|
Mtext Atom = 0x35105
|
||||||
|
Multiple Atom = 0x35f08
|
||||||
|
Muted Atom = 0x36705
|
||||||
|
Name Atom = 0x9604
|
||||||
|
Nav Atom = 0x1303
|
||||||
|
Nobr Atom = 0x3704
|
||||||
|
Noembed Atom = 0x6c07
|
||||||
|
Noframes Atom = 0x8908
|
||||||
|
Nomodule Atom = 0xa208
|
||||||
|
Nonce Atom = 0x1a605
|
||||||
|
Noscript Atom = 0x21608
|
||||||
|
Novalidate Atom = 0x2b20a
|
||||||
|
Object Atom = 0x26806
|
||||||
|
Ol Atom = 0x13702
|
||||||
|
Onabort Atom = 0x19507
|
||||||
|
Onafterprint Atom = 0x2360c
|
||||||
|
Onautocomplete Atom = 0x2760e
|
||||||
|
Onautocompleteerror Atom = 0x27613
|
||||||
|
Onauxclick Atom = 0x61f0a
|
||||||
|
Onbeforeprint Atom = 0x69e0d
|
||||||
|
Onbeforeunload Atom = 0x6e70e
|
||||||
|
Onblur Atom = 0x56d06
|
||||||
|
Oncancel Atom = 0x11908
|
||||||
|
Oncanplay Atom = 0x14d09
|
||||||
|
Oncanplaythrough Atom = 0x14d10
|
||||||
|
Onchange Atom = 0x41b08
|
||||||
|
Onclick Atom = 0x2f507
|
||||||
|
Onclose Atom = 0x36c07
|
||||||
|
Oncontextmenu Atom = 0x37e0d
|
||||||
|
Oncopy Atom = 0x39106
|
||||||
|
Oncuechange Atom = 0x3970b
|
||||||
|
Oncut Atom = 0x3a205
|
||||||
|
Ondblclick Atom = 0x3a70a
|
||||||
|
Ondrag Atom = 0x3b106
|
||||||
|
Ondragend Atom = 0x3b109
|
||||||
|
Ondragenter Atom = 0x3ba0b
|
||||||
|
Ondragexit Atom = 0x3c50a
|
||||||
|
Ondragleave Atom = 0x3df0b
|
||||||
|
Ondragover Atom = 0x3ea0a
|
||||||
|
Ondragstart Atom = 0x3f40b
|
||||||
|
Ondrop Atom = 0x40306
|
||||||
|
Ondurationchange Atom = 0x41310
|
||||||
|
Onemptied Atom = 0x40a09
|
||||||
|
Onended Atom = 0x42307
|
||||||
|
Onerror Atom = 0x42a07
|
||||||
|
Onfocus Atom = 0x43107
|
||||||
|
Onhashchange Atom = 0x43d0c
|
||||||
|
Oninput Atom = 0x44907
|
||||||
|
Oninvalid Atom = 0x45509
|
||||||
|
Onkeydown Atom = 0x45e09
|
||||||
|
Onkeypress Atom = 0x46b0a
|
||||||
|
Onkeyup Atom = 0x48007
|
||||||
|
Onlanguagechange Atom = 0x48d10
|
||||||
|
Onload Atom = 0x49d06
|
||||||
|
Onloadeddata Atom = 0x49d0c
|
||||||
|
Onloadedmetadata Atom = 0x4b010
|
||||||
|
Onloadend Atom = 0x4c609
|
||||||
|
Onloadstart Atom = 0x4cf0b
|
||||||
|
Onmessage Atom = 0x4da09
|
||||||
|
Onmessageerror Atom = 0x4da0e
|
||||||
|
Onmousedown Atom = 0x4e80b
|
||||||
|
Onmouseenter Atom = 0x4f30c
|
||||||
|
Onmouseleave Atom = 0x4ff0c
|
||||||
|
Onmousemove Atom = 0x50b0b
|
||||||
|
Onmouseout Atom = 0x5160a
|
||||||
|
Onmouseover Atom = 0x5230b
|
||||||
|
Onmouseup Atom = 0x52e09
|
||||||
|
Onmousewheel Atom = 0x53c0c
|
||||||
|
Onoffline Atom = 0x54809
|
||||||
|
Ononline Atom = 0x55108
|
||||||
|
Onpagehide Atom = 0x5590a
|
||||||
|
Onpageshow Atom = 0x5730a
|
||||||
|
Onpaste Atom = 0x57f07
|
||||||
|
Onpause Atom = 0x59a07
|
||||||
|
Onplay Atom = 0x5a406
|
||||||
|
Onplaying Atom = 0x5a409
|
||||||
|
Onpopstate Atom = 0x5ad0a
|
||||||
|
Onprogress Atom = 0x5b70a
|
||||||
|
Onratechange Atom = 0x5cc0c
|
||||||
|
Onrejectionhandled Atom = 0x5d812
|
||||||
|
Onreset Atom = 0x5ea07
|
||||||
|
Onresize Atom = 0x5f108
|
||||||
|
Onscroll Atom = 0x60008
|
||||||
|
Onsecuritypolicyviolation Atom = 0x60819
|
||||||
|
Onseeked Atom = 0x62908
|
||||||
|
Onseeking Atom = 0x63109
|
||||||
|
Onselect Atom = 0x63a08
|
||||||
|
Onshow Atom = 0x64406
|
||||||
|
Onsort Atom = 0x64f06
|
||||||
|
Onstalled Atom = 0x65909
|
||||||
|
Onstorage Atom = 0x66209
|
||||||
|
Onsubmit Atom = 0x66b08
|
||||||
|
Onsuspend Atom = 0x67b09
|
||||||
|
Ontimeupdate Atom = 0x400c
|
||||||
|
Ontoggle Atom = 0x68408
|
||||||
|
Onunhandledrejection Atom = 0x68c14
|
||||||
|
Onunload Atom = 0x6ab08
|
||||||
|
Onvolumechange Atom = 0x6b30e
|
||||||
|
Onwaiting Atom = 0x6c109
|
||||||
|
Onwheel Atom = 0x6ca07
|
||||||
|
Open Atom = 0x1a304
|
||||||
|
Optgroup Atom = 0x5f08
|
||||||
|
Optimum Atom = 0x6d107
|
||||||
|
Option Atom = 0x6e306
|
||||||
|
Output Atom = 0x51d06
|
||||||
|
P Atom = 0xc01
|
||||||
|
Param Atom = 0xc05
|
||||||
|
Pattern Atom = 0x6607
|
||||||
|
Picture Atom = 0x7b07
|
||||||
|
Ping Atom = 0xef04
|
||||||
|
Placeholder Atom = 0x1310b
|
||||||
|
Plaintext Atom = 0x1b209
|
||||||
|
Playsinline Atom = 0x1400b
|
||||||
|
Poster Atom = 0x2cf06
|
||||||
|
Pre Atom = 0x47003
|
||||||
|
Preload Atom = 0x48607
|
||||||
|
Progress Atom = 0x5b908
|
||||||
|
Prompt Atom = 0x53606
|
||||||
|
Public Atom = 0x58606
|
||||||
|
Q Atom = 0xcf01
|
||||||
|
Radiogroup Atom = 0x30a
|
||||||
|
Rb Atom = 0x3a02
|
||||||
|
Readonly Atom = 0x35708
|
||||||
|
Referrerpolicy Atom = 0x3d10e
|
||||||
|
Rel Atom = 0x48703
|
||||||
|
Required Atom = 0x24c08
|
||||||
|
Reversed Atom = 0x8008
|
||||||
|
Rows Atom = 0x9c04
|
||||||
|
Rowspan Atom = 0x9c07
|
||||||
|
Rp Atom = 0x23c02
|
||||||
|
Rt Atom = 0x19a02
|
||||||
|
Rtc Atom = 0x19a03
|
||||||
|
Ruby Atom = 0xfb04
|
||||||
|
S Atom = 0x2501
|
||||||
|
Samp Atom = 0x7804
|
||||||
|
Sandbox Atom = 0x12907
|
||||||
|
Scope Atom = 0x67505
|
||||||
|
Scoped Atom = 0x67506
|
||||||
|
Script Atom = 0x21806
|
||||||
|
Seamless Atom = 0x37108
|
||||||
|
Section Atom = 0x56807
|
||||||
|
Select Atom = 0x63c06
|
||||||
|
Selected Atom = 0x63c08
|
||||||
|
Shape Atom = 0x1e505
|
||||||
|
Size Atom = 0x5f504
|
||||||
|
Sizes Atom = 0x5f505
|
||||||
|
Slot Atom = 0x1ef04
|
||||||
|
Small Atom = 0x20605
|
||||||
|
Sortable Atom = 0x65108
|
||||||
|
Sorted Atom = 0x33706
|
||||||
|
Source Atom = 0x37806
|
||||||
|
Spacer Atom = 0x43706
|
||||||
|
Span Atom = 0x9f04
|
||||||
|
Spellcheck Atom = 0x4740a
|
||||||
|
Src Atom = 0x5c003
|
||||||
|
Srcdoc Atom = 0x5c006
|
||||||
|
Srclang Atom = 0x5f907
|
||||||
|
Srcset Atom = 0x6f906
|
||||||
|
Start Atom = 0x3fa05
|
||||||
|
Step Atom = 0x58304
|
||||||
|
Strike Atom = 0xd206
|
||||||
|
Strong Atom = 0x6dd06
|
||||||
|
Style Atom = 0x6ff05
|
||||||
|
Sub Atom = 0x66d03
|
||||||
|
Summary Atom = 0x70407
|
||||||
|
Sup Atom = 0x70b03
|
||||||
|
Svg Atom = 0x70e03
|
||||||
|
System Atom = 0x71106
|
||||||
|
Tabindex Atom = 0x4be08
|
||||||
|
Table Atom = 0x59505
|
||||||
|
Target Atom = 0x2c406
|
||||||
|
Tbody Atom = 0x2705
|
||||||
|
Td Atom = 0x9202
|
||||||
|
Template Atom = 0x71408
|
||||||
|
Textarea Atom = 0x35208
|
||||||
|
Tfoot Atom = 0xf505
|
||||||
|
Th Atom = 0x15602
|
||||||
|
Thead Atom = 0x33005
|
||||||
|
Time Atom = 0x4204
|
||||||
|
Title Atom = 0x11005
|
||||||
|
Tr Atom = 0xcc02
|
||||||
|
Track Atom = 0x1ba05
|
||||||
|
Translate Atom = 0x1f209
|
||||||
|
Tt Atom = 0x6802
|
||||||
|
Type Atom = 0xd904
|
||||||
|
Typemustmatch Atom = 0x2900d
|
||||||
|
U Atom = 0xb01
|
||||||
|
Ul Atom = 0xa702
|
||||||
|
Updateviacache Atom = 0x460e
|
||||||
|
Usemap Atom = 0x59e06
|
||||||
|
Value Atom = 0x1505
|
||||||
|
Var Atom = 0x16d03
|
||||||
|
Video Atom = 0x2f105
|
||||||
|
Wbr Atom = 0x57c03
|
||||||
|
Width Atom = 0x64905
|
||||||
|
Workertype Atom = 0x71c0a
|
||||||
|
Wrap Atom = 0x72604
|
||||||
|
Xmp Atom = 0x12f03
|
||||||
|
)
|
||||||
|
|
||||||
|
const hash0 = 0x81cdf10e
|
||||||
|
|
||||||
|
const maxAtomLen = 25
|
||||||
|
|
||||||
|
var table = [1 << 9]Atom{
|
||||||
|
0x1: 0xe60a, // mediagroup
|
||||||
|
0x2: 0x2e404, // lang
|
||||||
|
0x4: 0x2c09, // accesskey
|
||||||
|
0x5: 0x8b08, // frameset
|
||||||
|
0x7: 0x63a08, // onselect
|
||||||
|
0x8: 0x71106, // system
|
||||||
|
0xa: 0x64905, // width
|
||||||
|
0xc: 0x2890b, // formenctype
|
||||||
|
0xd: 0x13702, // ol
|
||||||
|
0xe: 0x3970b, // oncuechange
|
||||||
|
0x10: 0x14b03, // bdo
|
||||||
|
0x11: 0x11505, // audio
|
||||||
|
0x12: 0x17a09, // draggable
|
||||||
|
0x14: 0x2f105, // video
|
||||||
|
0x15: 0x2b102, // mn
|
||||||
|
0x16: 0x38704, // menu
|
||||||
|
0x17: 0x2cf06, // poster
|
||||||
|
0x19: 0xf606, // footer
|
||||||
|
0x1a: 0x2a806, // method
|
||||||
|
0x1b: 0x2b808, // datetime
|
||||||
|
0x1c: 0x19507, // onabort
|
||||||
|
0x1d: 0x460e, // updateviacache
|
||||||
|
0x1e: 0xff05, // async
|
||||||
|
0x1f: 0x49d06, // onload
|
||||||
|
0x21: 0x11908, // oncancel
|
||||||
|
0x22: 0x62908, // onseeked
|
||||||
|
0x23: 0x30205, // image
|
||||||
|
0x24: 0x5d812, // onrejectionhandled
|
||||||
|
0x26: 0x17404, // link
|
||||||
|
0x27: 0x51d06, // output
|
||||||
|
0x28: 0x33104, // head
|
||||||
|
0x29: 0x4ff0c, // onmouseleave
|
||||||
|
0x2a: 0x57f07, // onpaste
|
||||||
|
0x2b: 0x5a409, // onplaying
|
||||||
|
0x2c: 0x1c407, // colspan
|
||||||
|
0x2f: 0x1bf05, // color
|
||||||
|
0x30: 0x5f504, // size
|
||||||
|
0x31: 0x2e80a, // http-equiv
|
||||||
|
0x33: 0x601, // i
|
||||||
|
0x34: 0x5590a, // onpagehide
|
||||||
|
0x35: 0x68c14, // onunhandledrejection
|
||||||
|
0x37: 0x42a07, // onerror
|
||||||
|
0x3a: 0x3b08, // basefont
|
||||||
|
0x3f: 0x1303, // nav
|
||||||
|
0x40: 0x17704, // kind
|
||||||
|
0x41: 0x35708, // readonly
|
||||||
|
0x42: 0x30806, // mglyph
|
||||||
|
0x44: 0xb202, // li
|
||||||
|
0x46: 0x2d506, // hidden
|
||||||
|
0x47: 0x70e03, // svg
|
||||||
|
0x48: 0x58304, // step
|
||||||
|
0x49: 0x23f09, // integrity
|
||||||
|
0x4a: 0x58606, // public
|
||||||
|
0x4c: 0x1ab03, // col
|
||||||
|
0x4d: 0x1870a, // blockquote
|
||||||
|
0x4e: 0x34f02, // h5
|
||||||
|
0x50: 0x5b908, // progress
|
||||||
|
0x51: 0x5f505, // sizes
|
||||||
|
0x52: 0x34502, // h4
|
||||||
|
0x56: 0x33005, // thead
|
||||||
|
0x57: 0xd607, // keytype
|
||||||
|
0x58: 0x5b70a, // onprogress
|
||||||
|
0x59: 0x44b09, // inputmode
|
||||||
|
0x5a: 0x3b109, // ondragend
|
||||||
|
0x5d: 0x3a205, // oncut
|
||||||
|
0x5e: 0x43706, // spacer
|
||||||
|
0x5f: 0x1ab08, // colgroup
|
||||||
|
0x62: 0x16502, // is
|
||||||
|
0x65: 0x3c02, // as
|
||||||
|
0x66: 0x54809, // onoffline
|
||||||
|
0x67: 0x33706, // sorted
|
||||||
|
0x69: 0x48d10, // onlanguagechange
|
||||||
|
0x6c: 0x43d0c, // onhashchange
|
||||||
|
0x6d: 0x9604, // name
|
||||||
|
0x6e: 0xf505, // tfoot
|
||||||
|
0x6f: 0x56104, // desc
|
||||||
|
0x70: 0x33d03, // max
|
||||||
|
0x72: 0x1ea06, // coords
|
||||||
|
0x73: 0x30d02, // h3
|
||||||
|
0x74: 0x6e70e, // onbeforeunload
|
||||||
|
0x75: 0x9c04, // rows
|
||||||
|
0x76: 0x63c06, // select
|
||||||
|
0x77: 0x9805, // meter
|
||||||
|
0x78: 0x38b06, // itemid
|
||||||
|
0x79: 0x53c0c, // onmousewheel
|
||||||
|
0x7a: 0x5c006, // srcdoc
|
||||||
|
0x7d: 0x1ba05, // track
|
||||||
|
0x7f: 0x31f08, // itemtype
|
||||||
|
0x82: 0xa402, // mo
|
||||||
|
0x83: 0x41b08, // onchange
|
||||||
|
0x84: 0x33107, // headers
|
||||||
|
0x85: 0x5cc0c, // onratechange
|
||||||
|
0x86: 0x60819, // onsecuritypolicyviolation
|
||||||
|
0x88: 0x4a508, // datalist
|
||||||
|
0x89: 0x4e80b, // onmousedown
|
||||||
|
0x8a: 0x1ef04, // slot
|
||||||
|
0x8b: 0x4b010, // onloadedmetadata
|
||||||
|
0x8c: 0x1a06, // accept
|
||||||
|
0x8d: 0x26806, // object
|
||||||
|
0x91: 0x6b30e, // onvolumechange
|
||||||
|
0x92: 0x2107, // charset
|
||||||
|
0x93: 0x27613, // onautocompleteerror
|
||||||
|
0x94: 0xc113, // allowpaymentrequest
|
||||||
|
0x95: 0x2804, // body
|
||||||
|
0x96: 0x10a07, // default
|
||||||
|
0x97: 0x63c08, // selected
|
||||||
|
0x98: 0x21e04, // face
|
||||||
|
0x99: 0x1e505, // shape
|
||||||
|
0x9b: 0x68408, // ontoggle
|
||||||
|
0x9e: 0x64b02, // dt
|
||||||
|
0x9f: 0xb604, // mark
|
||||||
|
0xa1: 0xb01, // u
|
||||||
|
0xa4: 0x6ab08, // onunload
|
||||||
|
0xa5: 0x5d04, // loop
|
||||||
|
0xa6: 0x16408, // disabled
|
||||||
|
0xaa: 0x42307, // onended
|
||||||
|
0xab: 0xb00a, // malignmark
|
||||||
|
0xad: 0x67b09, // onsuspend
|
||||||
|
0xae: 0x35105, // mtext
|
||||||
|
0xaf: 0x64f06, // onsort
|
||||||
|
0xb0: 0x19d08, // itemprop
|
||||||
|
0xb3: 0x67109, // itemscope
|
||||||
|
0xb4: 0x17305, // blink
|
||||||
|
0xb6: 0x3b106, // ondrag
|
||||||
|
0xb7: 0xa702, // ul
|
||||||
|
0xb8: 0x26e04, // form
|
||||||
|
0xb9: 0x12907, // sandbox
|
||||||
|
0xba: 0x8b05, // frame
|
||||||
|
0xbb: 0x1505, // value
|
||||||
|
0xbc: 0x66209, // onstorage
|
||||||
|
0xbf: 0xaa07, // acronym
|
||||||
|
0xc0: 0x19a02, // rt
|
||||||
|
0xc2: 0x202, // br
|
||||||
|
0xc3: 0x22608, // fieldset
|
||||||
|
0xc4: 0x2900d, // typemustmatch
|
||||||
|
0xc5: 0xa208, // nomodule
|
||||||
|
0xc6: 0x6c07, // noembed
|
||||||
|
0xc7: 0x69e0d, // onbeforeprint
|
||||||
|
0xc8: 0x19106, // button
|
||||||
|
0xc9: 0x2f507, // onclick
|
||||||
|
0xca: 0x70407, // summary
|
||||||
|
0xcd: 0xfb04, // ruby
|
||||||
|
0xce: 0x56405, // class
|
||||||
|
0xcf: 0x3f40b, // ondragstart
|
||||||
|
0xd0: 0x23107, // caption
|
||||||
|
0xd4: 0xdd0e, // allowusermedia
|
||||||
|
0xd5: 0x4cf0b, // onloadstart
|
||||||
|
0xd9: 0x16b03, // div
|
||||||
|
0xda: 0x4a904, // list
|
||||||
|
0xdb: 0x32e04, // math
|
||||||
|
0xdc: 0x44b05, // input
|
||||||
|
0xdf: 0x3ea0a, // ondragover
|
||||||
|
0xe0: 0x2de02, // h2
|
||||||
|
0xe2: 0x1b209, // plaintext
|
||||||
|
0xe4: 0x4f30c, // onmouseenter
|
||||||
|
0xe7: 0x47907, // checked
|
||||||
|
0xe8: 0x47003, // pre
|
||||||
|
0xea: 0x35f08, // multiple
|
||||||
|
0xeb: 0xba03, // bdi
|
||||||
|
0xec: 0x33d09, // maxlength
|
||||||
|
0xed: 0xcf01, // q
|
||||||
|
0xee: 0x61f0a, // onauxclick
|
||||||
|
0xf0: 0x57c03, // wbr
|
||||||
|
0xf2: 0x3b04, // base
|
||||||
|
0xf3: 0x6e306, // option
|
||||||
|
0xf5: 0x41310, // ondurationchange
|
||||||
|
0xf7: 0x8908, // noframes
|
||||||
|
0xf9: 0x40508, // dropzone
|
||||||
|
0xfb: 0x67505, // scope
|
||||||
|
0xfc: 0x8008, // reversed
|
||||||
|
0xfd: 0x3ba0b, // ondragenter
|
||||||
|
0xfe: 0x3fa05, // start
|
||||||
|
0xff: 0x12f03, // xmp
|
||||||
|
0x100: 0x5f907, // srclang
|
||||||
|
0x101: 0x30703, // img
|
||||||
|
0x104: 0x101, // b
|
||||||
|
0x105: 0x25403, // for
|
||||||
|
0x106: 0x10705, // aside
|
||||||
|
0x107: 0x44907, // oninput
|
||||||
|
0x108: 0x35604, // area
|
||||||
|
0x109: 0x2a40a, // formmethod
|
||||||
|
0x10a: 0x72604, // wrap
|
||||||
|
0x10c: 0x23c02, // rp
|
||||||
|
0x10d: 0x46b0a, // onkeypress
|
||||||
|
0x10e: 0x6802, // tt
|
||||||
|
0x110: 0x34702, // mi
|
||||||
|
0x111: 0x36705, // muted
|
||||||
|
0x112: 0xf303, // alt
|
||||||
|
0x113: 0x5c504, // code
|
||||||
|
0x114: 0x6e02, // em
|
||||||
|
0x115: 0x3c50a, // ondragexit
|
||||||
|
0x117: 0x9f04, // span
|
||||||
|
0x119: 0x6d708, // manifest
|
||||||
|
0x11a: 0x38708, // menuitem
|
||||||
|
0x11b: 0x58b07, // content
|
||||||
|
0x11d: 0x6c109, // onwaiting
|
||||||
|
0x11f: 0x4c609, // onloadend
|
||||||
|
0x121: 0x37e0d, // oncontextmenu
|
||||||
|
0x123: 0x56d06, // onblur
|
||||||
|
0x124: 0x3fc07, // article
|
||||||
|
0x125: 0x9303, // dir
|
||||||
|
0x126: 0xef04, // ping
|
||||||
|
0x127: 0x24c08, // required
|
||||||
|
0x128: 0x45509, // oninvalid
|
||||||
|
0x129: 0xb105, // align
|
||||||
|
0x12b: 0x58a04, // icon
|
||||||
|
0x12c: 0x64d02, // h6
|
||||||
|
0x12d: 0x1c404, // cols
|
||||||
|
0x12e: 0x22e0a, // figcaption
|
||||||
|
0x12f: 0x45e09, // onkeydown
|
||||||
|
0x130: 0x66b08, // onsubmit
|
||||||
|
0x131: 0x14d09, // oncanplay
|
||||||
|
0x132: 0x70b03, // sup
|
||||||
|
0x133: 0xc01, // p
|
||||||
|
0x135: 0x40a09, // onemptied
|
||||||
|
0x136: 0x39106, // oncopy
|
||||||
|
0x137: 0x19c04, // cite
|
||||||
|
0x138: 0x3a70a, // ondblclick
|
||||||
|
0x13a: 0x50b0b, // onmousemove
|
||||||
|
0x13c: 0x66d03, // sub
|
||||||
|
0x13d: 0x48703, // rel
|
||||||
|
0x13e: 0x5f08, // optgroup
|
||||||
|
0x142: 0x9c07, // rowspan
|
||||||
|
0x143: 0x37806, // source
|
||||||
|
0x144: 0x21608, // noscript
|
||||||
|
0x145: 0x1a304, // open
|
||||||
|
0x146: 0x20403, // ins
|
||||||
|
0x147: 0x2540d, // foreignObject
|
||||||
|
0x148: 0x5ad0a, // onpopstate
|
||||||
|
0x14a: 0x28d07, // enctype
|
||||||
|
0x14b: 0x2760e, // onautocomplete
|
||||||
|
0x14c: 0x35208, // textarea
|
||||||
|
0x14e: 0x2780c, // autocomplete
|
||||||
|
0x14f: 0x15702, // hr
|
||||||
|
0x150: 0x1de08, // controls
|
||||||
|
0x151: 0x10902, // id
|
||||||
|
0x153: 0x2360c, // onafterprint
|
||||||
|
0x155: 0x2610d, // foreignobject
|
||||||
|
0x156: 0x32707, // marquee
|
||||||
|
0x157: 0x59a07, // onpause
|
||||||
|
0x158: 0x5e602, // dl
|
||||||
|
0x159: 0x5206, // height
|
||||||
|
0x15a: 0x34703, // min
|
||||||
|
0x15b: 0x9307, // dirname
|
||||||
|
0x15c: 0x1f209, // translate
|
||||||
|
0x15d: 0x5604, // html
|
||||||
|
0x15e: 0x34709, // minlength
|
||||||
|
0x15f: 0x48607, // preload
|
||||||
|
0x160: 0x71408, // template
|
||||||
|
0x161: 0x3df0b, // ondragleave
|
||||||
|
0x162: 0x3a02, // rb
|
||||||
|
0x164: 0x5c003, // src
|
||||||
|
0x165: 0x6dd06, // strong
|
||||||
|
0x167: 0x7804, // samp
|
||||||
|
0x168: 0x6f307, // address
|
||||||
|
0x169: 0x55108, // ononline
|
||||||
|
0x16b: 0x1310b, // placeholder
|
||||||
|
0x16c: 0x2c406, // target
|
||||||
|
0x16d: 0x20605, // small
|
||||||
|
0x16e: 0x6ca07, // onwheel
|
||||||
|
0x16f: 0x1c90a, // annotation
|
||||||
|
0x170: 0x4740a, // spellcheck
|
||||||
|
0x171: 0x7207, // details
|
||||||
|
0x172: 0x10306, // canvas
|
||||||
|
0x173: 0x12109, // autofocus
|
||||||
|
0x174: 0xc05, // param
|
||||||
|
0x176: 0x46308, // download
|
||||||
|
0x177: 0x45203, // del
|
||||||
|
0x178: 0x36c07, // onclose
|
||||||
|
0x179: 0xb903, // kbd
|
||||||
|
0x17a: 0x31906, // applet
|
||||||
|
0x17b: 0x2e004, // href
|
||||||
|
0x17c: 0x5f108, // onresize
|
||||||
|
0x17e: 0x49d0c, // onloadeddata
|
||||||
|
0x180: 0xcc02, // tr
|
||||||
|
0x181: 0x2c00a, // formtarget
|
||||||
|
0x182: 0x11005, // title
|
||||||
|
0x183: 0x6ff05, // style
|
||||||
|
0x184: 0xd206, // strike
|
||||||
|
0x185: 0x59e06, // usemap
|
||||||
|
0x186: 0x2fc06, // iframe
|
||||||
|
0x187: 0x1004, // main
|
||||||
|
0x189: 0x7b07, // picture
|
||||||
|
0x18c: 0x31605, // ismap
|
||||||
|
0x18e: 0x4a504, // data
|
||||||
|
0x18f: 0x5905, // label
|
||||||
|
0x191: 0x3d10e, // referrerpolicy
|
||||||
|
0x192: 0x15602, // th
|
||||||
|
0x194: 0x53606, // prompt
|
||||||
|
0x195: 0x56807, // section
|
||||||
|
0x197: 0x6d107, // optimum
|
||||||
|
0x198: 0x2db04, // high
|
||||||
|
0x199: 0x15c02, // h1
|
||||||
|
0x19a: 0x65909, // onstalled
|
||||||
|
0x19b: 0x16d03, // var
|
||||||
|
0x19c: 0x4204, // time
|
||||||
|
0x19e: 0x67402, // ms
|
||||||
|
0x19f: 0x33106, // header
|
||||||
|
0x1a0: 0x4da09, // onmessage
|
||||||
|
0x1a1: 0x1a605, // nonce
|
||||||
|
0x1a2: 0x26e0a, // formaction
|
||||||
|
0x1a3: 0x22006, // center
|
||||||
|
0x1a4: 0x3704, // nobr
|
||||||
|
0x1a5: 0x59505, // table
|
||||||
|
0x1a6: 0x4a907, // listing
|
||||||
|
0x1a7: 0x18106, // legend
|
||||||
|
0x1a9: 0x29b09, // challenge
|
||||||
|
0x1aa: 0x24806, // figure
|
||||||
|
0x1ab: 0xe605, // media
|
||||||
|
0x1ae: 0xd904, // type
|
||||||
|
0x1af: 0x3f04, // font
|
||||||
|
0x1b0: 0x4da0e, // onmessageerror
|
||||||
|
0x1b1: 0x37108, // seamless
|
||||||
|
0x1b2: 0x8703, // dfn
|
||||||
|
0x1b3: 0x5c705, // defer
|
||||||
|
0x1b4: 0xc303, // low
|
||||||
|
0x1b5: 0x19a03, // rtc
|
||||||
|
0x1b6: 0x5230b, // onmouseover
|
||||||
|
0x1b7: 0x2b20a, // novalidate
|
||||||
|
0x1b8: 0x71c0a, // workertype
|
||||||
|
0x1ba: 0x3cd07, // itemref
|
||||||
|
0x1bd: 0x1, // a
|
||||||
|
0x1be: 0x31803, // map
|
||||||
|
0x1bf: 0x400c, // ontimeupdate
|
||||||
|
0x1c0: 0x15e07, // bgsound
|
||||||
|
0x1c1: 0x3206, // keygen
|
||||||
|
0x1c2: 0x2705, // tbody
|
||||||
|
0x1c5: 0x64406, // onshow
|
||||||
|
0x1c7: 0x2501, // s
|
||||||
|
0x1c8: 0x6607, // pattern
|
||||||
|
0x1cc: 0x14d10, // oncanplaythrough
|
||||||
|
0x1ce: 0x2d702, // dd
|
||||||
|
0x1cf: 0x6f906, // srcset
|
||||||
|
0x1d0: 0x17003, // big
|
||||||
|
0x1d2: 0x65108, // sortable
|
||||||
|
0x1d3: 0x48007, // onkeyup
|
||||||
|
0x1d5: 0x5a406, // onplay
|
||||||
|
0x1d7: 0x4b804, // meta
|
||||||
|
0x1d8: 0x40306, // ondrop
|
||||||
|
0x1da: 0x60008, // onscroll
|
||||||
|
0x1db: 0x1fb0b, // crossorigin
|
||||||
|
0x1dc: 0x5730a, // onpageshow
|
||||||
|
0x1dd: 0x4, // abbr
|
||||||
|
0x1de: 0x9202, // td
|
||||||
|
0x1df: 0x58b0f, // contenteditable
|
||||||
|
0x1e0: 0x27206, // action
|
||||||
|
0x1e1: 0x1400b, // playsinline
|
||||||
|
0x1e2: 0x43107, // onfocus
|
||||||
|
0x1e3: 0x2e008, // hreflang
|
||||||
|
0x1e5: 0x5160a, // onmouseout
|
||||||
|
0x1e6: 0x5ea07, // onreset
|
||||||
|
0x1e7: 0x13c08, // autoplay
|
||||||
|
0x1e8: 0x63109, // onseeking
|
||||||
|
0x1ea: 0x67506, // scoped
|
||||||
|
0x1ec: 0x30a, // radiogroup
|
||||||
|
0x1ee: 0x3800b, // contextmenu
|
||||||
|
0x1ef: 0x52e09, // onmouseup
|
||||||
|
0x1f1: 0x2ca06, // hgroup
|
||||||
|
0x1f2: 0x2080f, // allowfullscreen
|
||||||
|
0x1f3: 0x4be08, // tabindex
|
||||||
|
0x1f6: 0x30f07, // isindex
|
||||||
|
0x1f7: 0x1a0e, // accept-charset
|
||||||
|
0x1f8: 0x2ae0e, // formnovalidate
|
||||||
|
0x1fb: 0x1c90e, // annotation-xml
|
||||||
|
0x1fc: 0x6e05, // embed
|
||||||
|
0x1fd: 0x21806, // script
|
||||||
|
0x1fe: 0xbb06, // dialog
|
||||||
|
0x1ff: 0x1d707, // command
|
||||||
|
}
|
||||||
|
|
||||||
|
const atomText = "abbradiogrouparamainavalueaccept-charsetbodyaccesskeygenobrb" +
|
||||||
|
"asefontimeupdateviacacheightmlabelooptgroupatternoembedetail" +
|
||||||
|
"sampictureversedfnoframesetdirnameterowspanomoduleacronymali" +
|
||||||
|
"gnmarkbdialogallowpaymentrequestrikeytypeallowusermediagroup" +
|
||||||
|
"ingaltfooterubyasyncanvasidefaultitleaudioncancelautofocusan" +
|
||||||
|
"dboxmplaceholderautoplaysinlinebdoncanplaythrough1bgsoundisa" +
|
||||||
|
"bledivarbigblinkindraggablegendblockquotebuttonabortcitempro" +
|
||||||
|
"penoncecolgrouplaintextrackcolorcolspannotation-xmlcommandco" +
|
||||||
|
"ntrolshapecoordslotranslatecrossoriginsmallowfullscreenoscri" +
|
||||||
|
"ptfacenterfieldsetfigcaptionafterprintegrityfigurequiredfore" +
|
||||||
|
"ignObjectforeignobjectformactionautocompleteerrorformenctype" +
|
||||||
|
"mustmatchallengeformmethodformnovalidatetimeformtargethgroup" +
|
||||||
|
"osterhiddenhigh2hreflanghttp-equivideonclickiframeimageimgly" +
|
||||||
|
"ph3isindexismappletitemtypemarqueematheadersortedmaxlength4m" +
|
||||||
|
"inlength5mtextareadonlymultiplemutedoncloseamlessourceoncont" +
|
||||||
|
"extmenuitemidoncopyoncuechangeoncutondblclickondragendondrag" +
|
||||||
|
"enterondragexitemreferrerpolicyondragleaveondragoverondragst" +
|
||||||
|
"articleondropzonemptiedondurationchangeonendedonerroronfocus" +
|
||||||
|
"paceronhashchangeoninputmodeloninvalidonkeydownloadonkeypres" +
|
||||||
|
"spellcheckedonkeyupreloadonlanguagechangeonloadeddatalisting" +
|
||||||
|
"onloadedmetadatabindexonloadendonloadstartonmessageerroronmo" +
|
||||||
|
"usedownonmouseenteronmouseleaveonmousemoveonmouseoutputonmou" +
|
||||||
|
"seoveronmouseupromptonmousewheelonofflineononlineonpagehides" +
|
||||||
|
"classectionbluronpageshowbronpastepublicontenteditableonpaus" +
|
||||||
|
"emaponplayingonpopstateonprogressrcdocodeferonratechangeonre" +
|
||||||
|
"jectionhandledonresetonresizesrclangonscrollonsecuritypolicy" +
|
||||||
|
"violationauxclickonseekedonseekingonselectedonshowidth6onsor" +
|
||||||
|
"tableonstalledonstorageonsubmitemscopedonsuspendontoggleonun" +
|
||||||
|
"handledrejectionbeforeprintonunloadonvolumechangeonwaitingon" +
|
||||||
|
"wheeloptimumanifestrongoptionbeforeunloaddressrcsetstylesumm" +
|
||||||
|
"arysupsvgsystemplateworkertypewrap"
|
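For reference (not part of this commit): each entry in the generated atom table above appears to pack a byte offset into atomText in its high bits and the name length in its low 8 bits; the packing layout is inferred from the table comments, not stated in the diff. A minimal decode sketch:

package main

import "fmt"

func main() {
	v := uint32(0x34703) // the table entry commented "// min" above
	off, n := v>>8, v&0xff
	fmt.Printf("offset=%d length=%d\n", off, n) // offset=839 length=3, i.e. "min" inside atomText
}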
104
vendor/golang.org/x/net/html/const.go
generated
vendored
Normal file
@ -0,0 +1,104 @@
// Copyright 2011 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package html
|
||||||
|
|
||||||
|
// Section 12.2.4.2 of the HTML5 specification says "The following elements
|
||||||
|
// have varying levels of special parsing rules".
|
||||||
|
// https://html.spec.whatwg.org/multipage/syntax.html#the-stack-of-open-elements
|
||||||
|
var isSpecialElementMap = map[string]bool{
|
||||||
|
"address": true,
|
||||||
|
"applet": true,
|
||||||
|
"area": true,
|
||||||
|
"article": true,
|
||||||
|
"aside": true,
|
||||||
|
"base": true,
|
||||||
|
"basefont": true,
|
||||||
|
"bgsound": true,
|
||||||
|
"blockquote": true,
|
||||||
|
"body": true,
|
||||||
|
"br": true,
|
||||||
|
"button": true,
|
||||||
|
"caption": true,
|
||||||
|
"center": true,
|
||||||
|
"col": true,
|
||||||
|
"colgroup": true,
|
||||||
|
"dd": true,
|
||||||
|
"details": true,
|
||||||
|
"dir": true,
|
||||||
|
"div": true,
|
||||||
|
"dl": true,
|
||||||
|
"dt": true,
|
||||||
|
"embed": true,
|
||||||
|
"fieldset": true,
|
||||||
|
"figcaption": true,
|
||||||
|
"figure": true,
|
||||||
|
"footer": true,
|
||||||
|
"form": true,
|
||||||
|
"frame": true,
|
||||||
|
"frameset": true,
|
||||||
|
"h1": true,
|
||||||
|
"h2": true,
|
||||||
|
"h3": true,
|
||||||
|
"h4": true,
|
||||||
|
"h5": true,
|
||||||
|
"h6": true,
|
||||||
|
"head": true,
|
||||||
|
"header": true,
|
||||||
|
"hgroup": true,
|
||||||
|
"hr": true,
|
||||||
|
"html": true,
|
||||||
|
"iframe": true,
|
||||||
|
"img": true,
|
||||||
|
"input": true,
|
||||||
|
"isindex": true, // The 'isindex' element has been removed, but keep it for backwards compatibility.
|
||||||
|
"keygen": true,
|
||||||
|
"li": true,
|
||||||
|
"link": true,
|
||||||
|
"listing": true,
|
||||||
|
"main": true,
|
||||||
|
"marquee": true,
|
||||||
|
"menu": true,
|
||||||
|
"meta": true,
|
||||||
|
"nav": true,
|
||||||
|
"noembed": true,
|
||||||
|
"noframes": true,
|
||||||
|
"noscript": true,
|
||||||
|
"object": true,
|
||||||
|
"ol": true,
|
||||||
|
"p": true,
|
||||||
|
"param": true,
|
||||||
|
"plaintext": true,
|
||||||
|
"pre": true,
|
||||||
|
"script": true,
|
||||||
|
"section": true,
|
||||||
|
"select": true,
|
||||||
|
"source": true,
|
||||||
|
"style": true,
|
||||||
|
"summary": true,
|
||||||
|
"table": true,
|
||||||
|
"tbody": true,
|
||||||
|
"td": true,
|
||||||
|
"template": true,
|
||||||
|
"textarea": true,
|
||||||
|
"tfoot": true,
|
||||||
|
"th": true,
|
||||||
|
"thead": true,
|
||||||
|
"title": true,
|
||||||
|
"tr": true,
|
||||||
|
"track": true,
|
||||||
|
"ul": true,
|
||||||
|
"wbr": true,
|
||||||
|
"xmp": true,
|
||||||
|
}
|
||||||
|
|
||||||
|
func isSpecialElement(element *Node) bool {
|
||||||
|
switch element.Namespace {
|
||||||
|
case "", "html":
|
||||||
|
return isSpecialElementMap[element.Data]
|
||||||
|
case "svg":
|
||||||
|
return element.Data == "foreignObject"
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
106
vendor/golang.org/x/net/html/doc.go
generated
vendored
Normal file
@ -0,0 +1,106 @@
// Copyright 2010 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
/*
|
||||||
|
Package html implements an HTML5-compliant tokenizer and parser.
|
||||||
|
|
||||||
|
Tokenization is done by creating a Tokenizer for an io.Reader r. It is the
|
||||||
|
caller's responsibility to ensure that r provides UTF-8 encoded HTML.
|
||||||
|
|
||||||
|
z := html.NewTokenizer(r)
|
||||||
|
|
||||||
|
Given a Tokenizer z, the HTML is tokenized by repeatedly calling z.Next(),
|
||||||
|
which parses the next token and returns its type, or an error:
|
||||||
|
|
||||||
|
for {
|
||||||
|
tt := z.Next()
|
||||||
|
if tt == html.ErrorToken {
|
||||||
|
// ...
|
||||||
|
return ...
|
||||||
|
}
|
||||||
|
// Process the current token.
|
||||||
|
}
|
||||||
|
|
||||||
|
There are two APIs for retrieving the current token. The high-level API is to
|
||||||
|
call Token; the low-level API is to call Text or TagName / TagAttr. Both APIs
|
||||||
|
allow optionally calling Raw after Next but before Token, Text, TagName, or
|
||||||
|
TagAttr. In EBNF notation, the valid call sequence per token is:
|
||||||
|
|
||||||
|
Next {Raw} [ Token | Text | TagName {TagAttr} ]
|
||||||
|
|
||||||
|
Token returns an independent data structure that completely describes a token.
|
||||||
|
Entities (such as "&lt;") are unescaped, tag names and attribute keys are
|
||||||
|
lower-cased, and attributes are collected into a []Attribute. For example:
|
||||||
|
|
||||||
|
for {
|
||||||
|
if z.Next() == html.ErrorToken {
|
||||||
|
// Returning io.EOF indicates success.
|
||||||
|
return z.Err()
|
||||||
|
}
|
||||||
|
emitToken(z.Token())
|
||||||
|
}
|
||||||
|
|
||||||
|
The low-level API performs fewer allocations and copies, but the contents of
|
||||||
|
the []byte values returned by Text, TagName and TagAttr may change on the next
|
||||||
|
call to Next. For example, to extract an HTML page's anchor text:
|
||||||
|
|
||||||
|
depth := 0
|
||||||
|
for {
|
||||||
|
tt := z.Next()
|
||||||
|
switch tt {
|
||||||
|
case html.ErrorToken:
|
||||||
|
return z.Err()
|
||||||
|
case html.TextToken:
|
||||||
|
if depth > 0 {
|
||||||
|
// emitBytes should copy the []byte it receives,
|
||||||
|
// if it doesn't process it immediately.
|
||||||
|
emitBytes(z.Text())
|
||||||
|
}
|
||||||
|
case html.StartTagToken, html.EndTagToken:
|
||||||
|
tn, _ := z.TagName()
|
||||||
|
if len(tn) == 1 && tn[0] == 'a' {
|
||||||
|
if tt == html.StartTagToken {
|
||||||
|
depth++
|
||||||
|
} else {
|
||||||
|
depth--
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Parsing is done by calling Parse with an io.Reader, which returns the root of
|
||||||
|
the parse tree (the document element) as a *Node. It is the caller's
|
||||||
|
responsibility to ensure that the Reader provides UTF-8 encoded HTML. For
|
||||||
|
example, to process each anchor node in depth-first order:
|
||||||
|
|
||||||
|
doc, err := html.Parse(r)
|
||||||
|
if err != nil {
|
||||||
|
// ...
|
||||||
|
}
|
||||||
|
var f func(*html.Node)
|
||||||
|
f = func(n *html.Node) {
|
||||||
|
if n.Type == html.ElementNode && n.Data == "a" {
|
||||||
|
// Do something with n...
|
||||||
|
}
|
||||||
|
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||||
|
f(c)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
f(doc)
|
||||||
|
|
||||||
|
The relevant specifications include:
|
||||||
|
https://html.spec.whatwg.org/multipage/syntax.html and
|
||||||
|
https://html.spec.whatwg.org/multipage/syntax.html#tokenization
|
||||||
|
*/
|
||||||
|
package html // import "golang.org/x/net/html"
|
||||||
|
|
||||||
|
// The tokenization algorithm implemented by this package is not a line-by-line
|
||||||
|
// transliteration of the relatively verbose state-machine in the WHATWG
|
||||||
|
// specification. A more direct approach is used instead, where the program
|
||||||
|
// counter implies the state, such as whether it is tokenizing a tag or a text
|
||||||
|
// node. Specification compliance is verified by checking expected and actual
|
||||||
|
// outputs over a test suite rather than aiming for algorithmic fidelity.
|
||||||
|
|
||||||
|
// TODO(nigeltao): Does a DOM API belong in this package or a separate one?
|
||||||
|
// TODO(nigeltao): How does parsing interact with a JavaScript engine?
|
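As a usage sketch (not part of this commit, and assuming the vendored package is imported as golang.org/x/net/html exactly as its doc comment above describes), here is a small program that prints the href of every anchor element, using DataAtom and the atom table shown earlier:

package main

import (
	"fmt"
	"os"

	"golang.org/x/net/html"
	"golang.org/x/net/html/atom"
)

func main() {
	doc, err := html.Parse(os.Stdin)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	var walk func(*html.Node)
	walk = func(n *html.Node) {
		// DataAtom is the precomputed atom for known tag names.
		if n.Type == html.ElementNode && n.DataAtom == atom.A {
			for _, a := range n.Attr {
				if a.Key == "href" {
					fmt.Println(a.Val)
				}
			}
		}
		for c := n.FirstChild; c != nil; c = c.NextSibling {
			walk(c)
		}
	}
	walk(doc)
}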
156
vendor/golang.org/x/net/html/doctype.go
generated
vendored
Normal file
@ -0,0 +1,156 @@
// Copyright 2011 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package html
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// parseDoctype parses the data from a DoctypeToken into a name,
|
||||||
|
// public identifier, and system identifier. It returns a Node whose Type
|
||||||
|
// is DoctypeNode, whose Data is the name, and which has attributes
|
||||||
|
// named "system" and "public" for the two identifiers if they were present.
|
||||||
|
// quirks is whether the document should be parsed in "quirks mode".
|
||||||
|
func parseDoctype(s string) (n *Node, quirks bool) {
|
||||||
|
n = &Node{Type: DoctypeNode}
|
||||||
|
|
||||||
|
// Find the name.
|
||||||
|
space := strings.IndexAny(s, whitespace)
|
||||||
|
if space == -1 {
|
||||||
|
space = len(s)
|
||||||
|
}
|
||||||
|
n.Data = s[:space]
|
||||||
|
// The comparison to "html" is case-sensitive.
|
||||||
|
if n.Data != "html" {
|
||||||
|
quirks = true
|
||||||
|
}
|
||||||
|
n.Data = strings.ToLower(n.Data)
|
||||||
|
s = strings.TrimLeft(s[space:], whitespace)
|
||||||
|
|
||||||
|
if len(s) < 6 {
|
||||||
|
// It can't start with "PUBLIC" or "SYSTEM".
|
||||||
|
// Ignore the rest of the string.
|
||||||
|
return n, quirks || s != ""
|
||||||
|
}
|
||||||
|
|
||||||
|
key := strings.ToLower(s[:6])
|
||||||
|
s = s[6:]
|
||||||
|
for key == "public" || key == "system" {
|
||||||
|
s = strings.TrimLeft(s, whitespace)
|
||||||
|
if s == "" {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
quote := s[0]
|
||||||
|
if quote != '"' && quote != '\'' {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
s = s[1:]
|
||||||
|
q := strings.IndexRune(s, rune(quote))
|
||||||
|
var id string
|
||||||
|
if q == -1 {
|
||||||
|
id = s
|
||||||
|
s = ""
|
||||||
|
} else {
|
||||||
|
id = s[:q]
|
||||||
|
s = s[q+1:]
|
||||||
|
}
|
||||||
|
n.Attr = append(n.Attr, Attribute{Key: key, Val: id})
|
||||||
|
if key == "public" {
|
||||||
|
key = "system"
|
||||||
|
} else {
|
||||||
|
key = ""
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if key != "" || s != "" {
|
||||||
|
quirks = true
|
||||||
|
} else if len(n.Attr) > 0 {
|
||||||
|
if n.Attr[0].Key == "public" {
|
||||||
|
public := strings.ToLower(n.Attr[0].Val)
|
||||||
|
switch public {
|
||||||
|
case "-//w3o//dtd w3 html strict 3.0//en//", "-/w3d/dtd html 4.0 transitional/en", "html":
|
||||||
|
quirks = true
|
||||||
|
default:
|
||||||
|
for _, q := range quirkyIDs {
|
||||||
|
if strings.HasPrefix(public, q) {
|
||||||
|
quirks = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// The following two public IDs only cause quirks mode if there is no system ID.
|
||||||
|
if len(n.Attr) == 1 && (strings.HasPrefix(public, "-//w3c//dtd html 4.01 frameset//") ||
|
||||||
|
strings.HasPrefix(public, "-//w3c//dtd html 4.01 transitional//")) {
|
||||||
|
quirks = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if lastAttr := n.Attr[len(n.Attr)-1]; lastAttr.Key == "system" &&
|
||||||
|
strings.ToLower(lastAttr.Val) == "http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd" {
|
||||||
|
quirks = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return n, quirks
|
||||||
|
}
|
||||||
|
|
||||||
|
// quirkyIDs is a list of public doctype identifiers that cause a document
|
||||||
|
// to be interpreted in quirks mode. The identifiers should be in lower case.
|
||||||
|
var quirkyIDs = []string{
|
||||||
|
"+//silmaril//dtd html pro v0r11 19970101//",
|
||||||
|
"-//advasoft ltd//dtd html 3.0 aswedit + extensions//",
|
||||||
|
"-//as//dtd html 3.0 aswedit + extensions//",
|
||||||
|
"-//ietf//dtd html 2.0 level 1//",
|
||||||
|
"-//ietf//dtd html 2.0 level 2//",
|
||||||
|
"-//ietf//dtd html 2.0 strict level 1//",
|
||||||
|
"-//ietf//dtd html 2.0 strict level 2//",
|
||||||
|
"-//ietf//dtd html 2.0 strict//",
|
||||||
|
"-//ietf//dtd html 2.0//",
|
||||||
|
"-//ietf//dtd html 2.1e//",
|
||||||
|
"-//ietf//dtd html 3.0//",
|
||||||
|
"-//ietf//dtd html 3.2 final//",
|
||||||
|
"-//ietf//dtd html 3.2//",
|
||||||
|
"-//ietf//dtd html 3//",
|
||||||
|
"-//ietf//dtd html level 0//",
|
||||||
|
"-//ietf//dtd html level 1//",
|
||||||
|
"-//ietf//dtd html level 2//",
|
||||||
|
"-//ietf//dtd html level 3//",
|
||||||
|
"-//ietf//dtd html strict level 0//",
|
||||||
|
"-//ietf//dtd html strict level 1//",
|
||||||
|
"-//ietf//dtd html strict level 2//",
|
||||||
|
"-//ietf//dtd html strict level 3//",
|
||||||
|
"-//ietf//dtd html strict//",
|
||||||
|
"-//ietf//dtd html//",
|
||||||
|
"-//metrius//dtd metrius presentational//",
|
||||||
|
"-//microsoft//dtd internet explorer 2.0 html strict//",
|
||||||
|
"-//microsoft//dtd internet explorer 2.0 html//",
|
||||||
|
"-//microsoft//dtd internet explorer 2.0 tables//",
|
||||||
|
"-//microsoft//dtd internet explorer 3.0 html strict//",
|
||||||
|
"-//microsoft//dtd internet explorer 3.0 html//",
|
||||||
|
"-//microsoft//dtd internet explorer 3.0 tables//",
|
||||||
|
"-//netscape comm. corp.//dtd html//",
|
||||||
|
"-//netscape comm. corp.//dtd strict html//",
|
||||||
|
"-//o'reilly and associates//dtd html 2.0//",
|
||||||
|
"-//o'reilly and associates//dtd html extended 1.0//",
|
||||||
|
"-//o'reilly and associates//dtd html extended relaxed 1.0//",
|
||||||
|
"-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//",
|
||||||
|
"-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//",
|
||||||
|
"-//spyglass//dtd html 2.0 extended//",
|
||||||
|
"-//sq//dtd html 2.0 hotmetal + extensions//",
|
||||||
|
"-//sun microsystems corp.//dtd hotjava html//",
|
||||||
|
"-//sun microsystems corp.//dtd hotjava strict html//",
|
||||||
|
"-//w3c//dtd html 3 1995-03-24//",
|
||||||
|
"-//w3c//dtd html 3.2 draft//",
|
||||||
|
"-//w3c//dtd html 3.2 final//",
|
||||||
|
"-//w3c//dtd html 3.2//",
|
||||||
|
"-//w3c//dtd html 3.2s draft//",
|
||||||
|
"-//w3c//dtd html 4.0 frameset//",
|
||||||
|
"-//w3c//dtd html 4.0 transitional//",
|
||||||
|
"-//w3c//dtd html experimental 19960712//",
|
||||||
|
"-//w3c//dtd html experimental 970421//",
|
||||||
|
"-//w3c//dtd w3 html//",
|
||||||
|
"-//w3o//dtd w3 html 3.0//",
|
||||||
|
"-//webtechs//dtd mozilla html 2.0//",
|
||||||
|
"-//webtechs//dtd mozilla html//",
|
||||||
|
}
|
2253
vendor/golang.org/x/net/html/entity.go
generated
vendored
Normal file
File diff suppressed because it is too large
258
vendor/golang.org/x/net/html/escape.go
generated
vendored
Normal file
@ -0,0 +1,258 @@
// Copyright 2010 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package html
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"strings"
|
||||||
|
"unicode/utf8"
|
||||||
|
)
|
||||||
|
|
||||||
|
// These replacements permit compatibility with old numeric entities that
|
||||||
|
// assumed Windows-1252 encoding.
|
||||||
|
// https://html.spec.whatwg.org/multipage/syntax.html#consume-a-character-reference
|
||||||
|
var replacementTable = [...]rune{
|
||||||
|
'\u20AC', // First entry is what 0x80 should be replaced with.
|
||||||
|
'\u0081',
|
||||||
|
'\u201A',
|
||||||
|
'\u0192',
|
||||||
|
'\u201E',
|
||||||
|
'\u2026',
|
||||||
|
'\u2020',
|
||||||
|
'\u2021',
|
||||||
|
'\u02C6',
|
||||||
|
'\u2030',
|
||||||
|
'\u0160',
|
||||||
|
'\u2039',
|
||||||
|
'\u0152',
|
||||||
|
'\u008D',
|
||||||
|
'\u017D',
|
||||||
|
'\u008F',
|
||||||
|
'\u0090',
|
||||||
|
'\u2018',
|
||||||
|
'\u2019',
|
||||||
|
'\u201C',
|
||||||
|
'\u201D',
|
||||||
|
'\u2022',
|
||||||
|
'\u2013',
|
||||||
|
'\u2014',
|
||||||
|
'\u02DC',
|
||||||
|
'\u2122',
|
||||||
|
'\u0161',
|
||||||
|
'\u203A',
|
||||||
|
'\u0153',
|
||||||
|
'\u009D',
|
||||||
|
'\u017E',
|
||||||
|
'\u0178', // Last entry is 0x9F.
|
||||||
|
// 0x00->'\uFFFD' is handled programmatically.
|
||||||
|
// 0x0D->'\u000D' is a no-op.
|
||||||
|
}
|
||||||
|
|
||||||
|
// unescapeEntity reads an entity like "&lt;" from b[src:] and writes the
|
||||||
|
// corresponding "<" to b[dst:], returning the incremented dst and src cursors.
|
||||||
|
// Precondition: b[src] == '&' && dst <= src.
|
||||||
|
// attribute should be true if parsing an attribute value.
|
||||||
|
func unescapeEntity(b []byte, dst, src int, attribute bool) (dst1, src1 int) {
|
||||||
|
// https://html.spec.whatwg.org/multipage/syntax.html#consume-a-character-reference
|
||||||
|
|
||||||
|
// i starts at 1 because we already know that s[0] == '&'.
|
||||||
|
i, s := 1, b[src:]
|
||||||
|
|
||||||
|
if len(s) <= 1 {
|
||||||
|
b[dst] = b[src]
|
||||||
|
return dst + 1, src + 1
|
||||||
|
}
|
||||||
|
|
||||||
|
if s[i] == '#' {
|
||||||
|
if len(s) <= 3 { // We need to have at least "&#.".
|
||||||
|
b[dst] = b[src]
|
||||||
|
return dst + 1, src + 1
|
||||||
|
}
|
||||||
|
i++
|
||||||
|
c := s[i]
|
||||||
|
hex := false
|
||||||
|
if c == 'x' || c == 'X' {
|
||||||
|
hex = true
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
|
||||||
|
x := '\x00'
|
||||||
|
for i < len(s) {
|
||||||
|
c = s[i]
|
||||||
|
i++
|
||||||
|
if hex {
|
||||||
|
if '0' <= c && c <= '9' {
|
||||||
|
x = 16*x + rune(c) - '0'
|
||||||
|
continue
|
||||||
|
} else if 'a' <= c && c <= 'f' {
|
||||||
|
x = 16*x + rune(c) - 'a' + 10
|
||||||
|
continue
|
||||||
|
} else if 'A' <= c && c <= 'F' {
|
||||||
|
x = 16*x + rune(c) - 'A' + 10
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
} else if '0' <= c && c <= '9' {
|
||||||
|
x = 10*x + rune(c) - '0'
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if c != ';' {
|
||||||
|
i--
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
if i <= 3 { // No characters matched.
|
||||||
|
b[dst] = b[src]
|
||||||
|
return dst + 1, src + 1
|
||||||
|
}
|
||||||
|
|
||||||
|
if 0x80 <= x && x <= 0x9F {
|
||||||
|
// Replace characters from Windows-1252 with UTF-8 equivalents.
|
||||||
|
x = replacementTable[x-0x80]
|
||||||
|
} else if x == 0 || (0xD800 <= x && x <= 0xDFFF) || x > 0x10FFFF {
|
||||||
|
// Replace invalid characters with the replacement character.
|
||||||
|
x = '\uFFFD'
|
||||||
|
}
|
||||||
|
|
||||||
|
return dst + utf8.EncodeRune(b[dst:], x), src + i
|
||||||
|
}
|
||||||
|
|
||||||
|
// Consume the maximum number of characters possible, with the
|
||||||
|
// consumed characters matching one of the named references.
|
||||||
|
|
||||||
|
for i < len(s) {
|
||||||
|
c := s[i]
|
||||||
|
i++
|
||||||
|
// Lower-cased characters are more common in entities, so we check for them first.
|
||||||
|
if 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || '0' <= c && c <= '9' {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if c != ';' {
|
||||||
|
i--
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
entityName := string(s[1:i])
|
||||||
|
if entityName == "" {
|
||||||
|
// No-op.
|
||||||
|
} else if attribute && entityName[len(entityName)-1] != ';' && len(s) > i && s[i] == '=' {
|
||||||
|
// No-op.
|
||||||
|
} else if x := entity[entityName]; x != 0 {
|
||||||
|
return dst + utf8.EncodeRune(b[dst:], x), src + i
|
||||||
|
} else if x := entity2[entityName]; x[0] != 0 {
|
||||||
|
dst1 := dst + utf8.EncodeRune(b[dst:], x[0])
|
||||||
|
return dst1 + utf8.EncodeRune(b[dst1:], x[1]), src + i
|
||||||
|
} else if !attribute {
|
||||||
|
maxLen := len(entityName) - 1
|
||||||
|
if maxLen > longestEntityWithoutSemicolon {
|
||||||
|
maxLen = longestEntityWithoutSemicolon
|
||||||
|
}
|
||||||
|
for j := maxLen; j > 1; j-- {
|
||||||
|
if x := entity[entityName[:j]]; x != 0 {
|
||||||
|
return dst + utf8.EncodeRune(b[dst:], x), src + j + 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
dst1, src1 = dst+i, src+i
|
||||||
|
copy(b[dst:dst1], b[src:src1])
|
||||||
|
return dst1, src1
|
||||||
|
}
|
||||||
|
|
||||||
|
// unescape unescapes b's entities in-place, so that "a&lt;b" becomes "a<b".
|
||||||
|
// attribute should be true if parsing an attribute value.
|
||||||
|
func unescape(b []byte, attribute bool) []byte {
|
||||||
|
for i, c := range b {
|
||||||
|
if c == '&' {
|
||||||
|
dst, src := unescapeEntity(b, i, i, attribute)
|
||||||
|
for src < len(b) {
|
||||||
|
c := b[src]
|
||||||
|
if c == '&' {
|
||||||
|
dst, src = unescapeEntity(b, dst, src, attribute)
|
||||||
|
} else {
|
||||||
|
b[dst] = c
|
||||||
|
dst, src = dst+1, src+1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return b[0:dst]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
// lower lower-cases the A-Z bytes in b in-place, so that "aBc" becomes "abc".
|
||||||
|
func lower(b []byte) []byte {
|
||||||
|
for i, c := range b {
|
||||||
|
if 'A' <= c && c <= 'Z' {
|
||||||
|
b[i] = c + 'a' - 'A'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
const escapedChars = "&'<>\"\r"
|
||||||
|
|
||||||
|
func escape(w writer, s string) error {
|
||||||
|
i := strings.IndexAny(s, escapedChars)
|
||||||
|
for i != -1 {
|
||||||
|
if _, err := w.WriteString(s[:i]); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
var esc string
|
||||||
|
switch s[i] {
|
||||||
|
case '&':
|
||||||
|
esc = "&"
|
||||||
|
case '\'':
|
||||||
|
// "'" is shorter than "'" and apos was not in HTML until HTML5.
|
||||||
|
esc = "&#39;"
|
||||||
|
case '<':
|
||||||
|
esc = "<"
|
||||||
|
case '>':
|
||||||
|
esc = ">"
|
||||||
|
case '"':
|
||||||
|
// """ is shorter than """.
|
||||||
|
esc = """
|
||||||
|
case '\r':
|
||||||
|
esc = " "
|
||||||
|
default:
|
||||||
|
panic("unrecognized escape character")
|
||||||
|
}
|
||||||
|
s = s[i+1:]
|
||||||
|
if _, err := w.WriteString(esc); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
i = strings.IndexAny(s, escapedChars)
|
||||||
|
}
|
||||||
|
_, err := w.WriteString(s)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// EscapeString escapes special characters like "<" to become "&lt;". It
|
||||||
|
// escapes only five such characters: <, >, &, ' and ".
|
||||||
|
// UnescapeString(EscapeString(s)) == s always holds, but the converse isn't
|
||||||
|
// always true.
|
||||||
|
func EscapeString(s string) string {
|
||||||
|
if strings.IndexAny(s, escapedChars) == -1 {
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
var buf bytes.Buffer
|
||||||
|
escape(&buf, s)
|
||||||
|
return buf.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// UnescapeString unescapes entities like "&lt;" to become "<". It unescapes a
|
||||||
|
// larger range of entities than EscapeString escapes. For example, "&aacute;"
|
||||||
|
// unescapes to "á", as does "&#225;" and "&#xE1;".
|
||||||
|
// UnescapeString(EscapeString(s)) == s always holds, but the converse isn't
|
||||||
|
// always true.
|
||||||
|
func UnescapeString(s string) string {
|
||||||
|
for _, c := range s {
|
||||||
|
if c == '&' {
|
||||||
|
return string(unescape([]byte(s), false))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return s
|
||||||
|
}
|
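A quick sketch (not part of this commit) of the EscapeString/UnescapeString round trip promised by the comments above; the example string is made up:

package main

import (
	"fmt"

	"golang.org/x/net/html"
)

func main() {
	s := `He said: "a < b & c".`
	esc := html.EscapeString(s)                // escapes &, ', <, > and "
	fmt.Println(esc)                           // He said: &#34;a &lt; b &amp; c&#34;.
	fmt.Println(html.UnescapeString(esc) == s) // true
}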
226
vendor/golang.org/x/net/html/foreign.go
generated
vendored
Normal file
@ -0,0 +1,226 @@
// Copyright 2011 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package html
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
func adjustAttributeNames(aa []Attribute, nameMap map[string]string) {
|
||||||
|
for i := range aa {
|
||||||
|
if newName, ok := nameMap[aa[i].Key]; ok {
|
||||||
|
aa[i].Key = newName
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func adjustForeignAttributes(aa []Attribute) {
|
||||||
|
for i, a := range aa {
|
||||||
|
if a.Key == "" || a.Key[0] != 'x' {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
switch a.Key {
|
||||||
|
case "xlink:actuate", "xlink:arcrole", "xlink:href", "xlink:role", "xlink:show",
|
||||||
|
"xlink:title", "xlink:type", "xml:base", "xml:lang", "xml:space", "xmlns:xlink":
|
||||||
|
j := strings.Index(a.Key, ":")
|
||||||
|
aa[i].Namespace = a.Key[:j]
|
||||||
|
aa[i].Key = a.Key[j+1:]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func htmlIntegrationPoint(n *Node) bool {
|
||||||
|
if n.Type != ElementNode {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
switch n.Namespace {
|
||||||
|
case "math":
|
||||||
|
if n.Data == "annotation-xml" {
|
||||||
|
for _, a := range n.Attr {
|
||||||
|
if a.Key == "encoding" {
|
||||||
|
val := strings.ToLower(a.Val)
|
||||||
|
if val == "text/html" || val == "application/xhtml+xml" {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case "svg":
|
||||||
|
switch n.Data {
|
||||||
|
case "desc", "foreignObject", "title":
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func mathMLTextIntegrationPoint(n *Node) bool {
|
||||||
|
if n.Namespace != "math" {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
switch n.Data {
|
||||||
|
case "mi", "mo", "mn", "ms", "mtext":
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Section 12.2.6.5.
|
||||||
|
var breakout = map[string]bool{
|
||||||
|
"b": true,
|
||||||
|
"big": true,
|
||||||
|
"blockquote": true,
|
||||||
|
"body": true,
|
||||||
|
"br": true,
|
||||||
|
"center": true,
|
||||||
|
"code": true,
|
||||||
|
"dd": true,
|
||||||
|
"div": true,
|
||||||
|
"dl": true,
|
||||||
|
"dt": true,
|
||||||
|
"em": true,
|
||||||
|
"embed": true,
|
||||||
|
"h1": true,
|
||||||
|
"h2": true,
|
||||||
|
"h3": true,
|
||||||
|
"h4": true,
|
||||||
|
"h5": true,
|
||||||
|
"h6": true,
|
||||||
|
"head": true,
|
||||||
|
"hr": true,
|
||||||
|
"i": true,
|
||||||
|
"img": true,
|
||||||
|
"li": true,
|
||||||
|
"listing": true,
|
||||||
|
"menu": true,
|
||||||
|
"meta": true,
|
||||||
|
"nobr": true,
|
||||||
|
"ol": true,
|
||||||
|
"p": true,
|
||||||
|
"pre": true,
|
||||||
|
"ruby": true,
|
||||||
|
"s": true,
|
||||||
|
"small": true,
|
||||||
|
"span": true,
|
||||||
|
"strong": true,
|
||||||
|
"strike": true,
|
||||||
|
"sub": true,
|
||||||
|
"sup": true,
|
||||||
|
"table": true,
|
||||||
|
"tt": true,
|
||||||
|
"u": true,
|
||||||
|
"ul": true,
|
||||||
|
"var": true,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Section 12.2.6.5.
|
||||||
|
var svgTagNameAdjustments = map[string]string{
|
||||||
|
"altglyph": "altGlyph",
|
||||||
|
"altglyphdef": "altGlyphDef",
|
||||||
|
"altglyphitem": "altGlyphItem",
|
||||||
|
"animatecolor": "animateColor",
|
||||||
|
"animatemotion": "animateMotion",
|
||||||
|
"animatetransform": "animateTransform",
|
||||||
|
"clippath": "clipPath",
|
||||||
|
"feblend": "feBlend",
|
||||||
|
"fecolormatrix": "feColorMatrix",
|
||||||
|
"fecomponenttransfer": "feComponentTransfer",
|
||||||
|
"fecomposite": "feComposite",
|
||||||
|
"feconvolvematrix": "feConvolveMatrix",
|
||||||
|
"fediffuselighting": "feDiffuseLighting",
|
||||||
|
"fedisplacementmap": "feDisplacementMap",
|
||||||
|
"fedistantlight": "feDistantLight",
|
||||||
|
"feflood": "feFlood",
|
||||||
|
"fefunca": "feFuncA",
|
||||||
|
"fefuncb": "feFuncB",
|
||||||
|
"fefuncg": "feFuncG",
|
||||||
|
"fefuncr": "feFuncR",
|
||||||
|
"fegaussianblur": "feGaussianBlur",
|
||||||
|
"feimage": "feImage",
|
||||||
|
"femerge": "feMerge",
|
||||||
|
"femergenode": "feMergeNode",
|
||||||
|
"femorphology": "feMorphology",
|
||||||
|
"feoffset": "feOffset",
|
||||||
|
"fepointlight": "fePointLight",
|
||||||
|
"fespecularlighting": "feSpecularLighting",
|
||||||
|
"fespotlight": "feSpotLight",
|
||||||
|
"fetile": "feTile",
|
||||||
|
"feturbulence": "feTurbulence",
|
||||||
|
"foreignobject": "foreignObject",
|
||||||
|
"glyphref": "glyphRef",
|
||||||
|
"lineargradient": "linearGradient",
|
||||||
|
"radialgradient": "radialGradient",
|
||||||
|
"textpath": "textPath",
|
||||||
|
}
|
||||||
|
|
||||||
|
// Section 12.2.6.1
|
||||||
|
var mathMLAttributeAdjustments = map[string]string{
|
||||||
|
"definitionurl": "definitionURL",
|
||||||
|
}
|
||||||
|
|
||||||
|
var svgAttributeAdjustments = map[string]string{
|
||||||
|
"attributename": "attributeName",
|
||||||
|
"attributetype": "attributeType",
|
||||||
|
"basefrequency": "baseFrequency",
|
||||||
|
"baseprofile": "baseProfile",
|
||||||
|
"calcmode": "calcMode",
|
||||||
|
"clippathunits": "clipPathUnits",
|
||||||
|
"contentscripttype": "contentScriptType",
|
||||||
|
"contentstyletype": "contentStyleType",
|
||||||
|
"diffuseconstant": "diffuseConstant",
|
||||||
|
"edgemode": "edgeMode",
|
||||||
|
"externalresourcesrequired": "externalResourcesRequired",
|
||||||
|
"filterres": "filterRes",
|
||||||
|
"filterunits": "filterUnits",
|
||||||
|
"glyphref": "glyphRef",
|
||||||
|
"gradienttransform": "gradientTransform",
|
||||||
|
"gradientunits": "gradientUnits",
|
||||||
|
"kernelmatrix": "kernelMatrix",
|
||||||
|
"kernelunitlength": "kernelUnitLength",
|
||||||
|
"keypoints": "keyPoints",
|
||||||
|
"keysplines": "keySplines",
|
||||||
|
"keytimes": "keyTimes",
|
||||||
|
"lengthadjust": "lengthAdjust",
|
||||||
|
"limitingconeangle": "limitingConeAngle",
|
||||||
|
"markerheight": "markerHeight",
|
||||||
|
"markerunits": "markerUnits",
|
||||||
|
"markerwidth": "markerWidth",
|
||||||
|
"maskcontentunits": "maskContentUnits",
|
||||||
|
"maskunits": "maskUnits",
|
||||||
|
"numoctaves": "numOctaves",
|
||||||
|
"pathlength": "pathLength",
|
||||||
|
"patterncontentunits": "patternContentUnits",
|
||||||
|
"patterntransform": "patternTransform",
|
||||||
|
"patternunits": "patternUnits",
|
||||||
|
"pointsatx": "pointsAtX",
|
||||||
|
"pointsaty": "pointsAtY",
|
||||||
|
"pointsatz": "pointsAtZ",
|
||||||
|
"preservealpha": "preserveAlpha",
|
||||||
|
"preserveaspectratio": "preserveAspectRatio",
|
||||||
|
"primitiveunits": "primitiveUnits",
|
||||||
|
"refx": "refX",
|
||||||
|
"refy": "refY",
|
||||||
|
"repeatcount": "repeatCount",
|
||||||
|
"repeatdur": "repeatDur",
|
||||||
|
"requiredextensions": "requiredExtensions",
|
||||||
|
"requiredfeatures": "requiredFeatures",
|
||||||
|
"specularconstant": "specularConstant",
|
||||||
|
"specularexponent": "specularExponent",
|
||||||
|
"spreadmethod": "spreadMethod",
|
||||||
|
"startoffset": "startOffset",
|
||||||
|
"stddeviation": "stdDeviation",
|
||||||
|
"stitchtiles": "stitchTiles",
|
||||||
|
"surfacescale": "surfaceScale",
|
||||||
|
"systemlanguage": "systemLanguage",
|
||||||
|
"tablevalues": "tableValues",
|
||||||
|
"targetx": "targetX",
|
||||||
|
"targety": "targetY",
|
||||||
|
"textlength": "textLength",
|
||||||
|
"viewbox": "viewBox",
|
||||||
|
"viewtarget": "viewTarget",
|
||||||
|
"xchannelselector": "xChannelSelector",
|
||||||
|
"ychannelselector": "yChannelSelector",
|
||||||
|
"zoomandpan": "zoomAndPan",
|
||||||
|
}
|
220
vendor/golang.org/x/net/html/node.go
generated
vendored
Normal file
@ -0,0 +1,220 @@
// Copyright 2011 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package html
|
||||||
|
|
||||||
|
import (
|
||||||
|
"golang.org/x/net/html/atom"
|
||||||
|
)
|
||||||
|
|
||||||
|
// A NodeType is the type of a Node.
|
||||||
|
type NodeType uint32
|
||||||
|
|
||||||
|
const (
|
||||||
|
ErrorNode NodeType = iota
|
||||||
|
TextNode
|
||||||
|
DocumentNode
|
||||||
|
ElementNode
|
||||||
|
CommentNode
|
||||||
|
DoctypeNode
|
||||||
|
scopeMarkerNode
|
||||||
|
)
|
||||||
|
|
||||||
|
// Section 12.2.4.3 says "The markers are inserted when entering applet,
|
||||||
|
// object, marquee, template, td, th, and caption elements, and are used
|
||||||
|
// to prevent formatting from "leaking" into applet, object, marquee,
|
||||||
|
// template, td, th, and caption elements".
|
||||||
|
var scopeMarker = Node{Type: scopeMarkerNode}
|
||||||
|
|
||||||
|
// A Node consists of a NodeType and some Data (tag name for element nodes,
|
||||||
|
// content for text) and are part of a tree of Nodes. Element nodes may also
|
||||||
|
// have a Namespace and contain a slice of Attributes. Data is unescaped, so
|
||||||
|
// that it looks like "a<b" rather than "a&lt;b". For element nodes, DataAtom
|
||||||
|
// is the atom for Data, or zero if Data is not a known tag name.
|
||||||
|
//
|
||||||
|
// An empty Namespace implies a "http://www.w3.org/1999/xhtml" namespace.
|
||||||
|
// Similarly, "math" is short for "http://www.w3.org/1998/Math/MathML", and
|
||||||
|
// "svg" is short for "http://www.w3.org/2000/svg".
|
||||||
|
type Node struct {
|
||||||
|
Parent, FirstChild, LastChild, PrevSibling, NextSibling *Node
|
||||||
|
|
||||||
|
Type NodeType
|
||||||
|
DataAtom atom.Atom
|
||||||
|
Data string
|
||||||
|
Namespace string
|
||||||
|
Attr []Attribute
|
||||||
|
}
|
||||||
|
|
||||||
|
// InsertBefore inserts newChild as a child of n, immediately before oldChild
|
||||||
|
// in the sequence of n's children. oldChild may be nil, in which case newChild
|
||||||
|
// is appended to the end of n's children.
|
||||||
|
//
|
||||||
|
// It will panic if newChild already has a parent or siblings.
|
||||||
|
func (n *Node) InsertBefore(newChild, oldChild *Node) {
|
||||||
|
if newChild.Parent != nil || newChild.PrevSibling != nil || newChild.NextSibling != nil {
|
||||||
|
panic("html: InsertBefore called for an attached child Node")
|
||||||
|
}
|
||||||
|
var prev, next *Node
|
||||||
|
if oldChild != nil {
|
||||||
|
prev, next = oldChild.PrevSibling, oldChild
|
||||||
|
} else {
|
||||||
|
prev = n.LastChild
|
||||||
|
}
|
||||||
|
if prev != nil {
|
||||||
|
prev.NextSibling = newChild
|
||||||
|
} else {
|
||||||
|
n.FirstChild = newChild
|
||||||
|
}
|
||||||
|
if next != nil {
|
||||||
|
next.PrevSibling = newChild
|
||||||
|
} else {
|
||||||
|
n.LastChild = newChild
|
||||||
|
}
|
||||||
|
newChild.Parent = n
|
||||||
|
newChild.PrevSibling = prev
|
||||||
|
newChild.NextSibling = next
|
||||||
|
}
|
||||||
|
|
||||||
|
// AppendChild adds a node c as a child of n.
|
||||||
|
//
|
||||||
|
// It will panic if c already has a parent or siblings.
|
||||||
|
func (n *Node) AppendChild(c *Node) {
|
||||||
|
if c.Parent != nil || c.PrevSibling != nil || c.NextSibling != nil {
|
||||||
|
panic("html: AppendChild called for an attached child Node")
|
||||||
|
}
|
||||||
|
last := n.LastChild
|
||||||
|
if last != nil {
|
||||||
|
last.NextSibling = c
|
||||||
|
} else {
|
||||||
|
n.FirstChild = c
|
||||||
|
}
|
||||||
|
n.LastChild = c
|
||||||
|
c.Parent = n
|
||||||
|
c.PrevSibling = last
|
||||||
|
}
|
||||||
|
|
||||||
|
// RemoveChild removes a node c that is a child of n. Afterwards, c will have
|
||||||
|
// no parent and no siblings.
|
||||||
|
//
|
||||||
|
// It will panic if c's parent is not n.
|
||||||
|
func (n *Node) RemoveChild(c *Node) {
|
||||||
|
if c.Parent != n {
|
||||||
|
panic("html: RemoveChild called for a non-child Node")
|
||||||
|
}
|
||||||
|
if n.FirstChild == c {
|
||||||
|
n.FirstChild = c.NextSibling
|
||||||
|
}
|
||||||
|
if c.NextSibling != nil {
|
||||||
|
c.NextSibling.PrevSibling = c.PrevSibling
|
||||||
|
}
|
||||||
|
if n.LastChild == c {
|
||||||
|
n.LastChild = c.PrevSibling
|
||||||
|
}
|
||||||
|
if c.PrevSibling != nil {
|
||||||
|
c.PrevSibling.NextSibling = c.NextSibling
|
||||||
|
}
|
||||||
|
c.Parent = nil
|
||||||
|
c.PrevSibling = nil
|
||||||
|
c.NextSibling = nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// reparentChildren reparents all of src's child nodes to dst.
|
||||||
|
func reparentChildren(dst, src *Node) {
|
||||||
|
for {
|
||||||
|
child := src.FirstChild
|
||||||
|
if child == nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
src.RemoveChild(child)
|
||||||
|
dst.AppendChild(child)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// clone returns a new node with the same type, data and attributes.
|
||||||
|
// The clone has no parent, no siblings and no children.
|
||||||
|
func (n *Node) clone() *Node {
|
||||||
|
m := &Node{
|
||||||
|
Type: n.Type,
|
||||||
|
DataAtom: n.DataAtom,
|
||||||
|
Data: n.Data,
|
||||||
|
Attr: make([]Attribute, len(n.Attr)),
|
||||||
|
}
|
||||||
|
copy(m.Attr, n.Attr)
|
||||||
|
return m
|
||||||
|
}
|
||||||
|
|
||||||
|
// nodeStack is a stack of nodes.
|
||||||
|
type nodeStack []*Node
|
||||||
|
|
||||||
|
// pop pops the stack. It will panic if s is empty.
|
||||||
|
func (s *nodeStack) pop() *Node {
|
||||||
|
i := len(*s)
|
||||||
|
n := (*s)[i-1]
|
||||||
|
*s = (*s)[:i-1]
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
|
// top returns the most recently pushed node, or nil if s is empty.
|
||||||
|
func (s *nodeStack) top() *Node {
|
||||||
|
if i := len(*s); i > 0 {
|
||||||
|
return (*s)[i-1]
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// index returns the index of the top-most occurrence of n in the stack, or -1
|
||||||
|
// if n is not present.
|
||||||
|
func (s *nodeStack) index(n *Node) int {
|
||||||
|
for i := len(*s) - 1; i >= 0; i-- {
|
||||||
|
if (*s)[i] == n {
|
||||||
|
return i
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
|
||||||
|
// contains returns whether a is within s.
|
||||||
|
func (s *nodeStack) contains(a atom.Atom) bool {
|
||||||
|
for _, n := range *s {
|
||||||
|
if n.DataAtom == a {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// insert inserts a node at the given index.
|
||||||
|
func (s *nodeStack) insert(i int, n *Node) {
|
||||||
|
(*s) = append(*s, nil)
|
||||||
|
copy((*s)[i+1:], (*s)[i:])
|
||||||
|
(*s)[i] = n
|
||||||
|
}
|
||||||
|
|
||||||
|
// remove removes a node from the stack. It is a no-op if n is not present.
|
||||||
|
func (s *nodeStack) remove(n *Node) {
|
||||||
|
i := s.index(n)
|
||||||
|
if i == -1 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
copy((*s)[i:], (*s)[i+1:])
|
||||||
|
j := len(*s) - 1
|
||||||
|
(*s)[j] = nil
|
||||||
|
*s = (*s)[:j]
|
||||||
|
}
|
||||||
|
|
||||||
|
type insertionModeStack []insertionMode
|
||||||
|
|
||||||
|
func (s *insertionModeStack) pop() (im insertionMode) {
|
||||||
|
i := len(*s)
|
||||||
|
im = (*s)[i-1]
|
||||||
|
*s = (*s)[:i-1]
|
||||||
|
return im
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *insertionModeStack) top() insertionMode {
|
||||||
|
if i := len(*s); i > 0 {
|
||||||
|
return (*s)[i-1]
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
2313
vendor/golang.org/x/net/html/parse.go
generated
vendored
Normal file
File diff suppressed because it is too large
271
vendor/golang.org/x/net/html/render.go
generated
vendored
Normal file
@ -0,0 +1,271 @@
// Copyright 2011 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package html
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
type writer interface {
|
||||||
|
io.Writer
|
||||||
|
io.ByteWriter
|
||||||
|
WriteString(string) (int, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Render renders the parse tree n to the given writer.
|
||||||
|
//
|
||||||
|
// Rendering is done on a 'best effort' basis: calling Parse on the output of
|
||||||
|
// Render will always result in something similar to the original tree, but it
|
||||||
|
// is not necessarily an exact clone unless the original tree was 'well-formed'.
|
||||||
|
// 'Well-formed' is not easily specified; the HTML5 specification is
|
||||||
|
// complicated.
|
||||||
|
//
|
||||||
|
// Calling Parse on arbitrary input typically results in a 'well-formed' parse
|
||||||
|
// tree. However, it is possible for Parse to yield a 'badly-formed' parse tree.
|
||||||
|
// For example, in a 'well-formed' parse tree, no <a> element is a child of
|
||||||
|
// another <a> element: parsing "<a><a>" results in two sibling elements.
|
||||||
|
// Similarly, in a 'well-formed' parse tree, no <a> element is a child of a
|
||||||
|
// <table> element: parsing "<p><table><a>" results in a <p> with two sibling
|
||||||
|
// children; the <a> is reparented to the <table>'s parent. However, calling
|
||||||
|
// Parse on "<a><table><a>" does not return an error, but the result has an <a>
|
||||||
|
// element with an <a> child, and is therefore not 'well-formed'.
|
||||||
|
//
|
||||||
|
// Programmatically constructed trees are typically also 'well-formed', but it
|
||||||
|
// is possible to construct a tree that looks innocuous but, when rendered and
|
||||||
|
// re-parsed, results in a different tree. A simple example is that a solitary
|
||||||
|
// text node would become a tree containing <html>, <head> and <body> elements.
|
||||||
|
// Another example is that the programmatic equivalent of "a<head>b</head>c"
|
||||||
|
// becomes "<html><head><head/><body>abc</body></html>".
|
||||||
|
func Render(w io.Writer, n *Node) error {
|
||||||
|
if x, ok := w.(writer); ok {
|
||||||
|
return render(x, n)
|
||||||
|
}
|
||||||
|
buf := bufio.NewWriter(w)
|
||||||
|
if err := render(buf, n); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return buf.Flush()
|
||||||
|
}
|
||||||
|
|
||||||
|
// plaintextAbort is returned from render1 when a <plaintext> element
|
||||||
|
// has been rendered. No more end tags should be rendered after that.
|
||||||
|
var plaintextAbort = errors.New("html: internal error (plaintext abort)")
|
||||||
|
|
||||||
|
func render(w writer, n *Node) error {
|
||||||
|
err := render1(w, n)
|
||||||
|
if err == plaintextAbort {
|
||||||
|
err = nil
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func render1(w writer, n *Node) error {
|
||||||
|
// Render non-element nodes; these are the easy cases.
|
||||||
|
switch n.Type {
|
||||||
|
case ErrorNode:
|
||||||
|
return errors.New("html: cannot render an ErrorNode node")
|
||||||
|
case TextNode:
|
||||||
|
return escape(w, n.Data)
|
||||||
|
case DocumentNode:
|
||||||
|
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||||
|
if err := render1(w, c); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
case ElementNode:
|
||||||
|
// No-op.
|
||||||
|
case CommentNode:
|
||||||
|
if _, err := w.WriteString("<!--"); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if _, err := w.WriteString(n.Data); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if _, err := w.WriteString("-->"); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
case DoctypeNode:
|
||||||
|
if _, err := w.WriteString("<!DOCTYPE "); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if _, err := w.WriteString(n.Data); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if n.Attr != nil {
|
||||||
|
var p, s string
|
||||||
|
for _, a := range n.Attr {
|
||||||
|
switch a.Key {
|
||||||
|
case "public":
|
||||||
|
p = a.Val
|
||||||
|
case "system":
|
||||||
|
s = a.Val
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if p != "" {
|
||||||
|
if _, err := w.WriteString(" PUBLIC "); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := writeQuoted(w, p); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if s != "" {
|
||||||
|
if err := w.WriteByte(' '); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := writeQuoted(w, s); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if s != "" {
|
||||||
|
if _, err := w.WriteString(" SYSTEM "); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := writeQuoted(w, s); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return w.WriteByte('>')
|
||||||
|
default:
|
||||||
|
return errors.New("html: unknown node type")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Render the <xxx> opening tag.
|
||||||
|
if err := w.WriteByte('<'); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if _, err := w.WriteString(n.Data); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
for _, a := range n.Attr {
|
||||||
|
if err := w.WriteByte(' '); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if a.Namespace != "" {
|
||||||
|
if _, err := w.WriteString(a.Namespace); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := w.WriteByte(':'); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if _, err := w.WriteString(a.Key); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if _, err := w.WriteString(`="`); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := escape(w, a.Val); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := w.WriteByte('"'); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if voidElements[n.Data] {
|
||||||
|
if n.FirstChild != nil {
|
||||||
|
return fmt.Errorf("html: void element <%s> has child nodes", n.Data)
|
||||||
|
}
|
||||||
|
_, err := w.WriteString("/>")
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := w.WriteByte('>'); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add initial newline where there is danger of a newline being ignored.
|
||||||
|
if c := n.FirstChild; c != nil && c.Type == TextNode && strings.HasPrefix(c.Data, "\n") {
|
||||||
|
switch n.Data {
|
||||||
|
case "pre", "listing", "textarea":
|
||||||
|
if err := w.WriteByte('\n'); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Render any child nodes.
|
||||||
|
switch n.Data {
|
||||||
|
case "iframe", "noembed", "noframes", "noscript", "plaintext", "script", "style", "xmp":
|
||||||
|
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||||
|
if c.Type == TextNode {
|
||||||
|
if _, err := w.WriteString(c.Data); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if err := render1(w, c); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if n.Data == "plaintext" {
|
||||||
|
// Don't render anything else. <plaintext> must be the
|
||||||
|
// last element in the file, with no closing tag.
|
||||||
|
return plaintextAbort
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||||
|
if err := render1(w, c); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Render the </xxx> closing tag.
|
||||||
|
if _, err := w.WriteString("</"); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if _, err := w.WriteString(n.Data); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return w.WriteByte('>')
|
||||||
|
}
|
||||||
|
|
||||||
|
// writeQuoted writes s to w surrounded by quotes. Normally it will use double
|
||||||
|
// quotes, but if s contains a double quote, it will use single quotes.
|
||||||
|
// It is used for writing the identifiers in a doctype declaration.
|
||||||
|
// In valid HTML, they can't contain both types of quotes.
|
||||||
|
func writeQuoted(w writer, s string) error {
|
||||||
|
var q byte = '"'
|
||||||
|
if strings.Contains(s, `"`) {
|
||||||
|
q = '\''
|
||||||
|
}
|
||||||
|
if err := w.WriteByte(q); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if _, err := w.WriteString(s); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := w.WriteByte(q); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Section 12.1.2, "Elements", gives this list of void elements. Void elements
|
||||||
|
// are those that can't have any contents.
|
||||||
|
var voidElements = map[string]bool{
|
||||||
|
"area": true,
|
||||||
|
"base": true,
|
||||||
|
"br": true,
|
||||||
|
"col": true,
|
||||||
|
"command": true,
|
||||||
|
"embed": true,
|
||||||
|
"hr": true,
|
||||||
|
"img": true,
|
||||||
|
"input": true,
|
||||||
|
"keygen": true,
|
||||||
|
"link": true,
|
||||||
|
"meta": true,
|
||||||
|
"param": true,
|
||||||
|
"source": true,
|
||||||
|
"track": true,
|
||||||
|
"wbr": true,
|
||||||
|
}
|
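A short sketch (not part of this commit) of building a tree programmatically and rendering it with the Render function defined above:

package main

import (
	"os"

	"golang.org/x/net/html"
	"golang.org/x/net/html/atom"
)

func main() {
	p := &html.Node{Type: html.ElementNode, DataAtom: atom.P, Data: "p"}
	p.AppendChild(&html.Node{Type: html.TextNode, Data: "a < b"})
	// Render escapes the text node, so this writes: <p>a &lt; b</p>
	if err := html.Render(os.Stdout, p); err != nil {
		panic(err)
	}
}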
1219
vendor/golang.org/x/net/html/token.go
generated
vendored
Normal file
File diff suppressed because it is too large
12
vendor/gopkg.in/yaml.v2/.travis.yml
generated
vendored
Normal file
@ -0,0 +1,12 @@
language: go
|
||||||
|
|
||||||
|
go:
|
||||||
|
- 1.4
|
||||||
|
- 1.5
|
||||||
|
- 1.6
|
||||||
|
- 1.7
|
||||||
|
- 1.8
|
||||||
|
- 1.9
|
||||||
|
- tip
|
||||||
|
|
||||||
|
go_import_path: gopkg.in/yaml.v2
|
201
vendor/gopkg.in/yaml.v2/LICENSE
generated
vendored
Normal file
@ -0,0 +1,201 @@
Apache License
|
||||||
|
Version 2.0, January 2004
|
||||||
|
http://www.apache.org/licenses/
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||||
|
|
||||||
|
1. Definitions.
|
||||||
|
|
||||||
|
"License" shall mean the terms and conditions for use, reproduction,
|
||||||
|
and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.

"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:

(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.

You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright {yyyy} {name of copyright owner}

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
31
vendor/gopkg.in/yaml.v2/LICENSE.libyaml
generated
vendored
Normal file
@ -0,0 +1,31 @@
The following files were ported to Go from C files of libyaml, and thus
are still covered by their original copyright and license:

apic.go
emitterc.go
parserc.go
readerc.go
scannerc.go
writerc.go
yamlh.go
yamlprivateh.go

Copyright (c) 2006 Kirill Simonov

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
13
vendor/gopkg.in/yaml.v2/NOTICE
generated
vendored
Normal file
@ -0,0 +1,13 @@
Copyright 2011-2016 Canonical Ltd.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
133
vendor/gopkg.in/yaml.v2/README.md
generated
vendored
Normal file
@ -0,0 +1,133 @@
# YAML support for the Go language

Introduction
------------

The yaml package enables Go programs to comfortably encode and decode YAML
values. It was developed within [Canonical](https://www.canonical.com) as
part of the [juju](https://juju.ubuntu.com) project, and is based on a
pure Go port of the well-known [libyaml](http://pyyaml.org/wiki/LibYAML)
C library to parse and generate YAML data quickly and reliably.

Compatibility
-------------

The yaml package supports most of YAML 1.1 and 1.2, including support for
anchors, tags, map merging, etc. Multi-document unmarshalling is not yet
implemented, and base-60 floats from YAML 1.1 are purposefully not
supported since they're a poor design and are gone in YAML 1.2.

Installation and usage
----------------------

The import path for the package is *gopkg.in/yaml.v2*.

To install it, run:

    go get gopkg.in/yaml.v2

API documentation
-----------------

If opened in a browser, the import path itself leads to the API documentation:

  * [https://gopkg.in/yaml.v2](https://gopkg.in/yaml.v2)

API stability
-------------

The package API for yaml v2 will remain stable as described in [gopkg.in](https://gopkg.in).


License
-------

The yaml package is licensed under the Apache License 2.0. Please see the LICENSE file for details.


Example
-------

```Go
package main

import (
    "fmt"
    "log"

    "gopkg.in/yaml.v2"
)

var data = `
a: Easy!
b:
  c: 2
  d: [3, 4]
`

// Note: struct fields must be public in order for unmarshal to
// correctly populate the data.
type T struct {
    A string
    B struct {
        RenamedC int   `yaml:"c"`
        D        []int `yaml:",flow"`
    }
}

func main() {
    t := T{}

    err := yaml.Unmarshal([]byte(data), &t)
    if err != nil {
        log.Fatalf("error: %v", err)
    }
    fmt.Printf("--- t:\n%v\n\n", t)

    d, err := yaml.Marshal(&t)
    if err != nil {
        log.Fatalf("error: %v", err)
    }
    fmt.Printf("--- t dump:\n%s\n\n", string(d))

    m := make(map[interface{}]interface{})

    err = yaml.Unmarshal([]byte(data), &m)
    if err != nil {
        log.Fatalf("error: %v", err)
    }
    fmt.Printf("--- m:\n%v\n\n", m)

    d, err = yaml.Marshal(&m)
    if err != nil {
        log.Fatalf("error: %v", err)
    }
    fmt.Printf("--- m dump:\n%s\n\n", string(d))
}
```

This example will generate the following output:

```
--- t:
{Easy! {2 [3 4]}}

--- t dump:
a: Easy!
b:
  c: 2
  d: [3, 4]


--- m:
map[a:Easy! b:map[c:2 d:[3 4]]]

--- m dump:
a: Easy!
b:
  c: 2
  d:
  - 3
  - 4
```
739
vendor/gopkg.in/yaml.v2/apic.go
generated
vendored
Normal file
@ -0,0 +1,739 @@
package yaml
|
||||||
|
|
||||||
|
import (
|
||||||
|
"io"
|
||||||
|
)
|
||||||
|
|
||||||
|
func yaml_insert_token(parser *yaml_parser_t, pos int, token *yaml_token_t) {
|
||||||
|
//fmt.Println("yaml_insert_token", "pos:", pos, "typ:", token.typ, "head:", parser.tokens_head, "len:", len(parser.tokens))
|
||||||
|
|
||||||
|
// Check if we can move the queue at the beginning of the buffer.
|
||||||
|
if parser.tokens_head > 0 && len(parser.tokens) == cap(parser.tokens) {
|
||||||
|
if parser.tokens_head != len(parser.tokens) {
|
||||||
|
copy(parser.tokens, parser.tokens[parser.tokens_head:])
|
||||||
|
}
|
||||||
|
parser.tokens = parser.tokens[:len(parser.tokens)-parser.tokens_head]
|
||||||
|
parser.tokens_head = 0
|
||||||
|
}
|
||||||
|
parser.tokens = append(parser.tokens, *token)
|
||||||
|
if pos < 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
copy(parser.tokens[parser.tokens_head+pos+1:], parser.tokens[parser.tokens_head+pos:])
|
||||||
|
parser.tokens[parser.tokens_head+pos] = *token
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create a new parser object.
|
||||||
|
func yaml_parser_initialize(parser *yaml_parser_t) bool {
|
||||||
|
*parser = yaml_parser_t{
|
||||||
|
raw_buffer: make([]byte, 0, input_raw_buffer_size),
|
||||||
|
buffer: make([]byte, 0, input_buffer_size),
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Destroy a parser object.
|
||||||
|
func yaml_parser_delete(parser *yaml_parser_t) {
|
||||||
|
*parser = yaml_parser_t{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// String read handler.
|
||||||
|
func yaml_string_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) {
|
||||||
|
if parser.input_pos == len(parser.input) {
|
||||||
|
return 0, io.EOF
|
||||||
|
}
|
||||||
|
n = copy(buffer, parser.input[parser.input_pos:])
|
||||||
|
parser.input_pos += n
|
||||||
|
return n, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Reader read handler.
|
||||||
|
func yaml_reader_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) {
|
||||||
|
return parser.input_reader.Read(buffer)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set a string input.
|
||||||
|
func yaml_parser_set_input_string(parser *yaml_parser_t, input []byte) {
|
||||||
|
if parser.read_handler != nil {
|
||||||
|
panic("must set the input source only once")
|
||||||
|
}
|
||||||
|
parser.read_handler = yaml_string_read_handler
|
||||||
|
parser.input = input
|
||||||
|
parser.input_pos = 0
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set a file input.
|
||||||
|
func yaml_parser_set_input_reader(parser *yaml_parser_t, r io.Reader) {
|
||||||
|
if parser.read_handler != nil {
|
||||||
|
panic("must set the input source only once")
|
||||||
|
}
|
||||||
|
parser.read_handler = yaml_reader_read_handler
|
||||||
|
parser.input_reader = r
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set the source encoding.
|
||||||
|
func yaml_parser_set_encoding(parser *yaml_parser_t, encoding yaml_encoding_t) {
|
||||||
|
if parser.encoding != yaml_ANY_ENCODING {
|
||||||
|
panic("must set the encoding only once")
|
||||||
|
}
|
||||||
|
parser.encoding = encoding
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create a new emitter object.
|
||||||
|
func yaml_emitter_initialize(emitter *yaml_emitter_t) {
|
||||||
|
*emitter = yaml_emitter_t{
|
||||||
|
buffer: make([]byte, output_buffer_size),
|
||||||
|
raw_buffer: make([]byte, 0, output_raw_buffer_size),
|
||||||
|
states: make([]yaml_emitter_state_t, 0, initial_stack_size),
|
||||||
|
events: make([]yaml_event_t, 0, initial_queue_size),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Destroy an emitter object.
|
||||||
|
func yaml_emitter_delete(emitter *yaml_emitter_t) {
|
||||||
|
*emitter = yaml_emitter_t{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// String write handler.
|
||||||
|
func yaml_string_write_handler(emitter *yaml_emitter_t, buffer []byte) error {
|
||||||
|
*emitter.output_buffer = append(*emitter.output_buffer, buffer...)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// yaml_writer_write_handler uses emitter.output_writer to write the
|
||||||
|
// emitted text.
|
||||||
|
func yaml_writer_write_handler(emitter *yaml_emitter_t, buffer []byte) error {
|
||||||
|
_, err := emitter.output_writer.Write(buffer)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set a string output.
|
||||||
|
func yaml_emitter_set_output_string(emitter *yaml_emitter_t, output_buffer *[]byte) {
|
||||||
|
if emitter.write_handler != nil {
|
||||||
|
panic("must set the output target only once")
|
||||||
|
}
|
||||||
|
emitter.write_handler = yaml_string_write_handler
|
||||||
|
emitter.output_buffer = output_buffer
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set a file output.
|
||||||
|
func yaml_emitter_set_output_writer(emitter *yaml_emitter_t, w io.Writer) {
|
||||||
|
if emitter.write_handler != nil {
|
||||||
|
panic("must set the output target only once")
|
||||||
|
}
|
||||||
|
emitter.write_handler = yaml_writer_write_handler
|
||||||
|
emitter.output_writer = w
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set the output encoding.
|
||||||
|
func yaml_emitter_set_encoding(emitter *yaml_emitter_t, encoding yaml_encoding_t) {
|
||||||
|
if emitter.encoding != yaml_ANY_ENCODING {
|
||||||
|
panic("must set the output encoding only once")
|
||||||
|
}
|
||||||
|
emitter.encoding = encoding
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set the canonical output style.
|
||||||
|
func yaml_emitter_set_canonical(emitter *yaml_emitter_t, canonical bool) {
|
||||||
|
emitter.canonical = canonical
|
||||||
|
}
|
||||||
|
|
||||||
|
//// Set the indentation increment.
|
||||||
|
func yaml_emitter_set_indent(emitter *yaml_emitter_t, indent int) {
|
||||||
|
if indent < 2 || indent > 9 {
|
||||||
|
indent = 2
|
||||||
|
}
|
||||||
|
emitter.best_indent = indent
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set the preferred line width.
|
||||||
|
func yaml_emitter_set_width(emitter *yaml_emitter_t, width int) {
|
||||||
|
if width < 0 {
|
||||||
|
width = -1
|
||||||
|
}
|
||||||
|
emitter.best_width = width
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set if unescaped non-ASCII characters are allowed.
|
||||||
|
func yaml_emitter_set_unicode(emitter *yaml_emitter_t, unicode bool) {
|
||||||
|
emitter.unicode = unicode
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set the preferred line break character.
|
||||||
|
func yaml_emitter_set_break(emitter *yaml_emitter_t, line_break yaml_break_t) {
|
||||||
|
emitter.line_break = line_break
|
||||||
|
}
|
||||||
|
|
||||||
|
///*
|
||||||
|
// * Destroy a token object.
|
||||||
|
// */
|
||||||
|
//
|
||||||
|
//YAML_DECLARE(void)
|
||||||
|
//yaml_token_delete(yaml_token_t *token)
|
||||||
|
//{
|
||||||
|
// assert(token); // Non-NULL token object expected.
|
||||||
|
//
|
||||||
|
// switch (token.type)
|
||||||
|
// {
|
||||||
|
// case YAML_TAG_DIRECTIVE_TOKEN:
|
||||||
|
// yaml_free(token.data.tag_directive.handle);
|
||||||
|
// yaml_free(token.data.tag_directive.prefix);
|
||||||
|
// break;
|
||||||
|
//
|
||||||
|
// case YAML_ALIAS_TOKEN:
|
||||||
|
// yaml_free(token.data.alias.value);
|
||||||
|
// break;
|
||||||
|
//
|
||||||
|
// case YAML_ANCHOR_TOKEN:
|
||||||
|
// yaml_free(token.data.anchor.value);
|
||||||
|
// break;
|
||||||
|
//
|
||||||
|
// case YAML_TAG_TOKEN:
|
||||||
|
// yaml_free(token.data.tag.handle);
|
||||||
|
// yaml_free(token.data.tag.suffix);
|
||||||
|
// break;
|
||||||
|
//
|
||||||
|
// case YAML_SCALAR_TOKEN:
|
||||||
|
// yaml_free(token.data.scalar.value);
|
||||||
|
// break;
|
||||||
|
//
|
||||||
|
// default:
|
||||||
|
// break;
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// memset(token, 0, sizeof(yaml_token_t));
|
||||||
|
//}
|
||||||
|
//
|
||||||
|
///*
|
||||||
|
// * Check if a string is a valid UTF-8 sequence.
|
||||||
|
// *
|
||||||
|
// * Check 'reader.c' for more details on UTF-8 encoding.
|
||||||
|
// */
|
||||||
|
//
|
||||||
|
//static int
|
||||||
|
//yaml_check_utf8(yaml_char_t *start, size_t length)
|
||||||
|
//{
|
||||||
|
// yaml_char_t *end = start+length;
|
||||||
|
// yaml_char_t *pointer = start;
|
||||||
|
//
|
||||||
|
// while (pointer < end) {
|
||||||
|
// unsigned char octet;
|
||||||
|
// unsigned int width;
|
||||||
|
// unsigned int value;
|
||||||
|
// size_t k;
|
||||||
|
//
|
||||||
|
// octet = pointer[0];
|
||||||
|
// width = (octet & 0x80) == 0x00 ? 1 :
|
||||||
|
// (octet & 0xE0) == 0xC0 ? 2 :
|
||||||
|
// (octet & 0xF0) == 0xE0 ? 3 :
|
||||||
|
// (octet & 0xF8) == 0xF0 ? 4 : 0;
|
||||||
|
// value = (octet & 0x80) == 0x00 ? octet & 0x7F :
|
||||||
|
// (octet & 0xE0) == 0xC0 ? octet & 0x1F :
|
||||||
|
// (octet & 0xF0) == 0xE0 ? octet & 0x0F :
|
||||||
|
// (octet & 0xF8) == 0xF0 ? octet & 0x07 : 0;
|
||||||
|
// if (!width) return 0;
|
||||||
|
// if (pointer+width > end) return 0;
|
||||||
|
// for (k = 1; k < width; k ++) {
|
||||||
|
// octet = pointer[k];
|
||||||
|
// if ((octet & 0xC0) != 0x80) return 0;
|
||||||
|
// value = (value << 6) + (octet & 0x3F);
|
||||||
|
// }
|
||||||
|
// if (!((width == 1) ||
|
||||||
|
// (width == 2 && value >= 0x80) ||
|
||||||
|
// (width == 3 && value >= 0x800) ||
|
||||||
|
// (width == 4 && value >= 0x10000))) return 0;
|
||||||
|
//
|
||||||
|
// pointer += width;
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// return 1;
|
||||||
|
//}
|
||||||
|
//
|
||||||
|
|
||||||
|
// Create STREAM-START.
|
||||||
|
func yaml_stream_start_event_initialize(event *yaml_event_t, encoding yaml_encoding_t) {
|
||||||
|
*event = yaml_event_t{
|
||||||
|
typ: yaml_STREAM_START_EVENT,
|
||||||
|
encoding: encoding,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create STREAM-END.
|
||||||
|
func yaml_stream_end_event_initialize(event *yaml_event_t) {
|
||||||
|
*event = yaml_event_t{
|
||||||
|
typ: yaml_STREAM_END_EVENT,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create DOCUMENT-START.
|
||||||
|
func yaml_document_start_event_initialize(
|
||||||
|
event *yaml_event_t,
|
||||||
|
version_directive *yaml_version_directive_t,
|
||||||
|
tag_directives []yaml_tag_directive_t,
|
||||||
|
implicit bool,
|
||||||
|
) {
|
||||||
|
*event = yaml_event_t{
|
||||||
|
typ: yaml_DOCUMENT_START_EVENT,
|
||||||
|
version_directive: version_directive,
|
||||||
|
tag_directives: tag_directives,
|
||||||
|
implicit: implicit,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create DOCUMENT-END.
|
||||||
|
func yaml_document_end_event_initialize(event *yaml_event_t, implicit bool) {
|
||||||
|
*event = yaml_event_t{
|
||||||
|
typ: yaml_DOCUMENT_END_EVENT,
|
||||||
|
implicit: implicit,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
///*
|
||||||
|
// * Create ALIAS.
|
||||||
|
// */
|
||||||
|
//
|
||||||
|
//YAML_DECLARE(int)
|
||||||
|
//yaml_alias_event_initialize(event *yaml_event_t, anchor *yaml_char_t)
|
||||||
|
//{
|
||||||
|
// mark yaml_mark_t = { 0, 0, 0 }
|
||||||
|
// anchor_copy *yaml_char_t = NULL
|
||||||
|
//
|
||||||
|
// assert(event) // Non-NULL event object is expected.
|
||||||
|
// assert(anchor) // Non-NULL anchor is expected.
|
||||||
|
//
|
||||||
|
// if (!yaml_check_utf8(anchor, strlen((char *)anchor))) return 0
|
||||||
|
//
|
||||||
|
// anchor_copy = yaml_strdup(anchor)
|
||||||
|
// if (!anchor_copy)
|
||||||
|
// return 0
|
||||||
|
//
|
||||||
|
// ALIAS_EVENT_INIT(*event, anchor_copy, mark, mark)
|
||||||
|
//
|
||||||
|
// return 1
|
||||||
|
//}
|
||||||
|
|
||||||
|
// Create SCALAR.
|
||||||
|
func yaml_scalar_event_initialize(event *yaml_event_t, anchor, tag, value []byte, plain_implicit, quoted_implicit bool, style yaml_scalar_style_t) bool {
|
||||||
|
*event = yaml_event_t{
|
||||||
|
typ: yaml_SCALAR_EVENT,
|
||||||
|
anchor: anchor,
|
||||||
|
tag: tag,
|
||||||
|
value: value,
|
||||||
|
implicit: plain_implicit,
|
||||||
|
quoted_implicit: quoted_implicit,
|
||||||
|
style: yaml_style_t(style),
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create SEQUENCE-START.
|
||||||
|
func yaml_sequence_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_sequence_style_t) bool {
|
||||||
|
*event = yaml_event_t{
|
||||||
|
typ: yaml_SEQUENCE_START_EVENT,
|
||||||
|
anchor: anchor,
|
||||||
|
tag: tag,
|
||||||
|
implicit: implicit,
|
||||||
|
style: yaml_style_t(style),
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create SEQUENCE-END.
|
||||||
|
func yaml_sequence_end_event_initialize(event *yaml_event_t) bool {
|
||||||
|
*event = yaml_event_t{
|
||||||
|
typ: yaml_SEQUENCE_END_EVENT,
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create MAPPING-START.
|
||||||
|
func yaml_mapping_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_mapping_style_t) {
|
||||||
|
*event = yaml_event_t{
|
||||||
|
typ: yaml_MAPPING_START_EVENT,
|
||||||
|
anchor: anchor,
|
||||||
|
tag: tag,
|
||||||
|
implicit: implicit,
|
||||||
|
style: yaml_style_t(style),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create MAPPING-END.
|
||||||
|
func yaml_mapping_end_event_initialize(event *yaml_event_t) {
|
||||||
|
*event = yaml_event_t{
|
||||||
|
typ: yaml_MAPPING_END_EVENT,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Destroy an event object.
|
||||||
|
func yaml_event_delete(event *yaml_event_t) {
|
||||||
|
*event = yaml_event_t{}
|
||||||
|
}
|
||||||
|
|
||||||
|
///*
|
||||||
|
// * Create a document object.
|
||||||
|
// */
|
||||||
|
//
|
||||||
|
//YAML_DECLARE(int)
|
||||||
|
//yaml_document_initialize(document *yaml_document_t,
|
||||||
|
// version_directive *yaml_version_directive_t,
|
||||||
|
// tag_directives_start *yaml_tag_directive_t,
|
||||||
|
// tag_directives_end *yaml_tag_directive_t,
|
||||||
|
// start_implicit int, end_implicit int)
|
||||||
|
//{
|
||||||
|
// struct {
|
||||||
|
// error yaml_error_type_t
|
||||||
|
// } context
|
||||||
|
// struct {
|
||||||
|
// start *yaml_node_t
|
||||||
|
// end *yaml_node_t
|
||||||
|
// top *yaml_node_t
|
||||||
|
// } nodes = { NULL, NULL, NULL }
|
||||||
|
// version_directive_copy *yaml_version_directive_t = NULL
|
||||||
|
// struct {
|
||||||
|
// start *yaml_tag_directive_t
|
||||||
|
// end *yaml_tag_directive_t
|
||||||
|
// top *yaml_tag_directive_t
|
||||||
|
// } tag_directives_copy = { NULL, NULL, NULL }
|
||||||
|
// value yaml_tag_directive_t = { NULL, NULL }
|
||||||
|
// mark yaml_mark_t = { 0, 0, 0 }
|
||||||
|
//
|
||||||
|
// assert(document) // Non-NULL document object is expected.
|
||||||
|
// assert((tag_directives_start && tag_directives_end) ||
|
||||||
|
// (tag_directives_start == tag_directives_end))
|
||||||
|
// // Valid tag directives are expected.
|
||||||
|
//
|
||||||
|
// if (!STACK_INIT(&context, nodes, INITIAL_STACK_SIZE)) goto error
|
||||||
|
//
|
||||||
|
// if (version_directive) {
|
||||||
|
// version_directive_copy = yaml_malloc(sizeof(yaml_version_directive_t))
|
||||||
|
// if (!version_directive_copy) goto error
|
||||||
|
// version_directive_copy.major = version_directive.major
|
||||||
|
// version_directive_copy.minor = version_directive.minor
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// if (tag_directives_start != tag_directives_end) {
|
||||||
|
// tag_directive *yaml_tag_directive_t
|
||||||
|
// if (!STACK_INIT(&context, tag_directives_copy, INITIAL_STACK_SIZE))
|
||||||
|
// goto error
|
||||||
|
// for (tag_directive = tag_directives_start
|
||||||
|
// tag_directive != tag_directives_end; tag_directive ++) {
|
||||||
|
// assert(tag_directive.handle)
|
||||||
|
// assert(tag_directive.prefix)
|
||||||
|
// if (!yaml_check_utf8(tag_directive.handle,
|
||||||
|
// strlen((char *)tag_directive.handle)))
|
||||||
|
// goto error
|
||||||
|
// if (!yaml_check_utf8(tag_directive.prefix,
|
||||||
|
// strlen((char *)tag_directive.prefix)))
|
||||||
|
// goto error
|
||||||
|
// value.handle = yaml_strdup(tag_directive.handle)
|
||||||
|
// value.prefix = yaml_strdup(tag_directive.prefix)
|
||||||
|
// if (!value.handle || !value.prefix) goto error
|
||||||
|
// if (!PUSH(&context, tag_directives_copy, value))
|
||||||
|
// goto error
|
||||||
|
// value.handle = NULL
|
||||||
|
// value.prefix = NULL
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// DOCUMENT_INIT(*document, nodes.start, nodes.end, version_directive_copy,
|
||||||
|
// tag_directives_copy.start, tag_directives_copy.top,
|
||||||
|
// start_implicit, end_implicit, mark, mark)
|
||||||
|
//
|
||||||
|
// return 1
|
||||||
|
//
|
||||||
|
//error:
|
||||||
|
// STACK_DEL(&context, nodes)
|
||||||
|
// yaml_free(version_directive_copy)
|
||||||
|
// while (!STACK_EMPTY(&context, tag_directives_copy)) {
|
||||||
|
// value yaml_tag_directive_t = POP(&context, tag_directives_copy)
|
||||||
|
// yaml_free(value.handle)
|
||||||
|
// yaml_free(value.prefix)
|
||||||
|
// }
|
||||||
|
// STACK_DEL(&context, tag_directives_copy)
|
||||||
|
// yaml_free(value.handle)
|
||||||
|
// yaml_free(value.prefix)
|
||||||
|
//
|
||||||
|
// return 0
|
||||||
|
//}
|
||||||
|
//
|
||||||
|
///*
|
||||||
|
// * Destroy a document object.
|
||||||
|
// */
|
||||||
|
//
|
||||||
|
//YAML_DECLARE(void)
|
||||||
|
//yaml_document_delete(document *yaml_document_t)
|
||||||
|
//{
|
||||||
|
// struct {
|
||||||
|
// error yaml_error_type_t
|
||||||
|
// } context
|
||||||
|
// tag_directive *yaml_tag_directive_t
|
||||||
|
//
|
||||||
|
// context.error = YAML_NO_ERROR // Eliminate a compiler warning.
|
||||||
|
//
|
||||||
|
// assert(document) // Non-NULL document object is expected.
|
||||||
|
//
|
||||||
|
// while (!STACK_EMPTY(&context, document.nodes)) {
|
||||||
|
// node yaml_node_t = POP(&context, document.nodes)
|
||||||
|
// yaml_free(node.tag)
|
||||||
|
// switch (node.type) {
|
||||||
|
// case YAML_SCALAR_NODE:
|
||||||
|
// yaml_free(node.data.scalar.value)
|
||||||
|
// break
|
||||||
|
// case YAML_SEQUENCE_NODE:
|
||||||
|
// STACK_DEL(&context, node.data.sequence.items)
|
||||||
|
// break
|
||||||
|
// case YAML_MAPPING_NODE:
|
||||||
|
// STACK_DEL(&context, node.data.mapping.pairs)
|
||||||
|
// break
|
||||||
|
// default:
|
||||||
|
// assert(0) // Should not happen.
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// STACK_DEL(&context, document.nodes)
|
||||||
|
//
|
||||||
|
// yaml_free(document.version_directive)
|
||||||
|
// for (tag_directive = document.tag_directives.start
|
||||||
|
// tag_directive != document.tag_directives.end
|
||||||
|
// tag_directive++) {
|
||||||
|
// yaml_free(tag_directive.handle)
|
||||||
|
// yaml_free(tag_directive.prefix)
|
||||||
|
// }
|
||||||
|
// yaml_free(document.tag_directives.start)
|
||||||
|
//
|
||||||
|
// memset(document, 0, sizeof(yaml_document_t))
|
||||||
|
//}
|
||||||
|
//
|
||||||
|
///**
|
||||||
|
// * Get a document node.
|
||||||
|
// */
|
||||||
|
//
|
||||||
|
//YAML_DECLARE(yaml_node_t *)
|
||||||
|
//yaml_document_get_node(document *yaml_document_t, index int)
|
||||||
|
//{
|
||||||
|
// assert(document) // Non-NULL document object is expected.
|
||||||
|
//
|
||||||
|
// if (index > 0 && document.nodes.start + index <= document.nodes.top) {
|
||||||
|
// return document.nodes.start + index - 1
|
||||||
|
// }
|
||||||
|
// return NULL
|
||||||
|
//}
|
||||||
|
//
|
||||||
|
///**
|
||||||
|
// * Get the root object.
|
||||||
|
// */
|
||||||
|
//
|
||||||
|
//YAML_DECLARE(yaml_node_t *)
|
||||||
|
//yaml_document_get_root_node(document *yaml_document_t)
|
||||||
|
//{
|
||||||
|
// assert(document) // Non-NULL document object is expected.
|
||||||
|
//
|
||||||
|
// if (document.nodes.top != document.nodes.start) {
|
||||||
|
// return document.nodes.start
|
||||||
|
// }
|
||||||
|
// return NULL
|
||||||
|
//}
|
||||||
|
//
|
||||||
|
///*
|
||||||
|
// * Add a scalar node to a document.
|
||||||
|
// */
|
||||||
|
//
|
||||||
|
//YAML_DECLARE(int)
|
||||||
|
//yaml_document_add_scalar(document *yaml_document_t,
|
||||||
|
// tag *yaml_char_t, value *yaml_char_t, length int,
|
||||||
|
// style yaml_scalar_style_t)
|
||||||
|
//{
|
||||||
|
// struct {
|
||||||
|
// error yaml_error_type_t
|
||||||
|
// } context
|
||||||
|
// mark yaml_mark_t = { 0, 0, 0 }
|
||||||
|
// tag_copy *yaml_char_t = NULL
|
||||||
|
// value_copy *yaml_char_t = NULL
|
||||||
|
// node yaml_node_t
|
||||||
|
//
|
||||||
|
// assert(document) // Non-NULL document object is expected.
|
||||||
|
// assert(value) // Non-NULL value is expected.
|
||||||
|
//
|
||||||
|
// if (!tag) {
|
||||||
|
// tag = (yaml_char_t *)YAML_DEFAULT_SCALAR_TAG
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error
|
||||||
|
// tag_copy = yaml_strdup(tag)
|
||||||
|
// if (!tag_copy) goto error
|
||||||
|
//
|
||||||
|
// if (length < 0) {
|
||||||
|
// length = strlen((char *)value)
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// if (!yaml_check_utf8(value, length)) goto error
|
||||||
|
// value_copy = yaml_malloc(length+1)
|
||||||
|
// if (!value_copy) goto error
|
||||||
|
// memcpy(value_copy, value, length)
|
||||||
|
// value_copy[length] = '\0'
|
||||||
|
//
|
||||||
|
// SCALAR_NODE_INIT(node, tag_copy, value_copy, length, style, mark, mark)
|
||||||
|
// if (!PUSH(&context, document.nodes, node)) goto error
|
||||||
|
//
|
||||||
|
// return document.nodes.top - document.nodes.start
|
||||||
|
//
|
||||||
|
//error:
|
||||||
|
// yaml_free(tag_copy)
|
||||||
|
// yaml_free(value_copy)
|
||||||
|
//
|
||||||
|
// return 0
|
||||||
|
//}
|
||||||
|
//
|
||||||
|
///*
|
||||||
|
// * Add a sequence node to a document.
|
||||||
|
// */
|
||||||
|
//
|
||||||
|
//YAML_DECLARE(int)
|
||||||
|
//yaml_document_add_sequence(document *yaml_document_t,
|
||||||
|
// tag *yaml_char_t, style yaml_sequence_style_t)
|
||||||
|
//{
|
||||||
|
// struct {
|
||||||
|
// error yaml_error_type_t
|
||||||
|
// } context
|
||||||
|
// mark yaml_mark_t = { 0, 0, 0 }
|
||||||
|
// tag_copy *yaml_char_t = NULL
|
||||||
|
// struct {
|
||||||
|
// start *yaml_node_item_t
|
||||||
|
// end *yaml_node_item_t
|
||||||
|
// top *yaml_node_item_t
|
||||||
|
// } items = { NULL, NULL, NULL }
|
||||||
|
// node yaml_node_t
|
||||||
|
//
|
||||||
|
// assert(document) // Non-NULL document object is expected.
|
||||||
|
//
|
||||||
|
// if (!tag) {
|
||||||
|
// tag = (yaml_char_t *)YAML_DEFAULT_SEQUENCE_TAG
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error
|
||||||
|
// tag_copy = yaml_strdup(tag)
|
||||||
|
// if (!tag_copy) goto error
|
||||||
|
//
|
||||||
|
// if (!STACK_INIT(&context, items, INITIAL_STACK_SIZE)) goto error
|
||||||
|
//
|
||||||
|
// SEQUENCE_NODE_INIT(node, tag_copy, items.start, items.end,
|
||||||
|
// style, mark, mark)
|
||||||
|
// if (!PUSH(&context, document.nodes, node)) goto error
|
||||||
|
//
|
||||||
|
// return document.nodes.top - document.nodes.start
|
||||||
|
//
|
||||||
|
//error:
|
||||||
|
// STACK_DEL(&context, items)
|
||||||
|
// yaml_free(tag_copy)
|
||||||
|
//
|
||||||
|
// return 0
|
||||||
|
//}
|
||||||
|
//
|
||||||
|
///*
|
||||||
|
// * Add a mapping node to a document.
|
||||||
|
// */
|
||||||
|
//
|
||||||
|
//YAML_DECLARE(int)
|
||||||
|
//yaml_document_add_mapping(document *yaml_document_t,
|
||||||
|
// tag *yaml_char_t, style yaml_mapping_style_t)
|
||||||
|
//{
|
||||||
|
// struct {
|
||||||
|
// error yaml_error_type_t
|
||||||
|
// } context
|
||||||
|
// mark yaml_mark_t = { 0, 0, 0 }
|
||||||
|
// tag_copy *yaml_char_t = NULL
|
||||||
|
// struct {
|
||||||
|
// start *yaml_node_pair_t
|
||||||
|
// end *yaml_node_pair_t
|
||||||
|
// top *yaml_node_pair_t
|
||||||
|
// } pairs = { NULL, NULL, NULL }
|
||||||
|
// node yaml_node_t
|
||||||
|
//
|
||||||
|
// assert(document) // Non-NULL document object is expected.
|
||||||
|
//
|
||||||
|
// if (!tag) {
|
||||||
|
// tag = (yaml_char_t *)YAML_DEFAULT_MAPPING_TAG
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error
|
||||||
|
// tag_copy = yaml_strdup(tag)
|
||||||
|
// if (!tag_copy) goto error
|
||||||
|
//
|
||||||
|
// if (!STACK_INIT(&context, pairs, INITIAL_STACK_SIZE)) goto error
|
||||||
|
//
|
||||||
|
// MAPPING_NODE_INIT(node, tag_copy, pairs.start, pairs.end,
|
||||||
|
// style, mark, mark)
|
||||||
|
// if (!PUSH(&context, document.nodes, node)) goto error
|
||||||
|
//
|
||||||
|
// return document.nodes.top - document.nodes.start
|
||||||
|
//
|
||||||
|
//error:
|
||||||
|
// STACK_DEL(&context, pairs)
|
||||||
|
// yaml_free(tag_copy)
|
||||||
|
//
|
||||||
|
// return 0
|
||||||
|
//}
|
||||||
|
//
|
||||||
|
///*
|
||||||
|
// * Append an item to a sequence node.
|
||||||
|
// */
|
||||||
|
//
|
||||||
|
//YAML_DECLARE(int)
|
||||||
|
//yaml_document_append_sequence_item(document *yaml_document_t,
|
||||||
|
// sequence int, item int)
|
||||||
|
//{
|
||||||
|
// struct {
|
||||||
|
// error yaml_error_type_t
|
||||||
|
// } context
|
||||||
|
//
|
||||||
|
// assert(document) // Non-NULL document is required.
|
||||||
|
// assert(sequence > 0
|
||||||
|
// && document.nodes.start + sequence <= document.nodes.top)
|
||||||
|
// // Valid sequence id is required.
|
||||||
|
// assert(document.nodes.start[sequence-1].type == YAML_SEQUENCE_NODE)
|
||||||
|
// // A sequence node is required.
|
||||||
|
// assert(item > 0 && document.nodes.start + item <= document.nodes.top)
|
||||||
|
// // Valid item id is required.
|
||||||
|
//
|
||||||
|
// if (!PUSH(&context,
|
||||||
|
// document.nodes.start[sequence-1].data.sequence.items, item))
|
||||||
|
// return 0
|
||||||
|
//
|
||||||
|
// return 1
|
||||||
|
//}
|
||||||
|
//
|
||||||
|
///*
|
||||||
|
// * Append a pair of a key and a value to a mapping node.
|
||||||
|
// */
|
||||||
|
//
|
||||||
|
//YAML_DECLARE(int)
|
||||||
|
//yaml_document_append_mapping_pair(document *yaml_document_t,
|
||||||
|
// mapping int, key int, value int)
|
||||||
|
//{
|
||||||
|
// struct {
|
||||||
|
// error yaml_error_type_t
|
||||||
|
// } context
|
||||||
|
//
|
||||||
|
// pair yaml_node_pair_t
|
||||||
|
//
|
||||||
|
// assert(document) // Non-NULL document is required.
|
||||||
|
// assert(mapping > 0
|
||||||
|
// && document.nodes.start + mapping <= document.nodes.top)
|
||||||
|
// // Valid mapping id is required.
|
||||||
|
// assert(document.nodes.start[mapping-1].type == YAML_MAPPING_NODE)
|
||||||
|
// // A mapping node is required.
|
||||||
|
// assert(key > 0 && document.nodes.start + key <= document.nodes.top)
|
||||||
|
// // Valid key id is required.
|
||||||
|
// assert(value > 0 && document.nodes.start + value <= document.nodes.top)
|
||||||
|
// // Valid value id is required.
|
||||||
|
//
|
||||||
|
// pair.key = key
|
||||||
|
// pair.value = value
|
||||||
|
//
|
||||||
|
// if (!PUSH(&context,
|
||||||
|
// document.nodes.start[mapping-1].data.mapping.pairs, pair))
|
||||||
|
// return 0
|
||||||
|
//
|
||||||
|
// return 1
|
||||||
|
//}
|
||||||
|
//
|
||||||
|
//
|
775
vendor/gopkg.in/yaml.v2/decode.go
generated
vendored
Normal file
@ -0,0 +1,775 @@
package yaml
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding"
|
||||||
|
"encoding/base64"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"math"
|
||||||
|
"reflect"
|
||||||
|
"strconv"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
documentNode = 1 << iota
|
||||||
|
mappingNode
|
||||||
|
sequenceNode
|
||||||
|
scalarNode
|
||||||
|
aliasNode
|
||||||
|
)
|
||||||
|
|
||||||
|
type node struct {
|
||||||
|
kind int
|
||||||
|
line, column int
|
||||||
|
tag string
|
||||||
|
// For an alias node, alias holds the resolved alias.
|
||||||
|
alias *node
|
||||||
|
value string
|
||||||
|
implicit bool
|
||||||
|
children []*node
|
||||||
|
anchors map[string]*node
|
||||||
|
}
|
||||||
|
|
||||||
|
// ----------------------------------------------------------------------------
|
||||||
|
// Parser, produces a node tree out of a libyaml event stream.
|
||||||
|
|
||||||
|
type parser struct {
|
||||||
|
parser yaml_parser_t
|
||||||
|
event yaml_event_t
|
||||||
|
doc *node
|
||||||
|
doneInit bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func newParser(b []byte) *parser {
|
||||||
|
p := parser{}
|
||||||
|
if !yaml_parser_initialize(&p.parser) {
|
||||||
|
panic("failed to initialize YAML emitter")
|
||||||
|
}
|
||||||
|
if len(b) == 0 {
|
||||||
|
b = []byte{'\n'}
|
||||||
|
}
|
||||||
|
yaml_parser_set_input_string(&p.parser, b)
|
||||||
|
return &p
|
||||||
|
}
|
||||||
|
|
||||||
|
func newParserFromReader(r io.Reader) *parser {
|
||||||
|
p := parser{}
|
||||||
|
if !yaml_parser_initialize(&p.parser) {
|
||||||
|
panic("failed to initialize YAML emitter")
|
||||||
|
}
|
||||||
|
yaml_parser_set_input_reader(&p.parser, r)
|
||||||
|
return &p
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *parser) init() {
|
||||||
|
if p.doneInit {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
p.expect(yaml_STREAM_START_EVENT)
|
||||||
|
p.doneInit = true
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *parser) destroy() {
|
||||||
|
if p.event.typ != yaml_NO_EVENT {
|
||||||
|
yaml_event_delete(&p.event)
|
||||||
|
}
|
||||||
|
yaml_parser_delete(&p.parser)
|
||||||
|
}
|
||||||
|
|
||||||
|
// expect consumes an event from the event stream and
|
||||||
|
// checks that it's of the expected type.
|
||||||
|
func (p *parser) expect(e yaml_event_type_t) {
|
||||||
|
if p.event.typ == yaml_NO_EVENT {
|
||||||
|
if !yaml_parser_parse(&p.parser, &p.event) {
|
||||||
|
p.fail()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if p.event.typ == yaml_STREAM_END_EVENT {
|
||||||
|
failf("attempted to go past the end of stream; corrupted value?")
|
||||||
|
}
|
||||||
|
if p.event.typ != e {
|
||||||
|
p.parser.problem = fmt.Sprintf("expected %s event but got %s", e, p.event.typ)
|
||||||
|
p.fail()
|
||||||
|
}
|
||||||
|
yaml_event_delete(&p.event)
|
||||||
|
p.event.typ = yaml_NO_EVENT
|
||||||
|
}
|
||||||
|
|
||||||
|
// peek peeks at the next event in the event stream,
|
||||||
|
// puts the results into p.event and returns the event type.
|
||||||
|
func (p *parser) peek() yaml_event_type_t {
|
||||||
|
if p.event.typ != yaml_NO_EVENT {
|
||||||
|
return p.event.typ
|
||||||
|
}
|
||||||
|
if !yaml_parser_parse(&p.parser, &p.event) {
|
||||||
|
p.fail()
|
||||||
|
}
|
||||||
|
return p.event.typ
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *parser) fail() {
|
||||||
|
var where string
|
||||||
|
var line int
|
||||||
|
if p.parser.problem_mark.line != 0 {
|
||||||
|
line = p.parser.problem_mark.line
|
||||||
|
// Scanner errors don't iterate line before returning error
|
||||||
|
if p.parser.error == yaml_SCANNER_ERROR {
|
||||||
|
line++
|
||||||
|
}
|
||||||
|
} else if p.parser.context_mark.line != 0 {
|
||||||
|
line = p.parser.context_mark.line
|
||||||
|
}
|
||||||
|
if line != 0 {
|
||||||
|
where = "line " + strconv.Itoa(line) + ": "
|
||||||
|
}
|
||||||
|
var msg string
|
||||||
|
if len(p.parser.problem) > 0 {
|
||||||
|
msg = p.parser.problem
|
||||||
|
} else {
|
||||||
|
msg = "unknown problem parsing YAML content"
|
||||||
|
}
|
||||||
|
failf("%s%s", where, msg)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *parser) anchor(n *node, anchor []byte) {
|
||||||
|
if anchor != nil {
|
||||||
|
p.doc.anchors[string(anchor)] = n
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *parser) parse() *node {
|
||||||
|
p.init()
|
||||||
|
switch p.peek() {
|
||||||
|
case yaml_SCALAR_EVENT:
|
||||||
|
return p.scalar()
|
||||||
|
case yaml_ALIAS_EVENT:
|
||||||
|
return p.alias()
|
||||||
|
case yaml_MAPPING_START_EVENT:
|
||||||
|
return p.mapping()
|
||||||
|
case yaml_SEQUENCE_START_EVENT:
|
||||||
|
return p.sequence()
|
||||||
|
case yaml_DOCUMENT_START_EVENT:
|
||||||
|
return p.document()
|
||||||
|
case yaml_STREAM_END_EVENT:
|
||||||
|
// Happens when attempting to decode an empty buffer.
|
||||||
|
return nil
|
||||||
|
default:
|
||||||
|
panic("attempted to parse unknown event: " + p.event.typ.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *parser) node(kind int) *node {
|
||||||
|
return &node{
|
||||||
|
kind: kind,
|
||||||
|
line: p.event.start_mark.line,
|
||||||
|
column: p.event.start_mark.column,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *parser) document() *node {
|
||||||
|
n := p.node(documentNode)
|
||||||
|
n.anchors = make(map[string]*node)
|
||||||
|
p.doc = n
|
||||||
|
p.expect(yaml_DOCUMENT_START_EVENT)
|
||||||
|
n.children = append(n.children, p.parse())
|
||||||
|
p.expect(yaml_DOCUMENT_END_EVENT)
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *parser) alias() *node {
|
||||||
|
n := p.node(aliasNode)
|
||||||
|
n.value = string(p.event.anchor)
|
||||||
|
n.alias = p.doc.anchors[n.value]
|
||||||
|
if n.alias == nil {
|
||||||
|
failf("unknown anchor '%s' referenced", n.value)
|
||||||
|
}
|
||||||
|
p.expect(yaml_ALIAS_EVENT)
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *parser) scalar() *node {
|
||||||
|
n := p.node(scalarNode)
|
||||||
|
n.value = string(p.event.value)
|
||||||
|
n.tag = string(p.event.tag)
|
||||||
|
n.implicit = p.event.implicit
|
||||||
|
p.anchor(n, p.event.anchor)
|
||||||
|
p.expect(yaml_SCALAR_EVENT)
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *parser) sequence() *node {
|
||||||
|
n := p.node(sequenceNode)
|
||||||
|
p.anchor(n, p.event.anchor)
|
||||||
|
p.expect(yaml_SEQUENCE_START_EVENT)
|
||||||
|
for p.peek() != yaml_SEQUENCE_END_EVENT {
|
||||||
|
n.children = append(n.children, p.parse())
|
||||||
|
}
|
||||||
|
p.expect(yaml_SEQUENCE_END_EVENT)
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *parser) mapping() *node {
|
||||||
|
n := p.node(mappingNode)
|
||||||
|
p.anchor(n, p.event.anchor)
|
||||||
|
p.expect(yaml_MAPPING_START_EVENT)
|
||||||
|
for p.peek() != yaml_MAPPING_END_EVENT {
|
||||||
|
n.children = append(n.children, p.parse(), p.parse())
|
||||||
|
}
|
||||||
|
p.expect(yaml_MAPPING_END_EVENT)
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
|
// ----------------------------------------------------------------------------
|
||||||
|
// Decoder, unmarshals a node into a provided value.
|
||||||
|
|
||||||
|
type decoder struct {
|
||||||
|
doc *node
|
||||||
|
aliases map[*node]bool
|
||||||
|
mapType reflect.Type
|
||||||
|
terrors []string
|
||||||
|
strict bool
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
mapItemType = reflect.TypeOf(MapItem{})
|
||||||
|
durationType = reflect.TypeOf(time.Duration(0))
|
||||||
|
defaultMapType = reflect.TypeOf(map[interface{}]interface{}{})
|
||||||
|
ifaceType = defaultMapType.Elem()
|
||||||
|
timeType = reflect.TypeOf(time.Time{})
|
||||||
|
ptrTimeType = reflect.TypeOf(&time.Time{})
|
||||||
|
)
|
||||||
|
|
||||||
|
func newDecoder(strict bool) *decoder {
|
||||||
|
d := &decoder{mapType: defaultMapType, strict: strict}
|
||||||
|
d.aliases = make(map[*node]bool)
|
||||||
|
return d
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *decoder) terror(n *node, tag string, out reflect.Value) {
|
||||||
|
if n.tag != "" {
|
||||||
|
tag = n.tag
|
||||||
|
}
|
||||||
|
value := n.value
|
||||||
|
if tag != yaml_SEQ_TAG && tag != yaml_MAP_TAG {
|
||||||
|
if len(value) > 10 {
|
||||||
|
value = " `" + value[:7] + "...`"
|
||||||
|
} else {
|
||||||
|
value = " `" + value + "`"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
d.terrors = append(d.terrors, fmt.Sprintf("line %d: cannot unmarshal %s%s into %s", n.line+1, shortTag(tag), value, out.Type()))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *decoder) callUnmarshaler(n *node, u Unmarshaler) (good bool) {
|
||||||
|
terrlen := len(d.terrors)
|
||||||
|
err := u.UnmarshalYAML(func(v interface{}) (err error) {
|
||||||
|
defer handleErr(&err)
|
||||||
|
d.unmarshal(n, reflect.ValueOf(v))
|
||||||
|
if len(d.terrors) > terrlen {
|
||||||
|
issues := d.terrors[terrlen:]
|
||||||
|
d.terrors = d.terrors[:terrlen]
|
||||||
|
return &TypeError{issues}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
if e, ok := err.(*TypeError); ok {
|
||||||
|
d.terrors = append(d.terrors, e.Errors...)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
fail(err)
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// d.prepare initializes and dereferences pointers and calls UnmarshalYAML
|
||||||
|
// if a value is found to implement it.
|
||||||
|
// It returns the initialized and dereferenced out value, whether
|
||||||
|
// unmarshalling was already done by UnmarshalYAML, and if so whether
|
||||||
|
// its types unmarshalled appropriately.
|
||||||
|
//
|
||||||
|
// If n holds a null value, prepare returns before doing anything.
|
||||||
|
func (d *decoder) prepare(n *node, out reflect.Value) (newout reflect.Value, unmarshaled, good bool) {
|
||||||
|
if n.tag == yaml_NULL_TAG || n.kind == scalarNode && n.tag == "" && (n.value == "null" || n.value == "~" || n.value == "" && n.implicit) {
|
||||||
|
return out, false, false
|
||||||
|
}
|
||||||
|
again := true
|
||||||
|
for again {
|
||||||
|
again = false
|
||||||
|
if out.Kind() == reflect.Ptr {
|
||||||
|
if out.IsNil() {
|
||||||
|
out.Set(reflect.New(out.Type().Elem()))
|
||||||
|
}
|
||||||
|
out = out.Elem()
|
||||||
|
again = true
|
||||||
|
}
|
||||||
|
if out.CanAddr() {
|
||||||
|
if u, ok := out.Addr().Interface().(Unmarshaler); ok {
|
||||||
|
good = d.callUnmarshaler(n, u)
|
||||||
|
return out, true, good
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return out, false, false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *decoder) unmarshal(n *node, out reflect.Value) (good bool) {
|
||||||
|
switch n.kind {
|
||||||
|
case documentNode:
|
||||||
|
return d.document(n, out)
|
||||||
|
case aliasNode:
|
||||||
|
return d.alias(n, out)
|
||||||
|
}
|
||||||
|
out, unmarshaled, good := d.prepare(n, out)
|
||||||
|
if unmarshaled {
|
||||||
|
return good
|
||||||
|
}
|
||||||
|
switch n.kind {
|
||||||
|
case scalarNode:
|
||||||
|
good = d.scalar(n, out)
|
||||||
|
case mappingNode:
|
||||||
|
good = d.mapping(n, out)
|
||||||
|
case sequenceNode:
|
||||||
|
good = d.sequence(n, out)
|
||||||
|
default:
|
||||||
|
panic("internal error: unknown node kind: " + strconv.Itoa(n.kind))
|
||||||
|
}
|
||||||
|
return good
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *decoder) document(n *node, out reflect.Value) (good bool) {
|
||||||
|
if len(n.children) == 1 {
|
||||||
|
d.doc = n
|
||||||
|
d.unmarshal(n.children[0], out)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *decoder) alias(n *node, out reflect.Value) (good bool) {
|
||||||
|
if d.aliases[n] {
|
||||||
|
// TODO this could actually be allowed in some circumstances.
|
||||||
|
failf("anchor '%s' value contains itself", n.value)
|
||||||
|
}
|
||||||
|
d.aliases[n] = true
|
||||||
|
good = d.unmarshal(n.alias, out)
|
||||||
|
delete(d.aliases, n)
|
||||||
|
return good
|
||||||
|
}
|
||||||
|
|
||||||
|
var zeroValue reflect.Value
|
||||||
|
|
||||||
|
func resetMap(out reflect.Value) {
|
||||||
|
for _, k := range out.MapKeys() {
|
||||||
|
out.SetMapIndex(k, zeroValue)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *decoder) scalar(n *node, out reflect.Value) bool {
|
||||||
|
var tag string
|
||||||
|
var resolved interface{}
|
||||||
|
if n.tag == "" && !n.implicit {
|
||||||
|
tag = yaml_STR_TAG
|
||||||
|
resolved = n.value
|
||||||
|
} else {
|
||||||
|
tag, resolved = resolve(n.tag, n.value)
|
||||||
|
if tag == yaml_BINARY_TAG {
|
||||||
|
data, err := base64.StdEncoding.DecodeString(resolved.(string))
|
||||||
|
if err != nil {
|
||||||
|
failf("!!binary value contains invalid base64 data")
|
||||||
|
}
|
||||||
|
resolved = string(data)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if resolved == nil {
|
||||||
|
if out.Kind() == reflect.Map && !out.CanAddr() {
|
||||||
|
resetMap(out)
|
||||||
|
} else {
|
||||||
|
out.Set(reflect.Zero(out.Type()))
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if resolvedv := reflect.ValueOf(resolved); out.Type() == resolvedv.Type() {
|
||||||
|
// We've resolved to exactly the type we want, so use that.
|
||||||
|
out.Set(resolvedv)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
// Perhaps we can use the value as a TextUnmarshaler to
|
||||||
|
// set its value.
|
||||||
|
if out.CanAddr() {
|
||||||
|
u, ok := out.Addr().Interface().(encoding.TextUnmarshaler)
|
||||||
|
if ok {
|
||||||
|
var text []byte
|
||||||
|
if tag == yaml_BINARY_TAG {
|
||||||
|
text = []byte(resolved.(string))
|
||||||
|
} else {
|
||||||
|
// We let any value be unmarshaled into TextUnmarshaler.
|
||||||
|
// That might be more lax than we'd like, but the
|
||||||
|
// TextUnmarshaler itself should bowl out any dubious values.
|
||||||
|
text = []byte(n.value)
|
||||||
|
}
|
||||||
|
err := u.UnmarshalText(text)
|
||||||
|
if err != nil {
|
||||||
|
fail(err)
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
switch out.Kind() {
|
||||||
|
case reflect.String:
|
||||||
|
if tag == yaml_BINARY_TAG {
|
||||||
|
out.SetString(resolved.(string))
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if resolved != nil {
|
||||||
|
out.SetString(n.value)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
case reflect.Interface:
|
||||||
|
if resolved == nil {
|
||||||
|
out.Set(reflect.Zero(out.Type()))
|
||||||
|
} else if tag == yaml_TIMESTAMP_TAG {
|
||||||
|
// It looks like a timestamp but for backward compatibility
|
||||||
|
// reasons we set it as a string, so that code that unmarshals
|
||||||
|
// timestamp-like values into interface{} will continue to
|
||||||
|
// see a string and not a time.Time.
|
||||||
|
// TODO(v3) Drop this.
|
||||||
|
out.Set(reflect.ValueOf(n.value))
|
||||||
|
} else {
|
||||||
|
out.Set(reflect.ValueOf(resolved))
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||||
|
switch resolved := resolved.(type) {
|
||||||
|
case int:
|
||||||
|
if !out.OverflowInt(int64(resolved)) {
|
||||||
|
out.SetInt(int64(resolved))
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
case int64:
|
||||||
|
if !out.OverflowInt(resolved) {
|
||||||
|
out.SetInt(resolved)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
case uint64:
|
||||||
|
if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
|
||||||
|
out.SetInt(int64(resolved))
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
case float64:
|
||||||
|
if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
|
||||||
|
out.SetInt(int64(resolved))
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
case string:
|
||||||
|
if out.Type() == durationType {
|
||||||
|
d, err := time.ParseDuration(resolved)
|
||||||
|
if err == nil {
|
||||||
|
out.SetInt(int64(d))
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||||
|
switch resolved := resolved.(type) {
|
||||||
|
case int:
|
||||||
|
if resolved >= 0 && !out.OverflowUint(uint64(resolved)) {
|
||||||
|
out.SetUint(uint64(resolved))
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
case int64:
|
||||||
|
if resolved >= 0 && !out.OverflowUint(uint64(resolved)) {
|
||||||
|
out.SetUint(uint64(resolved))
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
case uint64:
|
||||||
|
if !out.OverflowUint(uint64(resolved)) {
|
||||||
|
out.SetUint(uint64(resolved))
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
case float64:
|
||||||
|
if resolved <= math.MaxUint64 && !out.OverflowUint(uint64(resolved)) {
|
||||||
|
out.SetUint(uint64(resolved))
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case reflect.Bool:
|
||||||
|
switch resolved := resolved.(type) {
|
||||||
|
case bool:
|
||||||
|
out.SetBool(resolved)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
case reflect.Float32, reflect.Float64:
|
||||||
|
switch resolved := resolved.(type) {
|
||||||
|
case int:
|
||||||
|
out.SetFloat(float64(resolved))
|
||||||
|
return true
|
||||||
|
case int64:
|
||||||
|
out.SetFloat(float64(resolved))
|
||||||
|
return true
|
||||||
|
case uint64:
|
||||||
|
out.SetFloat(float64(resolved))
|
||||||
|
return true
|
||||||
|
case float64:
|
||||||
|
out.SetFloat(resolved)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
case reflect.Struct:
|
||||||
|
if resolvedv := reflect.ValueOf(resolved); out.Type() == resolvedv.Type() {
|
||||||
|
out.Set(resolvedv)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
case reflect.Ptr:
|
||||||
|
if out.Type().Elem() == reflect.TypeOf(resolved) {
|
||||||
|
// TODO Does this make sense? When is out a Ptr except when decoding a nil value?
|
||||||
|
elem := reflect.New(out.Type().Elem())
|
||||||
|
elem.Elem().Set(reflect.ValueOf(resolved))
|
||||||
|
out.Set(elem)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
d.terror(n, tag, out)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func settableValueOf(i interface{}) reflect.Value {
|
||||||
|
v := reflect.ValueOf(i)
|
||||||
|
sv := reflect.New(v.Type()).Elem()
|
||||||
|
sv.Set(v)
|
||||||
|
return sv
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *decoder) sequence(n *node, out reflect.Value) (good bool) {
|
||||||
|
l := len(n.children)
|
||||||
|
|
||||||
|
var iface reflect.Value
|
||||||
|
switch out.Kind() {
|
||||||
|
case reflect.Slice:
|
||||||
|
out.Set(reflect.MakeSlice(out.Type(), l, l))
|
||||||
|
case reflect.Array:
|
||||||
|
if l != out.Len() {
|
||||||
|
failf("invalid array: want %d elements but got %d", out.Len(), l)
|
||||||
|
}
|
||||||
|
case reflect.Interface:
|
||||||
|
// No type hints. Will have to use a generic sequence.
|
||||||
|
iface = out
|
||||||
|
out = settableValueOf(make([]interface{}, l))
|
||||||
|
default:
|
||||||
|
d.terror(n, yaml_SEQ_TAG, out)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
et := out.Type().Elem()
|
||||||
|
|
||||||
|
j := 0
|
||||||
|
for i := 0; i < l; i++ {
|
||||||
|
e := reflect.New(et).Elem()
|
||||||
|
if ok := d.unmarshal(n.children[i], e); ok {
|
||||||
|
out.Index(j).Set(e)
|
||||||
|
j++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if out.Kind() != reflect.Array {
|
||||||
|
out.Set(out.Slice(0, j))
|
||||||
|
}
|
||||||
|
if iface.IsValid() {
|
||||||
|
iface.Set(out)
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *decoder) mapping(n *node, out reflect.Value) (good bool) {
|
||||||
|
switch out.Kind() {
|
||||||
|
case reflect.Struct:
|
||||||
|
return d.mappingStruct(n, out)
|
||||||
|
case reflect.Slice:
|
||||||
|
return d.mappingSlice(n, out)
|
||||||
|
case reflect.Map:
|
||||||
|
// okay
|
||||||
|
case reflect.Interface:
|
||||||
|
if d.mapType.Kind() == reflect.Map {
|
||||||
|
iface := out
|
||||||
|
out = reflect.MakeMap(d.mapType)
|
||||||
|
iface.Set(out)
|
||||||
|
} else {
|
||||||
|
slicev := reflect.New(d.mapType).Elem()
|
||||||
|
if !d.mappingSlice(n, slicev) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
out.Set(slicev)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
d.terror(n, yaml_MAP_TAG, out)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
outt := out.Type()
|
||||||
|
kt := outt.Key()
|
||||||
|
et := outt.Elem()
|
||||||
|
|
||||||
|
mapType := d.mapType
|
||||||
|
if outt.Key() == ifaceType && outt.Elem() == ifaceType {
|
||||||
|
d.mapType = outt
|
||||||
|
}
|
||||||
|
|
||||||
|
if out.IsNil() {
|
||||||
|
out.Set(reflect.MakeMap(outt))
|
||||||
|
}
|
||||||
|
l := len(n.children)
|
||||||
|
for i := 0; i < l; i += 2 {
|
||||||
|
if isMerge(n.children[i]) {
|
||||||
|
d.merge(n.children[i+1], out)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
k := reflect.New(kt).Elem()
|
||||||
|
if d.unmarshal(n.children[i], k) {
|
||||||
|
kkind := k.Kind()
|
||||||
|
if kkind == reflect.Interface {
|
||||||
|
kkind = k.Elem().Kind()
|
||||||
|
}
|
||||||
|
if kkind == reflect.Map || kkind == reflect.Slice {
|
||||||
|
failf("invalid map key: %#v", k.Interface())
|
||||||
|
}
|
||||||
|
e := reflect.New(et).Elem()
|
||||||
|
if d.unmarshal(n.children[i+1], e) {
|
||||||
|
d.setMapIndex(n.children[i+1], out, k, e)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
d.mapType = mapType
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *decoder) setMapIndex(n *node, out, k, v reflect.Value) {
|
||||||
|
if d.strict && out.MapIndex(k) != zeroValue {
|
||||||
|
d.terrors = append(d.terrors, fmt.Sprintf("line %d: key %#v already set in map", n.line+1, k.Interface()))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
out.SetMapIndex(k, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *decoder) mappingSlice(n *node, out reflect.Value) (good bool) {
|
||||||
|
outt := out.Type()
|
||||||
|
if outt.Elem() != mapItemType {
|
||||||
|
d.terror(n, yaml_MAP_TAG, out)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
mapType := d.mapType
|
||||||
|
d.mapType = outt
|
||||||
|
|
||||||
|
var slice []MapItem
|
||||||
|
var l = len(n.children)
|
||||||
|
for i := 0; i < l; i += 2 {
|
||||||
|
if isMerge(n.children[i]) {
|
||||||
|
d.merge(n.children[i+1], out)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
item := MapItem{}
|
||||||
|
k := reflect.ValueOf(&item.Key).Elem()
|
||||||
|
if d.unmarshal(n.children[i], k) {
|
||||||
|
v := reflect.ValueOf(&item.Value).Elem()
|
||||||
|
if d.unmarshal(n.children[i+1], v) {
|
||||||
|
slice = append(slice, item)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
out.Set(reflect.ValueOf(slice))
|
||||||
|
d.mapType = mapType
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *decoder) mappingStruct(n *node, out reflect.Value) (good bool) {
|
||||||
|
sinfo, err := getStructInfo(out.Type())
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
name := settableValueOf("")
|
||||||
|
l := len(n.children)
|
||||||
|
|
||||||
|
var inlineMap reflect.Value
|
||||||
|
var elemType reflect.Type
|
||||||
|
if sinfo.InlineMap != -1 {
|
||||||
|
inlineMap = out.Field(sinfo.InlineMap)
|
||||||
|
inlineMap.Set(reflect.New(inlineMap.Type()).Elem())
|
||||||
|
elemType = inlineMap.Type().Elem()
|
||||||
|
}
|
||||||
|
|
||||||
|
var doneFields []bool
|
||||||
|
if d.strict {
|
||||||
|
doneFields = make([]bool, len(sinfo.FieldsList))
|
||||||
|
}
|
||||||
|
for i := 0; i < l; i += 2 {
|
||||||
|
ni := n.children[i]
|
||||||
|
if isMerge(ni) {
|
||||||
|
d.merge(n.children[i+1], out)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if !d.unmarshal(ni, name) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if info, ok := sinfo.FieldsMap[name.String()]; ok {
|
||||||
|
if d.strict {
|
||||||
|
if doneFields[info.Id] {
|
||||||
|
d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s already set in type %s", ni.line+1, name.String(), out.Type()))
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
doneFields[info.Id] = true
|
||||||
|
}
|
||||||
|
var field reflect.Value
|
||||||
|
if info.Inline == nil {
|
||||||
|
field = out.Field(info.Num)
|
||||||
|
} else {
|
||||||
|
field = out.FieldByIndex(info.Inline)
|
||||||
|
}
|
||||||
|
d.unmarshal(n.children[i+1], field)
|
||||||
|
} else if sinfo.InlineMap != -1 {
|
||||||
|
if inlineMap.IsNil() {
|
||||||
|
inlineMap.Set(reflect.MakeMap(inlineMap.Type()))
|
||||||
|
}
|
||||||
|
value := reflect.New(elemType).Elem()
|
||||||
|
d.unmarshal(n.children[i+1], value)
|
||||||
|
d.setMapIndex(n.children[i+1], inlineMap, name, value)
|
||||||
|
} else if d.strict {
|
||||||
|
d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s not found in type %s", ni.line+1, name.String(), out.Type()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func failWantMap() {
|
||||||
|
failf("map merge requires map or sequence of maps as the value")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *decoder) merge(n *node, out reflect.Value) {
|
||||||
|
switch n.kind {
|
||||||
|
case mappingNode:
|
||||||
|
d.unmarshal(n, out)
|
||||||
|
case aliasNode:
|
||||||
|
an, ok := d.doc.anchors[n.value]
|
||||||
|
if ok && an.kind != mappingNode {
|
||||||
|
failWantMap()
|
||||||
|
}
|
||||||
|
d.unmarshal(n, out)
|
||||||
|
case sequenceNode:
|
||||||
|
// Step backwards as earlier nodes take precedence.
|
||||||
|
for i := len(n.children) - 1; i >= 0; i-- {
|
||||||
|
ni := n.children[i]
|
||||||
|
if ni.kind == aliasNode {
|
||||||
|
an, ok := d.doc.anchors[ni.value]
|
||||||
|
if ok && an.kind != mappingNode {
|
||||||
|
failWantMap()
|
||||||
|
}
|
||||||
|
} else if ni.kind != mappingNode {
|
||||||
|
failWantMap()
|
||||||
|
}
|
||||||
|
d.unmarshal(ni, out)
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
failWantMap()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func isMerge(n *node) bool {
|
||||||
|
return n.kind == scalarNode && n.value == "<<" && (n.implicit == true || n.tag == yaml_MERGE_TAG)
|
||||||
|
}
|
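The merge() and isMerge() functions above close out decode.go; together they implement the YAML merge key ("<<"): the anchored mapping(s) named by the merge value are unmarshalled into the same output value, and keys set explicitly by the surrounding mapping overwrite what the merge brought in. A minimal usage sketch, not part of this commit; the document text and the anchor name "base" are invented for illustration:

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

// An anchored mapping plus a merge key, invented for this sketch.
const doc = `
base: &base
  host: localhost
  port: 5432
dev:
  <<: *base
  port: 5433
`

func main() {
	var out map[string]map[string]interface{}
	if err := yaml.Unmarshal([]byte(doc), &out); err != nil {
		panic(err)
	}
	// "host" comes from the merged anchor; the explicit "port" in "dev"
	// overwrites the merged value because it is set after the merge.
	fmt.Println(out["dev"]["host"], out["dev"]["port"]) // localhost 5433
}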
1685 vendor/gopkg.in/yaml.v2/emitterc.go (generated, vendored, new file): file diff suppressed because it is too large
362 vendor/gopkg.in/yaml.v2/encode.go (generated, vendored, new file) @@ -0,0 +1,362 @@
|
|||||||
|
package yaml
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"reflect"
|
||||||
|
"regexp"
|
||||||
|
"sort"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
"unicode/utf8"
|
||||||
|
)
|
||||||
|
|
||||||
|
type encoder struct {
|
||||||
|
emitter yaml_emitter_t
|
||||||
|
event yaml_event_t
|
||||||
|
out []byte
|
||||||
|
flow bool
|
||||||
|
// doneInit holds whether the initial stream_start_event has been
|
||||||
|
// emitted.
|
||||||
|
doneInit bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func newEncoder() *encoder {
|
||||||
|
e := &encoder{}
|
||||||
|
yaml_emitter_initialize(&e.emitter)
|
||||||
|
yaml_emitter_set_output_string(&e.emitter, &e.out)
|
||||||
|
yaml_emitter_set_unicode(&e.emitter, true)
|
||||||
|
return e
|
||||||
|
}
|
||||||
|
|
||||||
|
func newEncoderWithWriter(w io.Writer) *encoder {
|
||||||
|
e := &encoder{}
|
||||||
|
yaml_emitter_initialize(&e.emitter)
|
||||||
|
yaml_emitter_set_output_writer(&e.emitter, w)
|
||||||
|
yaml_emitter_set_unicode(&e.emitter, true)
|
||||||
|
return e
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *encoder) init() {
|
||||||
|
if e.doneInit {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
yaml_stream_start_event_initialize(&e.event, yaml_UTF8_ENCODING)
|
||||||
|
e.emit()
|
||||||
|
e.doneInit = true
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *encoder) finish() {
|
||||||
|
e.emitter.open_ended = false
|
||||||
|
yaml_stream_end_event_initialize(&e.event)
|
||||||
|
e.emit()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *encoder) destroy() {
|
||||||
|
yaml_emitter_delete(&e.emitter)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *encoder) emit() {
|
||||||
|
// This will internally delete the e.event value.
|
||||||
|
e.must(yaml_emitter_emit(&e.emitter, &e.event))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *encoder) must(ok bool) {
|
||||||
|
if !ok {
|
||||||
|
msg := e.emitter.problem
|
||||||
|
if msg == "" {
|
||||||
|
msg = "unknown problem generating YAML content"
|
||||||
|
}
|
||||||
|
failf("%s", msg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *encoder) marshalDoc(tag string, in reflect.Value) {
|
||||||
|
e.init()
|
||||||
|
yaml_document_start_event_initialize(&e.event, nil, nil, true)
|
||||||
|
e.emit()
|
||||||
|
e.marshal(tag, in)
|
||||||
|
yaml_document_end_event_initialize(&e.event, true)
|
||||||
|
e.emit()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *encoder) marshal(tag string, in reflect.Value) {
|
||||||
|
if !in.IsValid() || in.Kind() == reflect.Ptr && in.IsNil() {
|
||||||
|
e.nilv()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
iface := in.Interface()
|
||||||
|
switch m := iface.(type) {
|
||||||
|
case time.Time, *time.Time:
|
||||||
|
// Although time.Time implements TextMarshaler,
|
||||||
|
// we don't want to treat it as a string for YAML
|
||||||
|
// purposes because YAML has special support for
|
||||||
|
// timestamps.
|
||||||
|
case Marshaler:
|
||||||
|
v, err := m.MarshalYAML()
|
||||||
|
if err != nil {
|
||||||
|
fail(err)
|
||||||
|
}
|
||||||
|
if v == nil {
|
||||||
|
e.nilv()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
in = reflect.ValueOf(v)
|
||||||
|
case encoding.TextMarshaler:
|
||||||
|
text, err := m.MarshalText()
|
||||||
|
if err != nil {
|
||||||
|
fail(err)
|
||||||
|
}
|
||||||
|
in = reflect.ValueOf(string(text))
|
||||||
|
case nil:
|
||||||
|
e.nilv()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
switch in.Kind() {
|
||||||
|
case reflect.Interface:
|
||||||
|
e.marshal(tag, in.Elem())
|
||||||
|
case reflect.Map:
|
||||||
|
e.mapv(tag, in)
|
||||||
|
case reflect.Ptr:
|
||||||
|
if in.Type() == ptrTimeType {
|
||||||
|
e.timev(tag, in.Elem())
|
||||||
|
} else {
|
||||||
|
e.marshal(tag, in.Elem())
|
||||||
|
}
|
||||||
|
case reflect.Struct:
|
||||||
|
if in.Type() == timeType {
|
||||||
|
e.timev(tag, in)
|
||||||
|
} else {
|
||||||
|
e.structv(tag, in)
|
||||||
|
}
|
||||||
|
case reflect.Slice, reflect.Array:
|
||||||
|
if in.Type().Elem() == mapItemType {
|
||||||
|
e.itemsv(tag, in)
|
||||||
|
} else {
|
||||||
|
e.slicev(tag, in)
|
||||||
|
}
|
||||||
|
case reflect.String:
|
||||||
|
e.stringv(tag, in)
|
||||||
|
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||||
|
if in.Type() == durationType {
|
||||||
|
e.stringv(tag, reflect.ValueOf(iface.(time.Duration).String()))
|
||||||
|
} else {
|
||||||
|
e.intv(tag, in)
|
||||||
|
}
|
||||||
|
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||||
|
e.uintv(tag, in)
|
||||||
|
case reflect.Float32, reflect.Float64:
|
||||||
|
e.floatv(tag, in)
|
||||||
|
case reflect.Bool:
|
||||||
|
e.boolv(tag, in)
|
||||||
|
default:
|
||||||
|
panic("cannot marshal type: " + in.Type().String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *encoder) mapv(tag string, in reflect.Value) {
|
||||||
|
e.mappingv(tag, func() {
|
||||||
|
keys := keyList(in.MapKeys())
|
||||||
|
sort.Sort(keys)
|
||||||
|
for _, k := range keys {
|
||||||
|
e.marshal("", k)
|
||||||
|
e.marshal("", in.MapIndex(k))
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *encoder) itemsv(tag string, in reflect.Value) {
|
||||||
|
e.mappingv(tag, func() {
|
||||||
|
slice := in.Convert(reflect.TypeOf([]MapItem{})).Interface().([]MapItem)
|
||||||
|
for _, item := range slice {
|
||||||
|
e.marshal("", reflect.ValueOf(item.Key))
|
||||||
|
e.marshal("", reflect.ValueOf(item.Value))
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *encoder) structv(tag string, in reflect.Value) {
|
||||||
|
sinfo, err := getStructInfo(in.Type())
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
e.mappingv(tag, func() {
|
||||||
|
for _, info := range sinfo.FieldsList {
|
||||||
|
var value reflect.Value
|
||||||
|
if info.Inline == nil {
|
||||||
|
value = in.Field(info.Num)
|
||||||
|
} else {
|
||||||
|
value = in.FieldByIndex(info.Inline)
|
||||||
|
}
|
||||||
|
if info.OmitEmpty && isZero(value) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
e.marshal("", reflect.ValueOf(info.Key))
|
||||||
|
e.flow = info.Flow
|
||||||
|
e.marshal("", value)
|
||||||
|
}
|
||||||
|
if sinfo.InlineMap >= 0 {
|
||||||
|
m := in.Field(sinfo.InlineMap)
|
||||||
|
if m.Len() > 0 {
|
||||||
|
e.flow = false
|
||||||
|
keys := keyList(m.MapKeys())
|
||||||
|
sort.Sort(keys)
|
||||||
|
for _, k := range keys {
|
||||||
|
if _, found := sinfo.FieldsMap[k.String()]; found {
|
||||||
|
panic(fmt.Sprintf("Can't have key %q in inlined map; conflicts with struct field", k.String()))
|
||||||
|
}
|
||||||
|
e.marshal("", k)
|
||||||
|
e.flow = false
|
||||||
|
e.marshal("", m.MapIndex(k))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *encoder) mappingv(tag string, f func()) {
|
||||||
|
implicit := tag == ""
|
||||||
|
style := yaml_BLOCK_MAPPING_STYLE
|
||||||
|
if e.flow {
|
||||||
|
e.flow = false
|
||||||
|
style = yaml_FLOW_MAPPING_STYLE
|
||||||
|
}
|
||||||
|
yaml_mapping_start_event_initialize(&e.event, nil, []byte(tag), implicit, style)
|
||||||
|
e.emit()
|
||||||
|
f()
|
||||||
|
yaml_mapping_end_event_initialize(&e.event)
|
||||||
|
e.emit()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *encoder) slicev(tag string, in reflect.Value) {
|
||||||
|
implicit := tag == ""
|
||||||
|
style := yaml_BLOCK_SEQUENCE_STYLE
|
||||||
|
if e.flow {
|
||||||
|
e.flow = false
|
||||||
|
style = yaml_FLOW_SEQUENCE_STYLE
|
||||||
|
}
|
||||||
|
e.must(yaml_sequence_start_event_initialize(&e.event, nil, []byte(tag), implicit, style))
|
||||||
|
e.emit()
|
||||||
|
n := in.Len()
|
||||||
|
for i := 0; i < n; i++ {
|
||||||
|
e.marshal("", in.Index(i))
|
||||||
|
}
|
||||||
|
e.must(yaml_sequence_end_event_initialize(&e.event))
|
||||||
|
e.emit()
|
||||||
|
}
|
||||||
|
|
||||||
|
// isBase60 returns whether s is in base 60 notation as defined in YAML 1.1.
|
||||||
|
//
|
||||||
|
// The base 60 float notation in YAML 1.1 is a terrible idea and is unsupported
|
||||||
|
// in YAML 1.2 and by this package, but these should be marshalled quoted for
|
||||||
|
// the time being for compatibility with other parsers.
|
||||||
|
func isBase60Float(s string) (result bool) {
|
||||||
|
// Fast path.
|
||||||
|
if s == "" {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
c := s[0]
|
||||||
|
if !(c == '+' || c == '-' || c >= '0' && c <= '9') || strings.IndexByte(s, ':') < 0 {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
// Do the full match.
|
||||||
|
return base60float.MatchString(s)
|
||||||
|
}
|
||||||
|
|
||||||
|
// From http://yaml.org/type/float.html, except the regular expression there
|
||||||
|
// is bogus. In practice parsers do not enforce the "\.[0-9_]*" suffix.
|
||||||
|
var base60float = regexp.MustCompile(`^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+(?:\.[0-9_]*)?$`)
|
||||||
|
|
||||||
|
func (e *encoder) stringv(tag string, in reflect.Value) {
|
||||||
|
var style yaml_scalar_style_t
|
||||||
|
s := in.String()
|
||||||
|
canUsePlain := true
|
||||||
|
switch {
|
||||||
|
case !utf8.ValidString(s):
|
||||||
|
if tag == yaml_BINARY_TAG {
|
||||||
|
failf("explicitly tagged !!binary data must be base64-encoded")
|
||||||
|
}
|
||||||
|
if tag != "" {
|
||||||
|
failf("cannot marshal invalid UTF-8 data as %s", shortTag(tag))
|
||||||
|
}
|
||||||
|
// It can't be encoded directly as YAML so use a binary tag
|
||||||
|
// and encode it as base64.
|
||||||
|
tag = yaml_BINARY_TAG
|
||||||
|
s = encodeBase64(s)
|
||||||
|
case tag == "":
|
||||||
|
// Check to see if it would resolve to a specific
|
||||||
|
// tag when encoded unquoted. If it doesn't,
|
||||||
|
// there's no need to quote it.
|
||||||
|
rtag, _ := resolve("", s)
|
||||||
|
canUsePlain = rtag == yaml_STR_TAG && !isBase60Float(s)
|
||||||
|
}
|
||||||
|
// Note: it's possible for user code to emit invalid YAML
|
||||||
|
// if they explicitly specify a tag and a string containing
|
||||||
|
// text that's incompatible with that tag.
|
||||||
|
switch {
|
||||||
|
case strings.Contains(s, "\n"):
|
||||||
|
style = yaml_LITERAL_SCALAR_STYLE
|
||||||
|
case canUsePlain:
|
||||||
|
style = yaml_PLAIN_SCALAR_STYLE
|
||||||
|
default:
|
||||||
|
style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
|
||||||
|
}
|
||||||
|
e.emitScalar(s, "", tag, style)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *encoder) boolv(tag string, in reflect.Value) {
|
||||||
|
var s string
|
||||||
|
if in.Bool() {
|
||||||
|
s = "true"
|
||||||
|
} else {
|
||||||
|
s = "false"
|
||||||
|
}
|
||||||
|
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *encoder) intv(tag string, in reflect.Value) {
|
||||||
|
s := strconv.FormatInt(in.Int(), 10)
|
||||||
|
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *encoder) uintv(tag string, in reflect.Value) {
|
||||||
|
s := strconv.FormatUint(in.Uint(), 10)
|
||||||
|
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *encoder) timev(tag string, in reflect.Value) {
|
||||||
|
t := in.Interface().(time.Time)
|
||||||
|
s := t.Format(time.RFC3339Nano)
|
||||||
|
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *encoder) floatv(tag string, in reflect.Value) {
|
||||||
|
// Issue #352: When formatting, use the precision of the underlying value
|
||||||
|
precision := 64
|
||||||
|
if in.Kind() == reflect.Float32 {
|
||||||
|
precision = 32
|
||||||
|
}
|
||||||
|
|
||||||
|
s := strconv.FormatFloat(in.Float(), 'g', -1, precision)
|
||||||
|
switch s {
|
||||||
|
case "+Inf":
|
||||||
|
s = ".inf"
|
||||||
|
case "-Inf":
|
||||||
|
s = "-.inf"
|
||||||
|
case "NaN":
|
||||||
|
s = ".nan"
|
||||||
|
}
|
||||||
|
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *encoder) nilv() {
|
||||||
|
e.emitScalar("null", "", "", yaml_PLAIN_SCALAR_STYLE)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *encoder) emitScalar(value, anchor, tag string, style yaml_scalar_style_t) {
|
||||||
|
implicit := tag == ""
|
||||||
|
e.must(yaml_scalar_event_initialize(&e.event, []byte(anchor), []byte(tag), []byte(value), implicit, implicit, style))
|
||||||
|
e.emit()
|
||||||
|
}
|
5 vendor/gopkg.in/yaml.v2/go.mod (generated, vendored, new file) @@ -0,0 +1,5 @@
|
|||||||
|
module "gopkg.in/yaml.v2"
|
||||||
|
|
||||||
|
require (
|
||||||
|
"gopkg.in/check.v1" v0.0.0-20161208181325-20d25e280405
|
||||||
|
)
|
1095 vendor/gopkg.in/yaml.v2/parserc.go (generated, vendored, new file): file diff suppressed because it is too large
412 vendor/gopkg.in/yaml.v2/readerc.go (generated, vendored, new file) @@ -0,0 +1,412 @@
|
|||||||
|
package yaml
|
||||||
|
|
||||||
|
import (
|
||||||
|
"io"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Set the reader error and return 0.
|
||||||
|
func yaml_parser_set_reader_error(parser *yaml_parser_t, problem string, offset int, value int) bool {
|
||||||
|
parser.error = yaml_READER_ERROR
|
||||||
|
parser.problem = problem
|
||||||
|
parser.problem_offset = offset
|
||||||
|
parser.problem_value = value
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Byte order marks.
|
||||||
|
const (
|
||||||
|
bom_UTF8 = "\xef\xbb\xbf"
|
||||||
|
bom_UTF16LE = "\xff\xfe"
|
||||||
|
bom_UTF16BE = "\xfe\xff"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Determine the input stream encoding by checking the BOM symbol. If no BOM is
|
||||||
|
// found, the UTF-8 encoding is assumed. Return true on success, false on failure.
|
||||||
|
func yaml_parser_determine_encoding(parser *yaml_parser_t) bool {
|
||||||
|
// Ensure that we had enough bytes in the raw buffer.
|
||||||
|
for !parser.eof && len(parser.raw_buffer)-parser.raw_buffer_pos < 3 {
|
||||||
|
if !yaml_parser_update_raw_buffer(parser) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine the encoding.
|
||||||
|
buf := parser.raw_buffer
|
||||||
|
pos := parser.raw_buffer_pos
|
||||||
|
avail := len(buf) - pos
|
||||||
|
if avail >= 2 && buf[pos] == bom_UTF16LE[0] && buf[pos+1] == bom_UTF16LE[1] {
|
||||||
|
parser.encoding = yaml_UTF16LE_ENCODING
|
||||||
|
parser.raw_buffer_pos += 2
|
||||||
|
parser.offset += 2
|
||||||
|
} else if avail >= 2 && buf[pos] == bom_UTF16BE[0] && buf[pos+1] == bom_UTF16BE[1] {
|
||||||
|
parser.encoding = yaml_UTF16BE_ENCODING
|
||||||
|
parser.raw_buffer_pos += 2
|
||||||
|
parser.offset += 2
|
||||||
|
} else if avail >= 3 && buf[pos] == bom_UTF8[0] && buf[pos+1] == bom_UTF8[1] && buf[pos+2] == bom_UTF8[2] {
|
||||||
|
parser.encoding = yaml_UTF8_ENCODING
|
||||||
|
parser.raw_buffer_pos += 3
|
||||||
|
parser.offset += 3
|
||||||
|
} else {
|
||||||
|
parser.encoding = yaml_UTF8_ENCODING
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update the raw buffer.
|
||||||
|
func yaml_parser_update_raw_buffer(parser *yaml_parser_t) bool {
|
||||||
|
size_read := 0
|
||||||
|
|
||||||
|
// Return if the raw buffer is full.
|
||||||
|
if parser.raw_buffer_pos == 0 && len(parser.raw_buffer) == cap(parser.raw_buffer) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return on EOF.
|
||||||
|
if parser.eof {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Move the remaining bytes in the raw buffer to the beginning.
|
||||||
|
if parser.raw_buffer_pos > 0 && parser.raw_buffer_pos < len(parser.raw_buffer) {
|
||||||
|
copy(parser.raw_buffer, parser.raw_buffer[parser.raw_buffer_pos:])
|
||||||
|
}
|
||||||
|
parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)-parser.raw_buffer_pos]
|
||||||
|
parser.raw_buffer_pos = 0
|
||||||
|
|
||||||
|
// Call the read handler to fill the buffer.
|
||||||
|
size_read, err := parser.read_handler(parser, parser.raw_buffer[len(parser.raw_buffer):cap(parser.raw_buffer)])
|
||||||
|
parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)+size_read]
|
||||||
|
if err == io.EOF {
|
||||||
|
parser.eof = true
|
||||||
|
} else if err != nil {
|
||||||
|
return yaml_parser_set_reader_error(parser, "input error: "+err.Error(), parser.offset, -1)
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure that the buffer contains at least `length` characters.
|
||||||
|
// Return true on success, false on failure.
|
||||||
|
//
|
||||||
|
// The length is supposed to be significantly less than the buffer size.
|
||||||
|
func yaml_parser_update_buffer(parser *yaml_parser_t, length int) bool {
|
||||||
|
if parser.read_handler == nil {
|
||||||
|
panic("read handler must be set")
|
||||||
|
}
|
||||||
|
|
||||||
|
// [Go] This function was changed to guarantee the requested length size at EOF.
|
||||||
|
// The fact we need to do this is pretty awful, but the description above implies
|
||||||
|
// that this should be the case, and there are tests that depend on it.
|
||||||
|
|
||||||
|
// If the EOF flag is set and the raw buffer is empty, do nothing.
|
||||||
|
if parser.eof && parser.raw_buffer_pos == len(parser.raw_buffer) {
|
||||||
|
// [Go] ACTUALLY! Read the documentation of this function above.
|
||||||
|
// This is just broken. To return true, we need to have the
|
||||||
|
// given length in the buffer. Not doing that means every single
|
||||||
|
// check that calls this function to make sure the buffer has a
|
||||||
|
// given length is, in Go, panicking; or, in C, accessing invalid memory.
|
||||||
|
//return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return if the buffer contains enough characters.
|
||||||
|
if parser.unread >= length {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine the input encoding if it is not known yet.
|
||||||
|
if parser.encoding == yaml_ANY_ENCODING {
|
||||||
|
if !yaml_parser_determine_encoding(parser) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Move the unread characters to the beginning of the buffer.
|
||||||
|
buffer_len := len(parser.buffer)
|
||||||
|
if parser.buffer_pos > 0 && parser.buffer_pos < buffer_len {
|
||||||
|
copy(parser.buffer, parser.buffer[parser.buffer_pos:])
|
||||||
|
buffer_len -= parser.buffer_pos
|
||||||
|
parser.buffer_pos = 0
|
||||||
|
} else if parser.buffer_pos == buffer_len {
|
||||||
|
buffer_len = 0
|
||||||
|
parser.buffer_pos = 0
|
||||||
|
}
|
||||||
|
|
||||||
|
// Open the whole buffer for writing, and cut it before returning.
|
||||||
|
parser.buffer = parser.buffer[:cap(parser.buffer)]
|
||||||
|
|
||||||
|
// Fill the buffer until it has enough characters.
|
||||||
|
first := true
|
||||||
|
for parser.unread < length {
|
||||||
|
|
||||||
|
// Fill the raw buffer if necessary.
|
||||||
|
if !first || parser.raw_buffer_pos == len(parser.raw_buffer) {
|
||||||
|
if !yaml_parser_update_raw_buffer(parser) {
|
||||||
|
parser.buffer = parser.buffer[:buffer_len]
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
first = false
|
||||||
|
|
||||||
|
// Decode the raw buffer.
|
||||||
|
inner:
|
||||||
|
for parser.raw_buffer_pos != len(parser.raw_buffer) {
|
||||||
|
var value rune
|
||||||
|
var width int
|
||||||
|
|
||||||
|
raw_unread := len(parser.raw_buffer) - parser.raw_buffer_pos
|
||||||
|
|
||||||
|
// Decode the next character.
|
||||||
|
switch parser.encoding {
|
||||||
|
case yaml_UTF8_ENCODING:
|
||||||
|
// Decode a UTF-8 character. Check RFC 3629
|
||||||
|
// (http://www.ietf.org/rfc/rfc3629.txt) for more details.
|
||||||
|
//
|
||||||
|
// The following table (taken from the RFC) is used for
|
||||||
|
// decoding.
|
||||||
|
//
|
||||||
|
// Char. number range | UTF-8 octet sequence
|
||||||
|
// (hexadecimal) | (binary)
|
||||||
|
// --------------------+------------------------------------
|
||||||
|
// 0000 0000-0000 007F | 0xxxxxxx
|
||||||
|
// 0000 0080-0000 07FF | 110xxxxx 10xxxxxx
|
||||||
|
// 0000 0800-0000 FFFF | 1110xxxx 10xxxxxx 10xxxxxx
|
||||||
|
// 0001 0000-0010 FFFF | 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
|
||||||
|
//
|
||||||
|
// Additionally, the characters in the range 0xD800-0xDFFF
|
||||||
|
// are prohibited as they are reserved for use with UTF-16
|
||||||
|
// surrogate pairs.
|
||||||
|
|
||||||
|
// Determine the length of the UTF-8 sequence.
|
||||||
|
octet := parser.raw_buffer[parser.raw_buffer_pos]
|
||||||
|
switch {
|
||||||
|
case octet&0x80 == 0x00:
|
||||||
|
width = 1
|
||||||
|
case octet&0xE0 == 0xC0:
|
||||||
|
width = 2
|
||||||
|
case octet&0xF0 == 0xE0:
|
||||||
|
width = 3
|
||||||
|
case octet&0xF8 == 0xF0:
|
||||||
|
width = 4
|
||||||
|
default:
|
||||||
|
// The leading octet is invalid.
|
||||||
|
return yaml_parser_set_reader_error(parser,
|
||||||
|
"invalid leading UTF-8 octet",
|
||||||
|
parser.offset, int(octet))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the raw buffer contains an incomplete character.
|
||||||
|
if width > raw_unread {
|
||||||
|
if parser.eof {
|
||||||
|
return yaml_parser_set_reader_error(parser,
|
||||||
|
"incomplete UTF-8 octet sequence",
|
||||||
|
parser.offset, -1)
|
||||||
|
}
|
||||||
|
break inner
|
||||||
|
}
|
||||||
|
|
||||||
|
// Decode the leading octet.
|
||||||
|
switch {
|
||||||
|
case octet&0x80 == 0x00:
|
||||||
|
value = rune(octet & 0x7F)
|
||||||
|
case octet&0xE0 == 0xC0:
|
||||||
|
value = rune(octet & 0x1F)
|
||||||
|
case octet&0xF0 == 0xE0:
|
||||||
|
value = rune(octet & 0x0F)
|
||||||
|
case octet&0xF8 == 0xF0:
|
||||||
|
value = rune(octet & 0x07)
|
||||||
|
default:
|
||||||
|
value = 0
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check and decode the trailing octets.
|
||||||
|
for k := 1; k < width; k++ {
|
||||||
|
octet = parser.raw_buffer[parser.raw_buffer_pos+k]
|
||||||
|
|
||||||
|
// Check if the octet is valid.
|
||||||
|
if (octet & 0xC0) != 0x80 {
|
||||||
|
return yaml_parser_set_reader_error(parser,
|
||||||
|
"invalid trailing UTF-8 octet",
|
||||||
|
parser.offset+k, int(octet))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Decode the octet.
|
||||||
|
value = (value << 6) + rune(octet&0x3F)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check the length of the sequence against the value.
|
||||||
|
switch {
|
||||||
|
case width == 1:
|
||||||
|
case width == 2 && value >= 0x80:
|
||||||
|
case width == 3 && value >= 0x800:
|
||||||
|
case width == 4 && value >= 0x10000:
|
||||||
|
default:
|
||||||
|
return yaml_parser_set_reader_error(parser,
|
||||||
|
"invalid length of a UTF-8 sequence",
|
||||||
|
parser.offset, -1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check the range of the value.
|
||||||
|
if value >= 0xD800 && value <= 0xDFFF || value > 0x10FFFF {
|
||||||
|
return yaml_parser_set_reader_error(parser,
|
||||||
|
"invalid Unicode character",
|
||||||
|
parser.offset, int(value))
|
||||||
|
}
|
||||||
|
|
||||||
|
case yaml_UTF16LE_ENCODING, yaml_UTF16BE_ENCODING:
|
||||||
|
var low, high int
|
||||||
|
if parser.encoding == yaml_UTF16LE_ENCODING {
|
||||||
|
low, high = 0, 1
|
||||||
|
} else {
|
||||||
|
low, high = 1, 0
|
||||||
|
}
|
||||||
|
|
||||||
|
// The UTF-16 encoding is not as simple as one might
|
||||||
|
// naively think. Check RFC 2781
|
||||||
|
// (http://www.ietf.org/rfc/rfc2781.txt).
|
||||||
|
//
|
||||||
|
// Normally, two subsequent bytes describe a Unicode
|
||||||
|
// character. However a special technique (called a
|
||||||
|
// surrogate pair) is used for specifying character
|
||||||
|
// values larger than 0xFFFF.
|
||||||
|
//
|
||||||
|
// A surrogate pair consists of two pseudo-characters:
|
||||||
|
// high surrogate area (0xD800-0xDBFF)
|
||||||
|
// low surrogate area (0xDC00-0xDFFF)
|
||||||
|
//
|
||||||
|
// The following formulas are used for decoding
|
||||||
|
// and encoding characters using surrogate pairs:
|
||||||
|
//
|
||||||
|
// U = U' + 0x10000 (0x01 00 00 <= U <= 0x10 FF FF)
|
||||||
|
// U' = yyyyyyyyyyxxxxxxxxxx (0 <= U' <= 0x0F FF FF)
|
||||||
|
// W1 = 110110yyyyyyyyyy
|
||||||
|
// W2 = 110111xxxxxxxxxx
|
||||||
|
//
|
||||||
|
// where U is the character value, W1 is the high surrogate
|
||||||
|
// area, W2 is the low surrogate area.
|
||||||
|
|
||||||
|
// Check for incomplete UTF-16 character.
|
||||||
|
if raw_unread < 2 {
|
||||||
|
if parser.eof {
|
||||||
|
return yaml_parser_set_reader_error(parser,
|
||||||
|
"incomplete UTF-16 character",
|
||||||
|
parser.offset, -1)
|
||||||
|
}
|
||||||
|
break inner
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the character.
|
||||||
|
value = rune(parser.raw_buffer[parser.raw_buffer_pos+low]) +
|
||||||
|
(rune(parser.raw_buffer[parser.raw_buffer_pos+high]) << 8)
|
||||||
|
|
||||||
|
// Check for unexpected low surrogate area.
|
||||||
|
if value&0xFC00 == 0xDC00 {
|
||||||
|
return yaml_parser_set_reader_error(parser,
|
||||||
|
"unexpected low surrogate area",
|
||||||
|
parser.offset, int(value))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for a high surrogate area.
|
||||||
|
if value&0xFC00 == 0xD800 {
|
||||||
|
width = 4
|
||||||
|
|
||||||
|
// Check for incomplete surrogate pair.
|
||||||
|
if raw_unread < 4 {
|
||||||
|
if parser.eof {
|
||||||
|
return yaml_parser_set_reader_error(parser,
|
||||||
|
"incomplete UTF-16 surrogate pair",
|
||||||
|
parser.offset, -1)
|
||||||
|
}
|
||||||
|
break inner
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the next character.
|
||||||
|
value2 := rune(parser.raw_buffer[parser.raw_buffer_pos+low+2]) +
|
||||||
|
(rune(parser.raw_buffer[parser.raw_buffer_pos+high+2]) << 8)
|
||||||
|
|
||||||
|
// Check for a low surrogate area.
|
||||||
|
if value2&0xFC00 != 0xDC00 {
|
||||||
|
return yaml_parser_set_reader_error(parser,
|
||||||
|
"expected low surrogate area",
|
||||||
|
parser.offset+2, int(value2))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate the value of the surrogate pair.
|
||||||
|
value = 0x10000 + ((value & 0x3FF) << 10) + (value2 & 0x3FF)
|
||||||
|
} else {
|
||||||
|
width = 2
|
||||||
|
}
|
||||||
|
|
||||||
|
default:
|
||||||
|
panic("impossible")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the character is in the allowed range:
|
||||||
|
// #x9 | #xA | #xD | [#x20-#x7E] (8 bit)
|
||||||
|
// | #x85 | [#xA0-#xD7FF] | [#xE000-#xFFFD] (16 bit)
|
||||||
|
// | [#x10000-#x10FFFF] (32 bit)
|
||||||
|
switch {
|
||||||
|
case value == 0x09:
|
||||||
|
case value == 0x0A:
|
||||||
|
case value == 0x0D:
|
||||||
|
case value >= 0x20 && value <= 0x7E:
|
||||||
|
case value == 0x85:
|
||||||
|
case value >= 0xA0 && value <= 0xD7FF:
|
||||||
|
case value >= 0xE000 && value <= 0xFFFD:
|
||||||
|
case value >= 0x10000 && value <= 0x10FFFF:
|
||||||
|
default:
|
||||||
|
return yaml_parser_set_reader_error(parser,
|
||||||
|
"control characters are not allowed",
|
||||||
|
parser.offset, int(value))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Move the raw pointers.
|
||||||
|
parser.raw_buffer_pos += width
|
||||||
|
parser.offset += width
|
||||||
|
|
||||||
|
// Finally put the character into the buffer.
|
||||||
|
if value <= 0x7F {
|
||||||
|
// 0000 0000-0000 007F . 0xxxxxxx
|
||||||
|
parser.buffer[buffer_len+0] = byte(value)
|
||||||
|
buffer_len += 1
|
||||||
|
} else if value <= 0x7FF {
|
||||||
|
// 0000 0080-0000 07FF . 110xxxxx 10xxxxxx
|
||||||
|
parser.buffer[buffer_len+0] = byte(0xC0 + (value >> 6))
|
||||||
|
parser.buffer[buffer_len+1] = byte(0x80 + (value & 0x3F))
|
||||||
|
buffer_len += 2
|
||||||
|
} else if value <= 0xFFFF {
|
||||||
|
// 0000 0800-0000 FFFF . 1110xxxx 10xxxxxx 10xxxxxx
|
||||||
|
parser.buffer[buffer_len+0] = byte(0xE0 + (value >> 12))
|
||||||
|
parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 6) & 0x3F))
|
||||||
|
parser.buffer[buffer_len+2] = byte(0x80 + (value & 0x3F))
|
||||||
|
buffer_len += 3
|
||||||
|
} else {
|
||||||
|
// 0001 0000-0010 FFFF . 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
|
||||||
|
parser.buffer[buffer_len+0] = byte(0xF0 + (value >> 18))
|
||||||
|
parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 12) & 0x3F))
|
||||||
|
parser.buffer[buffer_len+2] = byte(0x80 + ((value >> 6) & 0x3F))
|
||||||
|
parser.buffer[buffer_len+3] = byte(0x80 + (value & 0x3F))
|
||||||
|
buffer_len += 4
|
||||||
|
}
|
||||||
|
|
||||||
|
parser.unread++
|
||||||
|
}
|
||||||
|
|
||||||
|
// On EOF, put NUL into the buffer and return.
|
||||||
|
if parser.eof {
|
||||||
|
parser.buffer[buffer_len] = 0
|
||||||
|
buffer_len++
|
||||||
|
parser.unread++
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// [Go] Read the documentation of this function above. To return true,
|
||||||
|
// we need to have the given length in the buffer. Not doing that means
|
||||||
|
// every single check that calls this function to make sure the buffer
|
||||||
|
// has a given length is, in Go, panicking; or, in C, accessing invalid memory.
|
||||||
|
// This happens here due to the EOF above breaking early.
|
||||||
|
for buffer_len < length {
|
||||||
|
parser.buffer[buffer_len] = 0
|
||||||
|
buffer_len++
|
||||||
|
}
|
||||||
|
parser.buffer = parser.buffer[:buffer_len]
|
||||||
|
return true
|
||||||
|
}
|
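yaml_parser_update_buffer is the last function in readerc.go. One practical consequence of the BOM detection above is that Unmarshal accepts UTF-16 input transparently; a hedged sketch, where the utf16le helper is my own illustration and not part of the package:

package main

import (
	"fmt"
	"unicode/utf16"

	"gopkg.in/yaml.v2"
)

// utf16le encodes s as UTF-16LE with a leading BOM, the form that
// yaml_parser_determine_encoding recognises.
func utf16le(s string) []byte {
	units := utf16.Encode([]rune("\uFEFF" + s))
	buf := make([]byte, 0, len(units)*2)
	for _, u := range units {
		buf = append(buf, byte(u), byte(u>>8))
	}
	return buf
}

func main() {
	var out map[string]int
	if err := yaml.Unmarshal(utf16le("answer: 42\n"), &out); err != nil {
		panic(err)
	}
	fmt.Println(out["answer"]) // 42
}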
258 vendor/gopkg.in/yaml.v2/resolve.go (generated, vendored, new file) @@ -0,0 +1,258 @@
|
|||||||
|
package yaml
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/base64"
|
||||||
|
"math"
|
||||||
|
"regexp"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
type resolveMapItem struct {
|
||||||
|
value interface{}
|
||||||
|
tag string
|
||||||
|
}
|
||||||
|
|
||||||
|
var resolveTable = make([]byte, 256)
|
||||||
|
var resolveMap = make(map[string]resolveMapItem)
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
t := resolveTable
|
||||||
|
t[int('+')] = 'S' // Sign
|
||||||
|
t[int('-')] = 'S'
|
||||||
|
for _, c := range "0123456789" {
|
||||||
|
t[int(c)] = 'D' // Digit
|
||||||
|
}
|
||||||
|
for _, c := range "yYnNtTfFoO~" {
|
||||||
|
t[int(c)] = 'M' // In map
|
||||||
|
}
|
||||||
|
t[int('.')] = '.' // Float (potentially in map)
|
||||||
|
|
||||||
|
var resolveMapList = []struct {
|
||||||
|
v interface{}
|
||||||
|
tag string
|
||||||
|
l []string
|
||||||
|
}{
|
||||||
|
{true, yaml_BOOL_TAG, []string{"y", "Y", "yes", "Yes", "YES"}},
|
||||||
|
{true, yaml_BOOL_TAG, []string{"true", "True", "TRUE"}},
|
||||||
|
{true, yaml_BOOL_TAG, []string{"on", "On", "ON"}},
|
||||||
|
{false, yaml_BOOL_TAG, []string{"n", "N", "no", "No", "NO"}},
|
||||||
|
{false, yaml_BOOL_TAG, []string{"false", "False", "FALSE"}},
|
||||||
|
{false, yaml_BOOL_TAG, []string{"off", "Off", "OFF"}},
|
||||||
|
{nil, yaml_NULL_TAG, []string{"", "~", "null", "Null", "NULL"}},
|
||||||
|
{math.NaN(), yaml_FLOAT_TAG, []string{".nan", ".NaN", ".NAN"}},
|
||||||
|
{math.Inf(+1), yaml_FLOAT_TAG, []string{".inf", ".Inf", ".INF"}},
|
||||||
|
{math.Inf(+1), yaml_FLOAT_TAG, []string{"+.inf", "+.Inf", "+.INF"}},
|
||||||
|
{math.Inf(-1), yaml_FLOAT_TAG, []string{"-.inf", "-.Inf", "-.INF"}},
|
||||||
|
{"<<", yaml_MERGE_TAG, []string{"<<"}},
|
||||||
|
}
|
||||||
|
|
||||||
|
m := resolveMap
|
||||||
|
for _, item := range resolveMapList {
|
||||||
|
for _, s := range item.l {
|
||||||
|
m[s] = resolveMapItem{item.v, item.tag}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const longTagPrefix = "tag:yaml.org,2002:"
|
||||||
|
|
||||||
|
func shortTag(tag string) string {
|
||||||
|
// TODO This can easily be made faster and produce less garbage.
|
||||||
|
if strings.HasPrefix(tag, longTagPrefix) {
|
||||||
|
return "!!" + tag[len(longTagPrefix):]
|
||||||
|
}
|
||||||
|
return tag
|
||||||
|
}
|
||||||
|
|
||||||
|
func longTag(tag string) string {
|
||||||
|
if strings.HasPrefix(tag, "!!") {
|
||||||
|
return longTagPrefix + tag[2:]
|
||||||
|
}
|
||||||
|
return tag
|
||||||
|
}
|
||||||
|
|
||||||
|
func resolvableTag(tag string) bool {
|
||||||
|
switch tag {
|
||||||
|
case "", yaml_STR_TAG, yaml_BOOL_TAG, yaml_INT_TAG, yaml_FLOAT_TAG, yaml_NULL_TAG, yaml_TIMESTAMP_TAG:
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
var yamlStyleFloat = regexp.MustCompile(`^[-+]?[0-9]*\.?[0-9]+([eE][-+][0-9]+)?$`)
|
||||||
|
|
||||||
|
func resolve(tag string, in string) (rtag string, out interface{}) {
|
||||||
|
if !resolvableTag(tag) {
|
||||||
|
return tag, in
|
||||||
|
}
|
||||||
|
|
||||||
|
defer func() {
|
||||||
|
switch tag {
|
||||||
|
case "", rtag, yaml_STR_TAG, yaml_BINARY_TAG:
|
||||||
|
return
|
||||||
|
case yaml_FLOAT_TAG:
|
||||||
|
if rtag == yaml_INT_TAG {
|
||||||
|
switch v := out.(type) {
|
||||||
|
case int64:
|
||||||
|
rtag = yaml_FLOAT_TAG
|
||||||
|
out = float64(v)
|
||||||
|
return
|
||||||
|
case int:
|
||||||
|
rtag = yaml_FLOAT_TAG
|
||||||
|
out = float64(v)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
failf("cannot decode %s `%s` as a %s", shortTag(rtag), in, shortTag(tag))
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Any data is accepted as a !!str or !!binary.
|
||||||
|
// Otherwise, the prefix is enough of a hint about what it might be.
|
||||||
|
hint := byte('N')
|
||||||
|
if in != "" {
|
||||||
|
hint = resolveTable[in[0]]
|
||||||
|
}
|
||||||
|
if hint != 0 && tag != yaml_STR_TAG && tag != yaml_BINARY_TAG {
|
||||||
|
// Handle things we can lookup in a map.
|
||||||
|
if item, ok := resolveMap[in]; ok {
|
||||||
|
return item.tag, item.value
|
||||||
|
}
|
||||||
|
|
||||||
|
// Base 60 floats are a bad idea, were dropped in YAML 1.2, and
|
||||||
|
// are purposefully unsupported here. They're still quoted on
|
||||||
|
// the way out for compatibility with other parsers, though.
|
||||||
|
|
||||||
|
switch hint {
|
||||||
|
case 'M':
|
||||||
|
// We've already checked the map above.
|
||||||
|
|
||||||
|
case '.':
|
||||||
|
// Not in the map, so maybe a normal float.
|
||||||
|
floatv, err := strconv.ParseFloat(in, 64)
|
||||||
|
if err == nil {
|
||||||
|
return yaml_FLOAT_TAG, floatv
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'D', 'S':
|
||||||
|
// Int, float, or timestamp.
|
||||||
|
// Only try values as a timestamp if the value is unquoted or there's an explicit
|
||||||
|
// !!timestamp tag.
|
||||||
|
if tag == "" || tag == yaml_TIMESTAMP_TAG {
|
||||||
|
t, ok := parseTimestamp(in)
|
||||||
|
if ok {
|
||||||
|
return yaml_TIMESTAMP_TAG, t
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
plain := strings.Replace(in, "_", "", -1)
|
||||||
|
intv, err := strconv.ParseInt(plain, 0, 64)
|
||||||
|
if err == nil {
|
||||||
|
if intv == int64(int(intv)) {
|
||||||
|
return yaml_INT_TAG, int(intv)
|
||||||
|
} else {
|
||||||
|
return yaml_INT_TAG, intv
|
||||||
|
}
|
||||||
|
}
|
||||||
|
uintv, err := strconv.ParseUint(plain, 0, 64)
|
||||||
|
if err == nil {
|
||||||
|
return yaml_INT_TAG, uintv
|
||||||
|
}
|
||||||
|
if yamlStyleFloat.MatchString(plain) {
|
||||||
|
floatv, err := strconv.ParseFloat(plain, 64)
|
||||||
|
if err == nil {
|
||||||
|
return yaml_FLOAT_TAG, floatv
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if strings.HasPrefix(plain, "0b") {
|
||||||
|
intv, err := strconv.ParseInt(plain[2:], 2, 64)
|
||||||
|
if err == nil {
|
||||||
|
if intv == int64(int(intv)) {
|
||||||
|
return yaml_INT_TAG, int(intv)
|
||||||
|
} else {
|
||||||
|
return yaml_INT_TAG, intv
|
||||||
|
}
|
||||||
|
}
|
||||||
|
uintv, err := strconv.ParseUint(plain[2:], 2, 64)
|
||||||
|
if err == nil {
|
||||||
|
return yaml_INT_TAG, uintv
|
||||||
|
}
|
||||||
|
} else if strings.HasPrefix(plain, "-0b") {
|
||||||
|
intv, err := strconv.ParseInt("-" + plain[3:], 2, 64)
|
||||||
|
if err == nil {
|
||||||
|
if true || intv == int64(int(intv)) {
|
||||||
|
return yaml_INT_TAG, int(intv)
|
||||||
|
} else {
|
||||||
|
return yaml_INT_TAG, intv
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
panic("resolveTable item not yet handled: " + string(rune(hint)) + " (with " + in + ")")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return yaml_STR_TAG, in
|
||||||
|
}
|
||||||
|
|
||||||
|
// encodeBase64 encodes s as base64 that is broken up into multiple lines
|
||||||
|
// as appropriate for the resulting length.
|
||||||
|
func encodeBase64(s string) string {
|
||||||
|
const lineLen = 70
|
||||||
|
encLen := base64.StdEncoding.EncodedLen(len(s))
|
||||||
|
lines := encLen/lineLen + 1
|
||||||
|
buf := make([]byte, encLen*2+lines)
|
||||||
|
in := buf[0:encLen]
|
||||||
|
out := buf[encLen:]
|
||||||
|
base64.StdEncoding.Encode(in, []byte(s))
|
||||||
|
k := 0
|
||||||
|
for i := 0; i < len(in); i += lineLen {
|
||||||
|
j := i + lineLen
|
||||||
|
if j > len(in) {
|
||||||
|
j = len(in)
|
||||||
|
}
|
||||||
|
k += copy(out[k:], in[i:j])
|
||||||
|
if lines > 1 {
|
||||||
|
out[k] = '\n'
|
||||||
|
k++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return string(out[:k])
|
||||||
|
}
|
||||||
|
|
||||||
|
// This is a subset of the formats allowed by the regular expression
|
||||||
|
// defined at http://yaml.org/type/timestamp.html.
|
||||||
|
var allowedTimestampFormats = []string{
|
||||||
|
"2006-1-2T15:4:5.999999999Z07:00", // RCF3339Nano with short date fields.
|
||||||
|
"2006-1-2t15:4:5.999999999Z07:00", // RFC3339Nano with short date fields and lower-case "t".
|
||||||
|
"2006-1-2 15:4:5.999999999", // space separated with no time zone
|
||||||
|
"2006-1-2", // date only
|
||||||
|
// Notable exception: time.Parse cannot handle: "2001-12-14 21:59:43.10 -5"
|
||||||
|
// from the set of examples.
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseTimestamp parses s as a timestamp string and
|
||||||
|
// returns the timestamp and reports whether it succeeded.
|
||||||
|
// Timestamp formats are defined at http://yaml.org/type/timestamp.html
|
||||||
|
func parseTimestamp(s string) (time.Time, bool) {
|
||||||
|
// TODO write code to check all the formats supported by
|
||||||
|
// http://yaml.org/type/timestamp.html instead of using time.Parse.
|
||||||
|
|
||||||
|
// Quick check: all date formats start with YYYY-.
|
||||||
|
i := 0
|
||||||
|
for ; i < len(s); i++ {
|
||||||
|
if c := s[i]; c < '0' || c > '9' {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if i != 4 || i == len(s) || s[i] != '-' {
|
||||||
|
return time.Time{}, false
|
||||||
|
}
|
||||||
|
for _, format := range allowedTimestampFormats {
|
||||||
|
if t, err := time.Parse(format, s); err == nil {
|
||||||
|
return t, true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return time.Time{}, false
|
||||||
|
}
|
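parseTimestamp closes resolve.go. To make the implicit resolution rules above concrete, here is a hedged sketch of what plain scalars decode to when the target is interface{}; the document is invented, and the types noted in the comments follow from resolve() together with the decoder's timestamp special case:

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

// Invented document exercising a few of the implicit resolutions.
const doc = `
flag: yes
count: 1_000
ratio: .inf
when: 2001-12-15T02:59:43.1Z
`

func main() {
	var out map[string]interface{}
	if err := yaml.Unmarshal([]byte(doc), &out); err != nil {
		panic(err)
	}
	// "yes" resolves as a YAML 1.1 bool, "1_000" as an int with the
	// underscores stripped, ".inf" as +Inf, and the timestamp is kept
	// as a string when decoding into interface{} (the backward
	// compatibility case in decode.go's scalar method).
	for _, k := range []string{"flag", "count", "ratio", "when"} {
		fmt.Printf("%s: %T %v\n", k, out[k], out[k])
	}
}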
2696 vendor/gopkg.in/yaml.v2/scannerc.go (generated, vendored, new file): file diff suppressed because it is too large
113 vendor/gopkg.in/yaml.v2/sorter.go (generated, vendored, new file) @@ -0,0 +1,113 @@
|
|||||||
|
package yaml
|
||||||
|
|
||||||
|
import (
|
||||||
|
"reflect"
|
||||||
|
"unicode"
|
||||||
|
)
|
||||||
|
|
||||||
|
type keyList []reflect.Value
|
||||||
|
|
||||||
|
func (l keyList) Len() int { return len(l) }
|
||||||
|
func (l keyList) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
|
||||||
|
func (l keyList) Less(i, j int) bool {
|
||||||
|
a := l[i]
|
||||||
|
b := l[j]
|
||||||
|
ak := a.Kind()
|
||||||
|
bk := b.Kind()
|
||||||
|
for (ak == reflect.Interface || ak == reflect.Ptr) && !a.IsNil() {
|
||||||
|
a = a.Elem()
|
||||||
|
ak = a.Kind()
|
||||||
|
}
|
||||||
|
for (bk == reflect.Interface || bk == reflect.Ptr) && !b.IsNil() {
|
||||||
|
b = b.Elem()
|
||||||
|
bk = b.Kind()
|
||||||
|
}
|
||||||
|
af, aok := keyFloat(a)
|
||||||
|
bf, bok := keyFloat(b)
|
||||||
|
if aok && bok {
|
||||||
|
if af != bf {
|
||||||
|
return af < bf
|
||||||
|
}
|
||||||
|
if ak != bk {
|
||||||
|
return ak < bk
|
||||||
|
}
|
||||||
|
return numLess(a, b)
|
||||||
|
}
|
||||||
|
if ak != reflect.String || bk != reflect.String {
|
||||||
|
return ak < bk
|
||||||
|
}
|
||||||
|
ar, br := []rune(a.String()), []rune(b.String())
|
||||||
|
for i := 0; i < len(ar) && i < len(br); i++ {
|
||||||
|
if ar[i] == br[i] {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
al := unicode.IsLetter(ar[i])
|
||||||
|
bl := unicode.IsLetter(br[i])
|
||||||
|
if al && bl {
|
||||||
|
return ar[i] < br[i]
|
||||||
|
}
|
||||||
|
if al || bl {
|
||||||
|
return bl
|
||||||
|
}
|
||||||
|
var ai, bi int
|
||||||
|
var an, bn int64
|
||||||
|
if ar[i] == '0' || br[i] == '0' {
|
||||||
|
for j := i-1; j >= 0 && unicode.IsDigit(ar[j]); j-- {
|
||||||
|
if ar[j] != '0' {
|
||||||
|
an = 1
|
||||||
|
bn = 1
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for ai = i; ai < len(ar) && unicode.IsDigit(ar[ai]); ai++ {
|
||||||
|
an = an*10 + int64(ar[ai]-'0')
|
||||||
|
}
|
||||||
|
for bi = i; bi < len(br) && unicode.IsDigit(br[bi]); bi++ {
|
||||||
|
bn = bn*10 + int64(br[bi]-'0')
|
||||||
|
}
|
||||||
|
if an != bn {
|
||||||
|
return an < bn
|
||||||
|
}
|
||||||
|
if ai != bi {
|
||||||
|
return ai < bi
|
||||||
|
}
|
||||||
|
return ar[i] < br[i]
|
||||||
|
}
|
||||||
|
return len(ar) < len(br)
|
||||||
|
}
|
||||||
|
|
||||||
|
// keyFloat returns a float value for v if it is a number/bool
|
||||||
|
// and whether it is a number/bool or not.
|
||||||
|
func keyFloat(v reflect.Value) (f float64, ok bool) {
|
||||||
|
switch v.Kind() {
|
||||||
|
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||||
|
return float64(v.Int()), true
|
||||||
|
case reflect.Float32, reflect.Float64:
|
||||||
|
return v.Float(), true
|
||||||
|
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||||
|
return float64(v.Uint()), true
|
||||||
|
case reflect.Bool:
|
||||||
|
if v.Bool() {
|
||||||
|
return 1, true
|
||||||
|
}
|
||||||
|
return 0, true
|
||||||
|
}
|
||||||
|
return 0, false
|
||||||
|
}
|
||||||
|
|
||||||
|
// numLess returns whether a < b.
|
||||||
|
// a and b must necessarily have the same kind.
|
||||||
|
func numLess(a, b reflect.Value) bool {
|
||||||
|
switch a.Kind() {
|
||||||
|
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||||
|
return a.Int() < b.Int()
|
||||||
|
case reflect.Float32, reflect.Float64:
|
||||||
|
return a.Float() < b.Float()
|
||||||
|
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||||
|
return a.Uint() < b.Uint()
|
||||||
|
case reflect.Bool:
|
||||||
|
return !a.Bool() && b.Bool()
|
||||||
|
}
|
||||||
|
panic("not a number")
|
||||||
|
}
|
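numLess closes sorter.go. A hedged illustration of the key ordering keyList gives marshalled maps (the map contents are invented): embedded numbers are compared numerically, so "node2" sorts before "node10" rather than after it as a plain lexical sort would put it.

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	m := map[string]string{
		"node10": "c",
		"node2":  "b",
		"node1":  "a",
	}
	out, err := yaml.Marshal(m)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
	// Expected output:
	// node1: a
	// node2: b
	// node10: c
}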
26 vendor/gopkg.in/yaml.v2/writerc.go (generated, vendored, new file) @@ -0,0 +1,26 @@
|
|||||||
|
package yaml
|
||||||
|
|
||||||
|
// Set the writer error and return false.
|
||||||
|
func yaml_emitter_set_writer_error(emitter *yaml_emitter_t, problem string) bool {
|
||||||
|
emitter.error = yaml_WRITER_ERROR
|
||||||
|
emitter.problem = problem
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Flush the output buffer.
|
||||||
|
func yaml_emitter_flush(emitter *yaml_emitter_t) bool {
|
||||||
|
if emitter.write_handler == nil {
|
||||||
|
panic("write handler not set")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the buffer is empty.
|
||||||
|
if emitter.buffer_pos == 0 {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := emitter.write_handler(emitter, emitter.buffer[:emitter.buffer_pos]); err != nil {
|
||||||
|
return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error())
|
||||||
|
}
|
||||||
|
emitter.buffer_pos = 0
|
||||||
|
return true
|
||||||
|
}
|
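yaml_emitter_flush closes writerc.go; the write handler it drives is what backs the streaming encoder. A hedged sketch, assuming the yaml.NewEncoder/Encode/Close API shipped with this vendored version:

package main

import (
	"os"

	"gopkg.in/yaml.v2"
)

func main() {
	enc := yaml.NewEncoder(os.Stdout)
	defer enc.Close()
	// Each Encode call emits one YAML document; the emitter buffer is
	// flushed to the io.Writer through the handler set by
	// yaml_emitter_set_output_writer.
	if err := enc.Encode(map[string]string{"first": "doc"}); err != nil {
		panic(err)
	}
	if err := enc.Encode(map[string]string{"second": "doc"}); err != nil {
		panic(err)
	}
	// Expected output (roughly):
	// first: doc
	// ---
	// second: doc
}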
466 vendor/gopkg.in/yaml.v2/yaml.go (generated, vendored, new file) @@ -0,0 +1,466 @@
|
|||||||
|
// Package yaml implements YAML support for the Go language.
|
||||||
|
//
|
||||||
|
// Source code and other details for the project are available at GitHub:
|
||||||
|
//
|
||||||
|
// https://github.com/go-yaml/yaml
|
||||||
|
//
|
||||||
|
package yaml
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"reflect"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
)
|
||||||
|
|
||||||
|
// MapSlice encodes and decodes as a YAML map.
|
||||||
|
// The order of keys is preserved when encoding and decoding.
|
||||||
|
type MapSlice []MapItem
|
||||||
|
|
||||||
|
// MapItem is an item in a MapSlice.
|
||||||
|
type MapItem struct {
|
||||||
|
Key, Value interface{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// The Unmarshaler interface may be implemented by types to customize their
|
||||||
|
// behavior when being unmarshaled from a YAML document. The UnmarshalYAML
|
||||||
|
// method receives a function that may be called to unmarshal the original
|
||||||
|
// YAML value into a field or variable. It is safe to call the unmarshal
|
||||||
|
// function parameter more than once if necessary.
|
||||||
|
type Unmarshaler interface {
|
||||||
|
UnmarshalYAML(unmarshal func(interface{}) error) error
|
||||||
|
}
|
||||||
|
|
||||||
|
// The Marshaler interface may be implemented by types to customize their
|
||||||
|
// behavior when being marshaled into a YAML document. The returned value
|
||||||
|
// is marshaled in place of the original value implementing Marshaler.
|
||||||
|
//
|
||||||
|
// If an error is returned by MarshalYAML, the marshaling procedure stops
|
||||||
|
// and returns with the provided error.
|
||||||
|
type Marshaler interface {
|
||||||
|
MarshalYAML() (interface{}, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unmarshal decodes the first document found within the in byte slice
|
||||||
|
// and assigns decoded values into the out value.
|
||||||
|
//
|
||||||
|
// Maps and pointers (to a struct, string, int, etc) are accepted as out
|
||||||
|
// values. If an internal pointer within a struct is not initialized,
|
||||||
|
// the yaml package will initialize it if necessary for unmarshalling
|
||||||
|
// the provided data. The out parameter must not be nil.
|
||||||
|
//
|
||||||
|
// The type of the decoded values should be compatible with the respective
|
||||||
|
// values in out. If one or more values cannot be decoded due to type
|
||||||
|
// mismatches, decoding continues partially until the end of the YAML
|
||||||
|
// content, and a *yaml.TypeError is returned with details for all
|
||||||
|
// missed values.
|
||||||
|
//
|
||||||
|
// Struct fields are only unmarshalled if they are exported (have an
|
||||||
|
// upper case first letter), and are unmarshalled using the field name
|
||||||
|
// lowercased as the default key. Custom keys may be defined via the
|
||||||
|
// "yaml" name in the field tag: the content preceding the first comma
|
||||||
|
// is used as the key, and the following comma-separated options are
|
||||||
|
// used to tweak the marshalling process (see Marshal).
|
||||||
|
// Conflicting names result in a runtime error.
|
||||||
|
//
|
||||||
|
// For example:
|
||||||
|
//
|
||||||
|
// type T struct {
|
||||||
|
// F int `yaml:"a,omitempty"`
|
||||||
|
// B int
|
||||||
|
// }
|
||||||
|
// var t T
|
||||||
|
// yaml.Unmarshal([]byte("a: 1\nb: 2"), &t)
|
||||||
|
//
|
||||||
|
// See the documentation of Marshal for the format of tags and a list of
|
||||||
|
// supported tag options.
|
||||||
|
//
|
||||||
|
func Unmarshal(in []byte, out interface{}) (err error) {
|
||||||
|
return unmarshal(in, out, false)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UnmarshalStrict is like Unmarshal except that any fields that are found
|
||||||
|
// in the data that do not have corresponding struct members, or mapping
|
||||||
|
// keys that are duplicates, will result in
|
||||||
|
// an error.
|
||||||
|
func UnmarshalStrict(in []byte, out interface{}) (err error) {
|
||||||
|
return unmarshal(in, out, true)
|
||||||
|
}
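A small usage sketch (not part of the vendored file) contrasting Unmarshal with UnmarshalStrict; the Config type and field names are hypothetical.

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

type Config struct {
	Host string `yaml:"host"`
	Port int    `yaml:"port"`
}

func main() {
	data := []byte("host: localhost\nport: 8080\nextra: true\n")

	// Unmarshal silently ignores the unknown "extra" key.
	var c Config
	if err := yaml.Unmarshal(data, &c); err != nil {
		panic(err)
	}
	fmt.Println(c.Host, c.Port) // localhost 8080

	// UnmarshalStrict rejects keys that have no matching struct field.
	if err := yaml.UnmarshalStrict(data, &c); err != nil {
		fmt.Println("strict:", err)
	}
}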
|
||||||
|
|
||||||
|
// A Decoder reads and decodes YAML values from an input stream.
|
||||||
|
type Decoder struct {
|
||||||
|
strict bool
|
||||||
|
parser *parser
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewDecoder returns a new decoder that reads from r.
|
||||||
|
//
|
||||||
|
// The decoder introduces its own buffering and may read
|
||||||
|
// data from r beyond the YAML values requested.
|
||||||
|
func NewDecoder(r io.Reader) *Decoder {
|
||||||
|
return &Decoder{
|
||||||
|
parser: newParserFromReader(r),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetStrict sets whether strict decoding behaviour is enabled when
|
||||||
|
// decoding items in the data (see UnmarshalStrict). By default, decoding is not strict.
|
||||||
|
func (dec *Decoder) SetStrict(strict bool) {
|
||||||
|
dec.strict = strict
|
||||||
|
}
|
||||||
|
|
||||||
|
// Decode reads the next YAML-encoded value from its input
|
||||||
|
// and stores it in the value pointed to by v.
|
||||||
|
//
|
||||||
|
// See the documentation for Unmarshal for details about the
|
||||||
|
// conversion of YAML into a Go value.
|
||||||
|
func (dec *Decoder) Decode(v interface{}) (err error) {
|
||||||
|
d := newDecoder(dec.strict)
|
||||||
|
defer handleErr(&err)
|
||||||
|
node := dec.parser.parse()
|
||||||
|
if node == nil {
|
||||||
|
return io.EOF
|
||||||
|
}
|
||||||
|
out := reflect.ValueOf(v)
|
||||||
|
if out.Kind() == reflect.Ptr && !out.IsNil() {
|
||||||
|
out = out.Elem()
|
||||||
|
}
|
||||||
|
d.unmarshal(node, out)
|
||||||
|
if len(d.terrors) > 0 {
|
||||||
|
return &TypeError{d.terrors}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
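A minimal sketch (not part of the vendored file) of stream decoding with NewDecoder, relying on Decode returning io.EOF once the stream is exhausted as implemented above.

package main

import (
	"fmt"
	"io"
	"strings"

	"gopkg.in/yaml.v2"
)

func main() {
	// Two YAML documents separated by "---".
	dec := yaml.NewDecoder(strings.NewReader("a: 1\n---\na: 2\n"))
	for {
		var doc map[string]int
		err := dec.Decode(&doc)
		if err == io.EOF {
			break // no more documents in the stream
		}
		if err != nil {
			panic(err)
		}
		fmt.Println(doc["a"]) // prints 1, then 2
	}
}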
|
||||||
|
|
||||||
|
func unmarshal(in []byte, out interface{}, strict bool) (err error) {
|
||||||
|
defer handleErr(&err)
|
||||||
|
d := newDecoder(strict)
|
||||||
|
p := newParser(in)
|
||||||
|
defer p.destroy()
|
||||||
|
node := p.parse()
|
||||||
|
if node != nil {
|
||||||
|
v := reflect.ValueOf(out)
|
||||||
|
if v.Kind() == reflect.Ptr && !v.IsNil() {
|
||||||
|
v = v.Elem()
|
||||||
|
}
|
||||||
|
d.unmarshal(node, v)
|
||||||
|
}
|
||||||
|
if len(d.terrors) > 0 {
|
||||||
|
return &TypeError{d.terrors}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Marshal serializes the value provided into a YAML document. The structure
|
||||||
|
// of the generated document will reflect the structure of the value itself.
|
||||||
|
// Maps and pointers (to struct, string, int, etc) are accepted as the in value.
|
||||||
|
//
|
||||||
|
// Struct fields are only marshalled if they are exported (have an upper case
|
||||||
|
// first letter), and are marshalled using the field name lowercased as the
|
||||||
|
// default key. Custom keys may be defined via the "yaml" name in the field
|
||||||
|
// tag: the content preceding the first comma is used as the key, and the
|
||||||
|
// following comma-separated options are used to tweak the marshalling process.
|
||||||
|
// Conflicting names result in a runtime error.
|
||||||
|
//
|
||||||
|
// The field tag format accepted is:
|
||||||
|
//
|
||||||
|
// `(...) yaml:"[<key>][,<flag1>[,<flag2>]]" (...)`
|
||||||
|
//
|
||||||
|
// The following flags are currently supported:
|
||||||
|
//
|
||||||
|
// omitempty Only include the field if it's not set to the zero
|
||||||
|
// value for the type or to empty slices or maps.
|
||||||
|
// Zero valued structs will be omitted if all their public
|
||||||
|
// fields are zero, unless they implement an IsZero
|
||||||
|
// method (see the IsZeroer interface type), in which
|
||||||
|
// case the field will be included if that method returns true.
|
||||||
|
//
|
||||||
|
// flow Marshal using a flow style (useful for structs,
|
||||||
|
// sequences and maps).
|
||||||
|
//
|
||||||
|
// inline Inline the field, which must be a struct or a map,
|
||||||
|
// causing all of its fields or keys to be processed as if
|
||||||
|
// they were part of the outer struct. For maps, keys must
|
||||||
|
// not conflict with the yaml keys of other struct fields.
|
||||||
|
//
|
||||||
|
// In addition, if the key is "-", the field is ignored.
|
||||||
|
//
|
||||||
|
// For example:
|
||||||
|
//
|
||||||
|
// type T struct {
|
||||||
|
// F int `yaml:"a,omitempty"`
|
||||||
|
// B int
|
||||||
|
// }
|
||||||
|
// yaml.Marshal(&T{B: 2}) // Returns "b: 2\n"
|
||||||
|
// yaml.Marshal(&T{F: 1}) // Returns "a: 1\nb: 0\n"
|
||||||
|
//
|
||||||
|
func Marshal(in interface{}) (out []byte, err error) {
|
||||||
|
defer handleErr(&err)
|
||||||
|
e := newEncoder()
|
||||||
|
defer e.destroy()
|
||||||
|
e.marshalDoc("", reflect.ValueOf(in))
|
||||||
|
e.finish()
|
||||||
|
out = e.out
|
||||||
|
return
|
||||||
|
}
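A short sketch (not part of the vendored file) of the field-tag flags described above; the Item type is hypothetical and only illustrates omitempty, flow, and "-".

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

type Item struct {
	Name  string   `yaml:"name"`
	Tags  []string `yaml:"tags,omitempty,flow"` // flow style, dropped when empty
	Notes string   `yaml:"-"`                   // never marshalled
}

func main() {
	out, err := yaml.Marshal(Item{Name: "fblook", Tags: []string{"a", "b"}})
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
	// name: fblook
	// tags: [a, b]
}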
|
||||||
|
|
||||||
|
// An Encoder writes YAML values to an output stream.
|
||||||
|
type Encoder struct {
|
||||||
|
encoder *encoder
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewEncoder returns a new encoder that writes to w.
|
||||||
|
// The Encoder should be closed after use to flush all data
|
||||||
|
// to w.
|
||||||
|
func NewEncoder(w io.Writer) *Encoder {
|
||||||
|
return &Encoder{
|
||||||
|
encoder: newEncoderWithWriter(w),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Encode writes the YAML encoding of v to the stream.
|
||||||
|
// If multiple items are encoded to the stream, the
|
||||||
|
// second and subsequent document will be preceded
|
||||||
|
// with a "---" document separator, but the first will not.
|
||||||
|
//
|
||||||
|
// See the documentation for Marshal for details about the conversion of Go
|
||||||
|
// values to YAML.
|
||||||
|
func (e *Encoder) Encode(v interface{}) (err error) {
|
||||||
|
defer handleErr(&err)
|
||||||
|
e.encoder.marshalDoc("", reflect.ValueOf(v))
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Close closes the encoder by writing any remaining data.
|
||||||
|
// It does not write a stream terminating string "...".
|
||||||
|
func (e *Encoder) Close() (err error) {
|
||||||
|
defer handleErr(&err)
|
||||||
|
e.encoder.finish()
|
||||||
|
return nil
|
||||||
|
}
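A minimal sketch (not part of the vendored file) of Encoder usage: each Encode after the first is preceded by a "---" separator, and Close flushes buffered output.

package main

import (
	"os"

	"gopkg.in/yaml.v2"
)

func main() {
	enc := yaml.NewEncoder(os.Stdout)
	defer enc.Close() // flush any remaining data

	for _, doc := range []map[string]int{{"a": 1}, {"a": 2}} {
		if err := enc.Encode(doc); err != nil {
			panic(err)
		}
	}
	// Output:
	// a: 1
	// ---
	// a: 2
}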
|
||||||
|
|
||||||
|
func handleErr(err *error) {
|
||||||
|
if v := recover(); v != nil {
|
||||||
|
if e, ok := v.(yamlError); ok {
|
||||||
|
*err = e.err
|
||||||
|
} else {
|
||||||
|
panic(v)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type yamlError struct {
|
||||||
|
err error
|
||||||
|
}
|
||||||
|
|
||||||
|
func fail(err error) {
|
||||||
|
panic(yamlError{err})
|
||||||
|
}
|
||||||
|
|
||||||
|
func failf(format string, args ...interface{}) {
|
||||||
|
panic(yamlError{fmt.Errorf("yaml: "+format, args...)})
|
||||||
|
}
|
||||||
|
|
||||||
|
// A TypeError is returned by Unmarshal when one or more fields in
|
||||||
|
// the YAML document cannot be properly decoded into the requested
|
||||||
|
// types. When this error is returned, the value is still
|
||||||
|
// unmarshaled partially.
|
||||||
|
type TypeError struct {
|
||||||
|
Errors []string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *TypeError) Error() string {
|
||||||
|
return fmt.Sprintf("yaml: unmarshal errors:\n %s", strings.Join(e.Errors, "\n "))
|
||||||
|
}
|
||||||
|
|
||||||
|
// --------------------------------------------------------------------------
|
||||||
|
// Maintain a mapping of keys to structure field indexes
|
||||||
|
|
||||||
|
// The code in this section was copied from mgo/bson.
|
||||||
|
|
||||||
|
// structInfo holds details for the serialization of fields of
|
||||||
|
// a given struct.
|
||||||
|
type structInfo struct {
|
||||||
|
FieldsMap map[string]fieldInfo
|
||||||
|
FieldsList []fieldInfo
|
||||||
|
|
||||||
|
// InlineMap is the number of the field in the struct that
|
||||||
|
// contains an ,inline map, or -1 if there's none.
|
||||||
|
InlineMap int
|
||||||
|
}
|
||||||
|
|
||||||
|
type fieldInfo struct {
|
||||||
|
Key string
|
||||||
|
Num int
|
||||||
|
OmitEmpty bool
|
||||||
|
Flow bool
|
||||||
|
// Id holds the unique field identifier, so we can cheaply
|
||||||
|
// check for field duplicates without maintaining an extra map.
|
||||||
|
Id int
|
||||||
|
|
||||||
|
// Inline holds the field index if the field is part of an inlined struct.
|
||||||
|
Inline []int
|
||||||
|
}
|
||||||
|
|
||||||
|
var structMap = make(map[reflect.Type]*structInfo)
|
||||||
|
var fieldMapMutex sync.RWMutex
|
||||||
|
|
||||||
|
func getStructInfo(st reflect.Type) (*structInfo, error) {
|
||||||
|
fieldMapMutex.RLock()
|
||||||
|
sinfo, found := structMap[st]
|
||||||
|
fieldMapMutex.RUnlock()
|
||||||
|
if found {
|
||||||
|
return sinfo, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
n := st.NumField()
|
||||||
|
fieldsMap := make(map[string]fieldInfo)
|
||||||
|
fieldsList := make([]fieldInfo, 0, n)
|
||||||
|
inlineMap := -1
|
||||||
|
for i := 0; i != n; i++ {
|
||||||
|
field := st.Field(i)
|
||||||
|
if field.PkgPath != "" && !field.Anonymous {
|
||||||
|
continue // Private field
|
||||||
|
}
|
||||||
|
|
||||||
|
info := fieldInfo{Num: i}
|
||||||
|
|
||||||
|
tag := field.Tag.Get("yaml")
|
||||||
|
if tag == "" && strings.Index(string(field.Tag), ":") < 0 {
|
||||||
|
tag = string(field.Tag)
|
||||||
|
}
|
||||||
|
if tag == "-" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
inline := false
|
||||||
|
fields := strings.Split(tag, ",")
|
||||||
|
if len(fields) > 1 {
|
||||||
|
for _, flag := range fields[1:] {
|
||||||
|
switch flag {
|
||||||
|
case "omitempty":
|
||||||
|
info.OmitEmpty = true
|
||||||
|
case "flow":
|
||||||
|
info.Flow = true
|
||||||
|
case "inline":
|
||||||
|
inline = true
|
||||||
|
default:
|
||||||
|
return nil, errors.New(fmt.Sprintf("Unsupported flag %q in tag %q of type %s", flag, tag, st))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tag = fields[0]
|
||||||
|
}
|
||||||
|
|
||||||
|
if inline {
|
||||||
|
switch field.Type.Kind() {
|
||||||
|
case reflect.Map:
|
||||||
|
if inlineMap >= 0 {
|
||||||
|
return nil, errors.New("Multiple ,inline maps in struct " + st.String())
|
||||||
|
}
|
||||||
|
if field.Type.Key() != reflect.TypeOf("") {
|
||||||
|
return nil, errors.New("Option ,inline needs a map with string keys in struct " + st.String())
|
||||||
|
}
|
||||||
|
inlineMap = info.Num
|
||||||
|
case reflect.Struct:
|
||||||
|
sinfo, err := getStructInfo(field.Type)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
for _, finfo := range sinfo.FieldsList {
|
||||||
|
if _, found := fieldsMap[finfo.Key]; found {
|
||||||
|
msg := "Duplicated key '" + finfo.Key + "' in struct " + st.String()
|
||||||
|
return nil, errors.New(msg)
|
||||||
|
}
|
||||||
|
if finfo.Inline == nil {
|
||||||
|
finfo.Inline = []int{i, finfo.Num}
|
||||||
|
} else {
|
||||||
|
finfo.Inline = append([]int{i}, finfo.Inline...)
|
||||||
|
}
|
||||||
|
finfo.Id = len(fieldsList)
|
||||||
|
fieldsMap[finfo.Key] = finfo
|
||||||
|
fieldsList = append(fieldsList, finfo)
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
//return nil, errors.New("Option ,inline needs a struct value or map field")
|
||||||
|
return nil, errors.New("Option ,inline needs a struct value field")
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if tag != "" {
|
||||||
|
info.Key = tag
|
||||||
|
} else {
|
||||||
|
info.Key = strings.ToLower(field.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, found = fieldsMap[info.Key]; found {
|
||||||
|
msg := "Duplicated key '" + info.Key + "' in struct " + st.String()
|
||||||
|
return nil, errors.New(msg)
|
||||||
|
}
|
||||||
|
|
||||||
|
info.Id = len(fieldsList)
|
||||||
|
fieldsList = append(fieldsList, info)
|
||||||
|
fieldsMap[info.Key] = info
|
||||||
|
}
|
||||||
|
|
||||||
|
sinfo = &structInfo{
|
||||||
|
FieldsMap: fieldsMap,
|
||||||
|
FieldsList: fieldsList,
|
||||||
|
InlineMap: inlineMap,
|
||||||
|
}
|
||||||
|
|
||||||
|
fieldMapMutex.Lock()
|
||||||
|
structMap[st] = sinfo
|
||||||
|
fieldMapMutex.Unlock()
|
||||||
|
return sinfo, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsZeroer is used to check whether an object is zero to
|
||||||
|
// determine whether it should be omitted when marshaling
|
||||||
|
// with the omitempty flag. One notable implementation
|
||||||
|
// is time.Time.
|
||||||
|
type IsZeroer interface {
|
||||||
|
IsZero() bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func isZero(v reflect.Value) bool {
|
||||||
|
kind := v.Kind()
|
||||||
|
if z, ok := v.Interface().(IsZeroer); ok {
|
||||||
|
if (kind == reflect.Ptr || kind == reflect.Interface) && v.IsNil() {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return z.IsZero()
|
||||||
|
}
|
||||||
|
switch kind {
|
||||||
|
case reflect.String:
|
||||||
|
return len(v.String()) == 0
|
||||||
|
case reflect.Interface, reflect.Ptr:
|
||||||
|
return v.IsNil()
|
||||||
|
case reflect.Slice:
|
||||||
|
return v.Len() == 0
|
||||||
|
case reflect.Map:
|
||||||
|
return v.Len() == 0
|
||||||
|
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||||
|
return v.Int() == 0
|
||||||
|
case reflect.Float32, reflect.Float64:
|
||||||
|
return v.Float() == 0
|
||||||
|
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||||
|
return v.Uint() == 0
|
||||||
|
case reflect.Bool:
|
||||||
|
return !v.Bool()
|
||||||
|
case reflect.Struct:
|
||||||
|
vt := v.Type()
|
||||||
|
for i := v.NumField() - 1; i >= 0; i-- {
|
||||||
|
if vt.Field(i).PkgPath != "" {
|
||||||
|
continue // Private field
|
||||||
|
}
|
||||||
|
if !isZero(v.Field(i)) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
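A sketch (not part of the vendored file) of how the IsZeroer hook interacts with omitempty, as implemented by isZero above; the Window and Settings types are hypothetical.

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

// Window implements IsZero, so omitempty can skip it even though it is a
// struct with exported fields.
type Window struct {
	Min, Max int
}

func (w Window) IsZero() bool { return w.Min == 0 && w.Max == 0 }

type Settings struct {
	Limit Window `yaml:"limit,omitempty"`
	Name  string `yaml:"name"`
}

func main() {
	out, err := yaml.Marshal(Settings{Name: "fblook"})
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out)) // only "name: fblook" is emitted
}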
|
738
vendor/gopkg.in/yaml.v2/yamlh.go
generated
vendored
Normal file
@ -0,0 +1,738 @@
|
|||||||
|
package yaml
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
)
|
||||||
|
|
||||||
|
// The version directive data.
|
||||||
|
type yaml_version_directive_t struct {
|
||||||
|
major int8 // The major version number.
|
||||||
|
minor int8 // The minor version number.
|
||||||
|
}
|
||||||
|
|
||||||
|
// The tag directive data.
|
||||||
|
type yaml_tag_directive_t struct {
|
||||||
|
handle []byte // The tag handle.
|
||||||
|
prefix []byte // The tag prefix.
|
||||||
|
}
|
||||||
|
|
||||||
|
type yaml_encoding_t int
|
||||||
|
|
||||||
|
// The stream encoding.
|
||||||
|
const (
|
||||||
|
// Let the parser choose the encoding.
|
||||||
|
yaml_ANY_ENCODING yaml_encoding_t = iota
|
||||||
|
|
||||||
|
yaml_UTF8_ENCODING // The default UTF-8 encoding.
|
||||||
|
yaml_UTF16LE_ENCODING // The UTF-16-LE encoding with BOM.
|
||||||
|
yaml_UTF16BE_ENCODING // The UTF-16-BE encoding with BOM.
|
||||||
|
)
|
||||||
|
|
||||||
|
type yaml_break_t int
|
||||||
|
|
||||||
|
// Line break types.
|
||||||
|
const (
|
||||||
|
// Let the parser choose the break type.
|
||||||
|
yaml_ANY_BREAK yaml_break_t = iota
|
||||||
|
|
||||||
|
yaml_CR_BREAK // Use CR for line breaks (Mac style).
|
||||||
|
yaml_LN_BREAK // Use LN for line breaks (Unix style).
|
||||||
|
yaml_CRLN_BREAK // Use CR LN for line breaks (DOS style).
|
||||||
|
)
|
||||||
|
|
||||||
|
type yaml_error_type_t int
|
||||||
|
|
||||||
|
// Many bad things could happen with the parser and emitter.
|
||||||
|
const (
|
||||||
|
// No error is produced.
|
||||||
|
yaml_NO_ERROR yaml_error_type_t = iota
|
||||||
|
|
||||||
|
yaml_MEMORY_ERROR // Cannot allocate or reallocate a block of memory.
|
||||||
|
yaml_READER_ERROR // Cannot read or decode the input stream.
|
||||||
|
yaml_SCANNER_ERROR // Cannot scan the input stream.
|
||||||
|
yaml_PARSER_ERROR // Cannot parse the input stream.
|
||||||
|
yaml_COMPOSER_ERROR // Cannot compose a YAML document.
|
||||||
|
yaml_WRITER_ERROR // Cannot write to the output stream.
|
||||||
|
yaml_EMITTER_ERROR // Cannot emit a YAML stream.
|
||||||
|
)
|
||||||
|
|
||||||
|
// The pointer position.
|
||||||
|
type yaml_mark_t struct {
|
||||||
|
index int // The position index.
|
||||||
|
line int // The position line.
|
||||||
|
column int // The position column.
|
||||||
|
}
|
||||||
|
|
||||||
|
// Node Styles
|
||||||
|
|
||||||
|
type yaml_style_t int8
|
||||||
|
|
||||||
|
type yaml_scalar_style_t yaml_style_t
|
||||||
|
|
||||||
|
// Scalar styles.
|
||||||
|
const (
|
||||||
|
// Let the emitter choose the style.
|
||||||
|
yaml_ANY_SCALAR_STYLE yaml_scalar_style_t = iota
|
||||||
|
|
||||||
|
yaml_PLAIN_SCALAR_STYLE // The plain scalar style.
|
||||||
|
yaml_SINGLE_QUOTED_SCALAR_STYLE // The single-quoted scalar style.
|
||||||
|
yaml_DOUBLE_QUOTED_SCALAR_STYLE // The double-quoted scalar style.
|
||||||
|
yaml_LITERAL_SCALAR_STYLE // The literal scalar style.
|
||||||
|
yaml_FOLDED_SCALAR_STYLE // The folded scalar style.
|
||||||
|
)
|
||||||
|
|
||||||
|
type yaml_sequence_style_t yaml_style_t
|
||||||
|
|
||||||
|
// Sequence styles.
|
||||||
|
const (
|
||||||
|
// Let the emitter choose the style.
|
||||||
|
yaml_ANY_SEQUENCE_STYLE yaml_sequence_style_t = iota
|
||||||
|
|
||||||
|
yaml_BLOCK_SEQUENCE_STYLE // The block sequence style.
|
||||||
|
yaml_FLOW_SEQUENCE_STYLE // The flow sequence style.
|
||||||
|
)
|
||||||
|
|
||||||
|
type yaml_mapping_style_t yaml_style_t
|
||||||
|
|
||||||
|
// Mapping styles.
|
||||||
|
const (
|
||||||
|
// Let the emitter choose the style.
|
||||||
|
yaml_ANY_MAPPING_STYLE yaml_mapping_style_t = iota
|
||||||
|
|
||||||
|
yaml_BLOCK_MAPPING_STYLE // The block mapping style.
|
||||||
|
yaml_FLOW_MAPPING_STYLE // The flow mapping style.
|
||||||
|
)
|
||||||
|
|
||||||
|
// Tokens
|
||||||
|
|
||||||
|
type yaml_token_type_t int
|
||||||
|
|
||||||
|
// Token types.
|
||||||
|
const (
|
||||||
|
// An empty token.
|
||||||
|
yaml_NO_TOKEN yaml_token_type_t = iota
|
||||||
|
|
||||||
|
yaml_STREAM_START_TOKEN // A STREAM-START token.
|
||||||
|
yaml_STREAM_END_TOKEN // A STREAM-END token.
|
||||||
|
|
||||||
|
yaml_VERSION_DIRECTIVE_TOKEN // A VERSION-DIRECTIVE token.
|
||||||
|
yaml_TAG_DIRECTIVE_TOKEN // A TAG-DIRECTIVE token.
|
||||||
|
yaml_DOCUMENT_START_TOKEN // A DOCUMENT-START token.
|
||||||
|
yaml_DOCUMENT_END_TOKEN // A DOCUMENT-END token.
|
||||||
|
|
||||||
|
yaml_BLOCK_SEQUENCE_START_TOKEN // A BLOCK-SEQUENCE-START token.
|
||||||
|
yaml_BLOCK_MAPPING_START_TOKEN // A BLOCK-MAPPING-START token.
|
||||||
|
yaml_BLOCK_END_TOKEN // A BLOCK-END token.
|
||||||
|
|
||||||
|
yaml_FLOW_SEQUENCE_START_TOKEN // A FLOW-SEQUENCE-START token.
|
||||||
|
yaml_FLOW_SEQUENCE_END_TOKEN // A FLOW-SEQUENCE-END token.
|
||||||
|
yaml_FLOW_MAPPING_START_TOKEN // A FLOW-MAPPING-START token.
|
||||||
|
yaml_FLOW_MAPPING_END_TOKEN // A FLOW-MAPPING-END token.
|
||||||
|
|
||||||
|
yaml_BLOCK_ENTRY_TOKEN // A BLOCK-ENTRY token.
|
||||||
|
yaml_FLOW_ENTRY_TOKEN // A FLOW-ENTRY token.
|
||||||
|
yaml_KEY_TOKEN // A KEY token.
|
||||||
|
yaml_VALUE_TOKEN // A VALUE token.
|
||||||
|
|
||||||
|
yaml_ALIAS_TOKEN // An ALIAS token.
|
||||||
|
yaml_ANCHOR_TOKEN // An ANCHOR token.
|
||||||
|
yaml_TAG_TOKEN // A TAG token.
|
||||||
|
yaml_SCALAR_TOKEN // A SCALAR token.
|
||||||
|
)
|
||||||
|
|
||||||
|
func (tt yaml_token_type_t) String() string {
|
||||||
|
switch tt {
|
||||||
|
case yaml_NO_TOKEN:
|
||||||
|
return "yaml_NO_TOKEN"
|
||||||
|
case yaml_STREAM_START_TOKEN:
|
||||||
|
return "yaml_STREAM_START_TOKEN"
|
||||||
|
case yaml_STREAM_END_TOKEN:
|
||||||
|
return "yaml_STREAM_END_TOKEN"
|
||||||
|
case yaml_VERSION_DIRECTIVE_TOKEN:
|
||||||
|
return "yaml_VERSION_DIRECTIVE_TOKEN"
|
||||||
|
case yaml_TAG_DIRECTIVE_TOKEN:
|
||||||
|
return "yaml_TAG_DIRECTIVE_TOKEN"
|
||||||
|
case yaml_DOCUMENT_START_TOKEN:
|
||||||
|
return "yaml_DOCUMENT_START_TOKEN"
|
||||||
|
case yaml_DOCUMENT_END_TOKEN:
|
||||||
|
return "yaml_DOCUMENT_END_TOKEN"
|
||||||
|
case yaml_BLOCK_SEQUENCE_START_TOKEN:
|
||||||
|
return "yaml_BLOCK_SEQUENCE_START_TOKEN"
|
||||||
|
case yaml_BLOCK_MAPPING_START_TOKEN:
|
||||||
|
return "yaml_BLOCK_MAPPING_START_TOKEN"
|
||||||
|
case yaml_BLOCK_END_TOKEN:
|
||||||
|
return "yaml_BLOCK_END_TOKEN"
|
||||||
|
case yaml_FLOW_SEQUENCE_START_TOKEN:
|
||||||
|
return "yaml_FLOW_SEQUENCE_START_TOKEN"
|
||||||
|
case yaml_FLOW_SEQUENCE_END_TOKEN:
|
||||||
|
return "yaml_FLOW_SEQUENCE_END_TOKEN"
|
||||||
|
case yaml_FLOW_MAPPING_START_TOKEN:
|
||||||
|
return "yaml_FLOW_MAPPING_START_TOKEN"
|
||||||
|
case yaml_FLOW_MAPPING_END_TOKEN:
|
||||||
|
return "yaml_FLOW_MAPPING_END_TOKEN"
|
||||||
|
case yaml_BLOCK_ENTRY_TOKEN:
|
||||||
|
return "yaml_BLOCK_ENTRY_TOKEN"
|
||||||
|
case yaml_FLOW_ENTRY_TOKEN:
|
||||||
|
return "yaml_FLOW_ENTRY_TOKEN"
|
||||||
|
case yaml_KEY_TOKEN:
|
||||||
|
return "yaml_KEY_TOKEN"
|
||||||
|
case yaml_VALUE_TOKEN:
|
||||||
|
return "yaml_VALUE_TOKEN"
|
||||||
|
case yaml_ALIAS_TOKEN:
|
||||||
|
return "yaml_ALIAS_TOKEN"
|
||||||
|
case yaml_ANCHOR_TOKEN:
|
||||||
|
return "yaml_ANCHOR_TOKEN"
|
||||||
|
case yaml_TAG_TOKEN:
|
||||||
|
return "yaml_TAG_TOKEN"
|
||||||
|
case yaml_SCALAR_TOKEN:
|
||||||
|
return "yaml_SCALAR_TOKEN"
|
||||||
|
}
|
||||||
|
return "<unknown token>"
|
||||||
|
}
|
||||||
|
|
||||||
|
// The token structure.
|
||||||
|
type yaml_token_t struct {
|
||||||
|
// The token type.
|
||||||
|
typ yaml_token_type_t
|
||||||
|
|
||||||
|
// The start/end of the token.
|
||||||
|
start_mark, end_mark yaml_mark_t
|
||||||
|
|
||||||
|
// The stream encoding (for yaml_STREAM_START_TOKEN).
|
||||||
|
encoding yaml_encoding_t
|
||||||
|
|
||||||
|
// The alias/anchor/scalar value or tag/tag directive handle
|
||||||
|
// (for yaml_ALIAS_TOKEN, yaml_ANCHOR_TOKEN, yaml_SCALAR_TOKEN, yaml_TAG_TOKEN, yaml_TAG_DIRECTIVE_TOKEN).
|
||||||
|
value []byte
|
||||||
|
|
||||||
|
// The tag suffix (for yaml_TAG_TOKEN).
|
||||||
|
suffix []byte
|
||||||
|
|
||||||
|
// The tag directive prefix (for yaml_TAG_DIRECTIVE_TOKEN).
|
||||||
|
prefix []byte
|
||||||
|
|
||||||
|
// The scalar style (for yaml_SCALAR_TOKEN).
|
||||||
|
style yaml_scalar_style_t
|
||||||
|
|
||||||
|
// The version directive major/minor (for yaml_VERSION_DIRECTIVE_TOKEN).
|
||||||
|
major, minor int8
|
||||||
|
}
|
||||||
|
|
||||||
|
// Events
|
||||||
|
|
||||||
|
type yaml_event_type_t int8
|
||||||
|
|
||||||
|
// Event types.
|
||||||
|
const (
|
||||||
|
// An empty event.
|
||||||
|
yaml_NO_EVENT yaml_event_type_t = iota
|
||||||
|
|
||||||
|
yaml_STREAM_START_EVENT // A STREAM-START event.
|
||||||
|
yaml_STREAM_END_EVENT // A STREAM-END event.
|
||||||
|
yaml_DOCUMENT_START_EVENT // A DOCUMENT-START event.
|
||||||
|
yaml_DOCUMENT_END_EVENT // A DOCUMENT-END event.
|
||||||
|
yaml_ALIAS_EVENT // An ALIAS event.
|
||||||
|
yaml_SCALAR_EVENT // A SCALAR event.
|
||||||
|
yaml_SEQUENCE_START_EVENT // A SEQUENCE-START event.
|
||||||
|
yaml_SEQUENCE_END_EVENT // A SEQUENCE-END event.
|
||||||
|
yaml_MAPPING_START_EVENT // A MAPPING-START event.
|
||||||
|
yaml_MAPPING_END_EVENT // A MAPPING-END event.
|
||||||
|
)
|
||||||
|
|
||||||
|
var eventStrings = []string{
|
||||||
|
yaml_NO_EVENT: "none",
|
||||||
|
yaml_STREAM_START_EVENT: "stream start",
|
||||||
|
yaml_STREAM_END_EVENT: "stream end",
|
||||||
|
yaml_DOCUMENT_START_EVENT: "document start",
|
||||||
|
yaml_DOCUMENT_END_EVENT: "document end",
|
||||||
|
yaml_ALIAS_EVENT: "alias",
|
||||||
|
yaml_SCALAR_EVENT: "scalar",
|
||||||
|
yaml_SEQUENCE_START_EVENT: "sequence start",
|
||||||
|
yaml_SEQUENCE_END_EVENT: "sequence end",
|
||||||
|
yaml_MAPPING_START_EVENT: "mapping start",
|
||||||
|
yaml_MAPPING_END_EVENT: "mapping end",
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e yaml_event_type_t) String() string {
|
||||||
|
if e < 0 || int(e) >= len(eventStrings) {
|
||||||
|
return fmt.Sprintf("unknown event %d", e)
|
||||||
|
}
|
||||||
|
return eventStrings[e]
|
||||||
|
}
|
||||||
|
|
||||||
|
// The event structure.
|
||||||
|
type yaml_event_t struct {
|
||||||
|
|
||||||
|
// The event type.
|
||||||
|
typ yaml_event_type_t
|
||||||
|
|
||||||
|
// The start and end of the event.
|
||||||
|
start_mark, end_mark yaml_mark_t
|
||||||
|
|
||||||
|
// The document encoding (for yaml_STREAM_START_EVENT).
|
||||||
|
encoding yaml_encoding_t
|
||||||
|
|
||||||
|
// The version directive (for yaml_DOCUMENT_START_EVENT).
|
||||||
|
version_directive *yaml_version_directive_t
|
||||||
|
|
||||||
|
// The list of tag directives (for yaml_DOCUMENT_START_EVENT).
|
||||||
|
tag_directives []yaml_tag_directive_t
|
||||||
|
|
||||||
|
// The anchor (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_ALIAS_EVENT).
|
||||||
|
anchor []byte
|
||||||
|
|
||||||
|
// The tag (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT).
|
||||||
|
tag []byte
|
||||||
|
|
||||||
|
// The scalar value (for yaml_SCALAR_EVENT).
|
||||||
|
value []byte
|
||||||
|
|
||||||
|
// Is the document start/end indicator implicit, or the tag optional?
|
||||||
|
// (for yaml_DOCUMENT_START_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_SCALAR_EVENT).
|
||||||
|
implicit bool
|
||||||
|
|
||||||
|
// Is the tag optional for any non-plain style? (for yaml_SCALAR_EVENT).
|
||||||
|
quoted_implicit bool
|
||||||
|
|
||||||
|
// The style (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT).
|
||||||
|
style yaml_style_t
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *yaml_event_t) scalar_style() yaml_scalar_style_t { return yaml_scalar_style_t(e.style) }
|
||||||
|
func (e *yaml_event_t) sequence_style() yaml_sequence_style_t { return yaml_sequence_style_t(e.style) }
|
||||||
|
func (e *yaml_event_t) mapping_style() yaml_mapping_style_t { return yaml_mapping_style_t(e.style) }
|
||||||
|
|
||||||
|
// Nodes
|
||||||
|
|
||||||
|
const (
|
||||||
|
yaml_NULL_TAG = "tag:yaml.org,2002:null" // The tag !!null with the only possible value: null.
|
||||||
|
yaml_BOOL_TAG = "tag:yaml.org,2002:bool" // The tag !!bool with the values: true and false.
|
||||||
|
yaml_STR_TAG = "tag:yaml.org,2002:str" // The tag !!str for string values.
|
||||||
|
yaml_INT_TAG = "tag:yaml.org,2002:int" // The tag !!int for integer values.
|
||||||
|
yaml_FLOAT_TAG = "tag:yaml.org,2002:float" // The tag !!float for float values.
|
||||||
|
yaml_TIMESTAMP_TAG = "tag:yaml.org,2002:timestamp" // The tag !!timestamp for date and time values.
|
||||||
|
|
||||||
|
yaml_SEQ_TAG = "tag:yaml.org,2002:seq" // The tag !!seq is used to denote sequences.
|
||||||
|
yaml_MAP_TAG = "tag:yaml.org,2002:map" // The tag !!map is used to denote mapping.
|
||||||
|
|
||||||
|
// Not in original libyaml.
|
||||||
|
yaml_BINARY_TAG = "tag:yaml.org,2002:binary"
|
||||||
|
yaml_MERGE_TAG = "tag:yaml.org,2002:merge"
|
||||||
|
|
||||||
|
yaml_DEFAULT_SCALAR_TAG = yaml_STR_TAG // The default scalar tag is !!str.
|
||||||
|
yaml_DEFAULT_SEQUENCE_TAG = yaml_SEQ_TAG // The default sequence tag is !!seq.
|
||||||
|
yaml_DEFAULT_MAPPING_TAG = yaml_MAP_TAG // The default mapping tag is !!map.
|
||||||
|
)
|
||||||
|
|
||||||
|
type yaml_node_type_t int
|
||||||
|
|
||||||
|
// Node types.
|
||||||
|
const (
|
||||||
|
// An empty node.
|
||||||
|
yaml_NO_NODE yaml_node_type_t = iota
|
||||||
|
|
||||||
|
yaml_SCALAR_NODE // A scalar node.
|
||||||
|
yaml_SEQUENCE_NODE // A sequence node.
|
||||||
|
yaml_MAPPING_NODE // A mapping node.
|
||||||
|
)
|
||||||
|
|
||||||
|
// An element of a sequence node.
|
||||||
|
type yaml_node_item_t int
|
||||||
|
|
||||||
|
// An element of a mapping node.
|
||||||
|
type yaml_node_pair_t struct {
|
||||||
|
key int // The key of the element.
|
||||||
|
value int // The value of the element.
|
||||||
|
}
|
||||||
|
|
||||||
|
// The node structure.
|
||||||
|
type yaml_node_t struct {
|
||||||
|
typ yaml_node_type_t // The node type.
|
||||||
|
tag []byte // The node tag.
|
||||||
|
|
||||||
|
// The node data.
|
||||||
|
|
||||||
|
// The scalar parameters (for yaml_SCALAR_NODE).
|
||||||
|
scalar struct {
|
||||||
|
value []byte // The scalar value.
|
||||||
|
length int // The length of the scalar value.
|
||||||
|
style yaml_scalar_style_t // The scalar style.
|
||||||
|
}
|
||||||
|
|
||||||
|
// The sequence parameters (for YAML_SEQUENCE_NODE).
|
||||||
|
sequence struct {
|
||||||
|
items_data []yaml_node_item_t // The stack of sequence items.
|
||||||
|
style yaml_sequence_style_t // The sequence style.
|
||||||
|
}
|
||||||
|
|
||||||
|
// The mapping parameters (for yaml_MAPPING_NODE).
|
||||||
|
mapping struct {
|
||||||
|
pairs_data []yaml_node_pair_t // The stack of mapping pairs (key, value).
|
||||||
|
pairs_start *yaml_node_pair_t // The beginning of the stack.
|
||||||
|
pairs_end *yaml_node_pair_t // The end of the stack.
|
||||||
|
pairs_top *yaml_node_pair_t // The top of the stack.
|
||||||
|
style yaml_mapping_style_t // The mapping style.
|
||||||
|
}
|
||||||
|
|
||||||
|
start_mark yaml_mark_t // The beginning of the node.
|
||||||
|
end_mark yaml_mark_t // The end of the node.
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
// The document structure.
|
||||||
|
type yaml_document_t struct {
|
||||||
|
|
||||||
|
// The document nodes.
|
||||||
|
nodes []yaml_node_t
|
||||||
|
|
||||||
|
// The version directive.
|
||||||
|
version_directive *yaml_version_directive_t
|
||||||
|
|
||||||
|
// The list of tag directives.
|
||||||
|
tag_directives_data []yaml_tag_directive_t
|
||||||
|
tag_directives_start int // The beginning of the tag directives list.
|
||||||
|
tag_directives_end int // The end of the tag directives list.
|
||||||
|
|
||||||
|
start_implicit int // Is the document start indicator implicit?
|
||||||
|
end_implicit int // Is the document end indicator implicit?
|
||||||
|
|
||||||
|
// The start/end of the document.
|
||||||
|
start_mark, end_mark yaml_mark_t
|
||||||
|
}
|
||||||
|
|
||||||
|
// The prototype of a read handler.
|
||||||
|
//
|
||||||
|
// The read handler is called when the parser needs to read more bytes from the
|
||||||
|
// source. The handler should write not more than size bytes to the buffer.
|
||||||
|
// The number of written bytes should be set to the size_read variable.
|
||||||
|
//
|
||||||
|
// [in,out] data A pointer to an application data specified by
|
||||||
|
// yaml_parser_set_input().
|
||||||
|
// [out] buffer The buffer to write the data from the source.
|
||||||
|
// [in] size The size of the buffer.
|
||||||
|
// [out] size_read The actual number of bytes read from the source.
|
||||||
|
//
|
||||||
|
// On success, the handler should return 1. If the handler failed,
|
||||||
|
// the returned value should be 0. On EOF, the handler should set the
|
||||||
|
// size_read to 0 and return 1.
|
||||||
|
type yaml_read_handler_t func(parser *yaml_parser_t, buffer []byte) (n int, err error)
|
||||||
|
|
||||||
|
// This structure holds information about a potential simple key.
|
||||||
|
type yaml_simple_key_t struct {
|
||||||
|
possible bool // Is a simple key possible?
|
||||||
|
required bool // Is a simple key required?
|
||||||
|
token_number int // The number of the token.
|
||||||
|
mark yaml_mark_t // The position mark.
|
||||||
|
}
|
||||||
|
|
||||||
|
// The states of the parser.
|
||||||
|
type yaml_parser_state_t int
|
||||||
|
|
||||||
|
const (
|
||||||
|
yaml_PARSE_STREAM_START_STATE yaml_parser_state_t = iota
|
||||||
|
|
||||||
|
yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE // Expect the beginning of an implicit document.
|
||||||
|
yaml_PARSE_DOCUMENT_START_STATE // Expect DOCUMENT-START.
|
||||||
|
yaml_PARSE_DOCUMENT_CONTENT_STATE // Expect the content of a document.
|
||||||
|
yaml_PARSE_DOCUMENT_END_STATE // Expect DOCUMENT-END.
|
||||||
|
yaml_PARSE_BLOCK_NODE_STATE // Expect a block node.
|
||||||
|
yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE // Expect a block node or indentless sequence.
|
||||||
|
yaml_PARSE_FLOW_NODE_STATE // Expect a flow node.
|
||||||
|
yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE // Expect the first entry of a block sequence.
|
||||||
|
yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE // Expect an entry of a block sequence.
|
||||||
|
yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE // Expect an entry of an indentless sequence.
|
||||||
|
yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping.
|
||||||
|
yaml_PARSE_BLOCK_MAPPING_KEY_STATE // Expect a block mapping key.
|
||||||
|
yaml_PARSE_BLOCK_MAPPING_VALUE_STATE // Expect a block mapping value.
|
||||||
|
yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE // Expect the first entry of a flow sequence.
|
||||||
|
yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE // Expect an entry of a flow sequence.
|
||||||
|
yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE // Expect a key of an ordered mapping.
|
||||||
|
yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE // Expect a value of an ordered mapping.
|
||||||
|
yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE // Expect the end of an ordered mapping entry.
|
||||||
|
yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping.
|
||||||
|
yaml_PARSE_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping.
|
||||||
|
yaml_PARSE_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping.
|
||||||
|
yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE // Expect an empty value of a flow mapping.
|
||||||
|
yaml_PARSE_END_STATE // Expect nothing.
|
||||||
|
)
|
||||||
|
|
||||||
|
func (ps yaml_parser_state_t) String() string {
|
||||||
|
switch ps {
|
||||||
|
case yaml_PARSE_STREAM_START_STATE:
|
||||||
|
return "yaml_PARSE_STREAM_START_STATE"
|
||||||
|
case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE:
|
||||||
|
return "yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE"
|
||||||
|
case yaml_PARSE_DOCUMENT_START_STATE:
|
||||||
|
return "yaml_PARSE_DOCUMENT_START_STATE"
|
||||||
|
case yaml_PARSE_DOCUMENT_CONTENT_STATE:
|
||||||
|
return "yaml_PARSE_DOCUMENT_CONTENT_STATE"
|
||||||
|
case yaml_PARSE_DOCUMENT_END_STATE:
|
||||||
|
return "yaml_PARSE_DOCUMENT_END_STATE"
|
||||||
|
case yaml_PARSE_BLOCK_NODE_STATE:
|
||||||
|
return "yaml_PARSE_BLOCK_NODE_STATE"
|
||||||
|
case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE:
|
||||||
|
return "yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE"
|
||||||
|
case yaml_PARSE_FLOW_NODE_STATE:
|
||||||
|
return "yaml_PARSE_FLOW_NODE_STATE"
|
||||||
|
case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE:
|
||||||
|
return "yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE"
|
||||||
|
case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE:
|
||||||
|
return "yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE"
|
||||||
|
case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE:
|
||||||
|
return "yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE"
|
||||||
|
case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE:
|
||||||
|
return "yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE"
|
||||||
|
case yaml_PARSE_BLOCK_MAPPING_KEY_STATE:
|
||||||
|
return "yaml_PARSE_BLOCK_MAPPING_KEY_STATE"
|
||||||
|
case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE:
|
||||||
|
return "yaml_PARSE_BLOCK_MAPPING_VALUE_STATE"
|
||||||
|
case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE:
|
||||||
|
return "yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE"
|
||||||
|
case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE:
|
||||||
|
return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE"
|
||||||
|
case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE:
|
||||||
|
return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE"
|
||||||
|
case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE:
|
||||||
|
return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE"
|
||||||
|
case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE:
|
||||||
|
return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE"
|
||||||
|
case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE:
|
||||||
|
return "yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE"
|
||||||
|
case yaml_PARSE_FLOW_MAPPING_KEY_STATE:
|
||||||
|
return "yaml_PARSE_FLOW_MAPPING_KEY_STATE"
|
||||||
|
case yaml_PARSE_FLOW_MAPPING_VALUE_STATE:
|
||||||
|
return "yaml_PARSE_FLOW_MAPPING_VALUE_STATE"
|
||||||
|
case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE:
|
||||||
|
return "yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE"
|
||||||
|
case yaml_PARSE_END_STATE:
|
||||||
|
return "yaml_PARSE_END_STATE"
|
||||||
|
}
|
||||||
|
return "<unknown parser state>"
|
||||||
|
}
|
||||||
|
|
||||||
|
// This structure holds aliases data.
|
||||||
|
type yaml_alias_data_t struct {
|
||||||
|
anchor []byte // The anchor.
|
||||||
|
index int // The node id.
|
||||||
|
mark yaml_mark_t // The anchor mark.
|
||||||
|
}
|
||||||
|
|
||||||
|
// The parser structure.
|
||||||
|
//
|
||||||
|
// All members are internal. Manage the structure using the
|
||||||
|
// yaml_parser_ family of functions.
|
||||||
|
type yaml_parser_t struct {
|
||||||
|
|
||||||
|
// Error handling
|
||||||
|
|
||||||
|
error yaml_error_type_t // Error type.
|
||||||
|
|
||||||
|
problem string // Error description.
|
||||||
|
|
||||||
|
// The byte about which the problem occurred.
|
||||||
|
problem_offset int
|
||||||
|
problem_value int
|
||||||
|
problem_mark yaml_mark_t
|
||||||
|
|
||||||
|
// The error context.
|
||||||
|
context string
|
||||||
|
context_mark yaml_mark_t
|
||||||
|
|
||||||
|
// Reader stuff
|
||||||
|
|
||||||
|
read_handler yaml_read_handler_t // Read handler.
|
||||||
|
|
||||||
|
input_reader io.Reader // File input data.
|
||||||
|
input []byte // String input data.
|
||||||
|
input_pos int
|
||||||
|
|
||||||
|
eof bool // EOF flag
|
||||||
|
|
||||||
|
buffer []byte // The working buffer.
|
||||||
|
buffer_pos int // The current position of the buffer.
|
||||||
|
|
||||||
|
unread int // The number of unread characters in the buffer.
|
||||||
|
|
||||||
|
raw_buffer []byte // The raw buffer.
|
||||||
|
raw_buffer_pos int // The current position of the buffer.
|
||||||
|
|
||||||
|
encoding yaml_encoding_t // The input encoding.
|
||||||
|
|
||||||
|
offset int // The offset of the current position (in bytes).
|
||||||
|
mark yaml_mark_t // The mark of the current position.
|
||||||
|
|
||||||
|
// Scanner stuff
|
||||||
|
|
||||||
|
stream_start_produced bool // Have we started to scan the input stream?
|
||||||
|
stream_end_produced bool // Have we reached the end of the input stream?
|
||||||
|
|
||||||
|
flow_level int // The number of unclosed '[' and '{' indicators.
|
||||||
|
|
||||||
|
tokens []yaml_token_t // The tokens queue.
|
||||||
|
tokens_head int // The head of the tokens queue.
|
||||||
|
tokens_parsed int // The number of tokens fetched from the queue.
|
||||||
|
token_available bool // Does the tokens queue contain a token ready for dequeueing.
|
||||||
|
|
||||||
|
indent int // The current indentation level.
|
||||||
|
indents []int // The indentation levels stack.
|
||||||
|
|
||||||
|
simple_key_allowed bool // May a simple key occur at the current position?
|
||||||
|
simple_keys []yaml_simple_key_t // The stack of simple keys.
|
||||||
|
|
||||||
|
// Parser stuff
|
||||||
|
|
||||||
|
state yaml_parser_state_t // The current parser state.
|
||||||
|
states []yaml_parser_state_t // The parser states stack.
|
||||||
|
marks []yaml_mark_t // The stack of marks.
|
||||||
|
tag_directives []yaml_tag_directive_t // The list of TAG directives.
|
||||||
|
|
||||||
|
// Dumper stuff
|
||||||
|
|
||||||
|
aliases []yaml_alias_data_t // The alias data.
|
||||||
|
|
||||||
|
document *yaml_document_t // The currently parsed document.
|
||||||
|
}
|
||||||
|
|
||||||
|
// Emitter Definitions
|
||||||
|
|
||||||
|
// The prototype of a write handler.
|
||||||
|
//
|
||||||
|
// The write handler is called when the emitter needs to flush the accumulated
|
||||||
|
// characters to the output. The handler should write @a size bytes of the
|
||||||
|
// @a buffer to the output.
|
||||||
|
//
|
||||||
|
// @param[in,out] data A pointer to an application data specified by
|
||||||
|
// yaml_emitter_set_output().
|
||||||
|
// @param[in] buffer The buffer with bytes to be written.
|
||||||
|
// @param[in] size The size of the buffer.
|
||||||
|
//
|
||||||
|
// @returns On success, the handler should return @c 1. If the handler failed,
|
||||||
|
// the returned value should be @c 0.
|
||||||
|
//
|
||||||
|
type yaml_write_handler_t func(emitter *yaml_emitter_t, buffer []byte) error
|
||||||
|
|
||||||
|
type yaml_emitter_state_t int
|
||||||
|
|
||||||
|
// The emitter states.
|
||||||
|
const (
|
||||||
|
// Expect STREAM-START.
|
||||||
|
yaml_EMIT_STREAM_START_STATE yaml_emitter_state_t = iota
|
||||||
|
|
||||||
|
yaml_EMIT_FIRST_DOCUMENT_START_STATE // Expect the first DOCUMENT-START or STREAM-END.
|
||||||
|
yaml_EMIT_DOCUMENT_START_STATE // Expect DOCUMENT-START or STREAM-END.
|
||||||
|
yaml_EMIT_DOCUMENT_CONTENT_STATE // Expect the content of a document.
|
||||||
|
yaml_EMIT_DOCUMENT_END_STATE // Expect DOCUMENT-END.
|
||||||
|
yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a flow sequence.
|
||||||
|
yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE // Expect an item of a flow sequence.
|
||||||
|
yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping.
|
||||||
|
yaml_EMIT_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping.
|
||||||
|
yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a flow mapping.
|
||||||
|
yaml_EMIT_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping.
|
||||||
|
yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a block sequence.
|
||||||
|
yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE // Expect an item of a block sequence.
|
||||||
|
yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping.
|
||||||
|
yaml_EMIT_BLOCK_MAPPING_KEY_STATE // Expect the key of a block mapping.
|
||||||
|
yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a block mapping.
|
||||||
|
yaml_EMIT_BLOCK_MAPPING_VALUE_STATE // Expect a value of a block mapping.
|
||||||
|
yaml_EMIT_END_STATE // Expect nothing.
|
||||||
|
)
|
||||||
|
|
||||||
|
// The emitter structure.
|
||||||
|
//
|
||||||
|
// All members are internal. Manage the structure using the @c yaml_emitter_
|
||||||
|
// family of functions.
|
||||||
|
type yaml_emitter_t struct {
|
||||||
|
|
||||||
|
// Error handling
|
||||||
|
|
||||||
|
error yaml_error_type_t // Error type.
|
||||||
|
problem string // Error description.
|
||||||
|
|
||||||
|
// Writer stuff
|
||||||
|
|
||||||
|
write_handler yaml_write_handler_t // Write handler.
|
||||||
|
|
||||||
|
output_buffer *[]byte // String output data.
|
||||||
|
output_writer io.Writer // File output data.
|
||||||
|
|
||||||
|
buffer []byte // The working buffer.
|
||||||
|
buffer_pos int // The current position of the buffer.
|
||||||
|
|
||||||
|
raw_buffer []byte // The raw buffer.
|
||||||
|
raw_buffer_pos int // The current position of the buffer.
|
||||||
|
|
||||||
|
encoding yaml_encoding_t // The stream encoding.
|
||||||
|
|
||||||
|
// Emitter stuff
|
||||||
|
|
||||||
|
canonical bool // If the output is in the canonical style?
|
||||||
|
best_indent int // The number of indentation spaces.
|
||||||
|
best_width int // The preferred width of the output lines.
|
||||||
|
unicode bool // Allow unescaped non-ASCII characters?
|
||||||
|
line_break yaml_break_t // The preferred line break.
|
||||||
|
|
||||||
|
state yaml_emitter_state_t // The current emitter state.
|
||||||
|
states []yaml_emitter_state_t // The stack of states.
|
||||||
|
|
||||||
|
events []yaml_event_t // The event queue.
|
||||||
|
events_head int // The head of the event queue.
|
||||||
|
|
||||||
|
indents []int // The stack of indentation levels.
|
||||||
|
|
||||||
|
tag_directives []yaml_tag_directive_t // The list of tag directives.
|
||||||
|
|
||||||
|
indent int // The current indentation level.
|
||||||
|
|
||||||
|
flow_level int // The current flow level.
|
||||||
|
|
||||||
|
root_context bool // Is it the document root context?
|
||||||
|
sequence_context bool // Is it a sequence context?
|
||||||
|
mapping_context bool // Is it a mapping context?
|
||||||
|
simple_key_context bool // Is it a simple mapping key context?
|
||||||
|
|
||||||
|
line int // The current line.
|
||||||
|
column int // The current column.
|
||||||
|
whitespace bool // If the last character was a whitespace?
|
||||||
|
indention bool // If the last character was an indentation character (' ', '-', '?', ':')?
|
||||||
|
open_ended bool // If an explicit document end is required?
|
||||||
|
|
||||||
|
// Anchor analysis.
|
||||||
|
anchor_data struct {
|
||||||
|
anchor []byte // The anchor value.
|
||||||
|
alias bool // Is it an alias?
|
||||||
|
}
|
||||||
|
|
||||||
|
// Tag analysis.
|
||||||
|
tag_data struct {
|
||||||
|
handle []byte // The tag handle.
|
||||||
|
suffix []byte // The tag suffix.
|
||||||
|
}
|
||||||
|
|
||||||
|
// Scalar analysis.
|
||||||
|
scalar_data struct {
|
||||||
|
value []byte // The scalar value.
|
||||||
|
multiline bool // Does the scalar contain line breaks?
|
||||||
|
flow_plain_allowed bool // Can the scalar be expressed in the flow plain style?
|
||||||
|
block_plain_allowed bool // Can the scalar be expressed in the block plain style?
|
||||||
|
single_quoted_allowed bool // Can the scalar be expressed in the single quoted style?
|
||||||
|
block_allowed bool // Can the scalar be expressed in the literal or folded styles?
|
||||||
|
style yaml_scalar_style_t // The output style.
|
||||||
|
}
|
||||||
|
|
||||||
|
// Dumper stuff
|
||||||
|
|
||||||
|
opened bool // If the stream was already opened?
|
||||||
|
closed bool // If the stream was already closed?
|
||||||
|
|
||||||
|
// The information associated with the document nodes.
|
||||||
|
anchors *struct {
|
||||||
|
references int // The number of references.
|
||||||
|
anchor int // The anchor id.
|
||||||
|
serialized bool // If the node has been emitted?
|
||||||
|
}
|
||||||
|
|
||||||
|
last_anchor_id int // The last assigned anchor id.
|
||||||
|
|
||||||
|
document *yaml_document_t // The currently emitted document.
|
||||||
|
}
|
173
vendor/gopkg.in/yaml.v2/yamlprivateh.go
generated
vendored
Normal file
@ -0,0 +1,173 @@
|
|||||||
|
package yaml
|
||||||
|
|
||||||
|
const (
|
||||||
|
// The size of the input raw buffer.
|
||||||
|
input_raw_buffer_size = 512
|
||||||
|
|
||||||
|
// The size of the input buffer.
|
||||||
|
// It should be possible to decode the whole raw buffer.
|
||||||
|
input_buffer_size = input_raw_buffer_size * 3
|
||||||
|
|
||||||
|
// The size of the output buffer.
|
||||||
|
output_buffer_size = 128
|
||||||
|
|
||||||
|
// The size of the output raw buffer.
|
||||||
|
// It should be possible to encode the whole output buffer.
|
||||||
|
output_raw_buffer_size = (output_buffer_size*2 + 2)
|
||||||
|
|
||||||
|
// The size of other stacks and queues.
|
||||||
|
initial_stack_size = 16
|
||||||
|
initial_queue_size = 16
|
||||||
|
initial_string_size = 16
|
||||||
|
)
|
||||||
|
|
||||||
|
// Check if the character at the specified position is an alphabetical
|
||||||
|
// character, a digit, '_', or '-'.
|
||||||
|
func is_alpha(b []byte, i int) bool {
|
||||||
|
return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'Z' || b[i] >= 'a' && b[i] <= 'z' || b[i] == '_' || b[i] == '-'
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the character at the specified position is a digit.
|
||||||
|
func is_digit(b []byte, i int) bool {
|
||||||
|
return b[i] >= '0' && b[i] <= '9'
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the value of a digit.
|
||||||
|
func as_digit(b []byte, i int) int {
|
||||||
|
return int(b[i]) - '0'
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the character at the specified position is a hex-digit.
|
||||||
|
func is_hex(b []byte, i int) bool {
|
||||||
|
return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'F' || b[i] >= 'a' && b[i] <= 'f'
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the value of a hex-digit.
|
||||||
|
func as_hex(b []byte, i int) int {
|
||||||
|
bi := b[i]
|
||||||
|
if bi >= 'A' && bi <= 'F' {
|
||||||
|
return int(bi) - 'A' + 10
|
||||||
|
}
|
||||||
|
if bi >= 'a' && bi <= 'f' {
|
||||||
|
return int(bi) - 'a' + 10
|
||||||
|
}
|
||||||
|
return int(bi) - '0'
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the character is ASCII.
|
||||||
|
func is_ascii(b []byte, i int) bool {
|
||||||
|
return b[i] <= 0x7F
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the character at the start of the buffer can be printed unescaped.
|
||||||
|
func is_printable(b []byte, i int) bool {
|
||||||
|
return ((b[i] == 0x0A) || // . == #x0A
|
||||||
|
(b[i] >= 0x20 && b[i] <= 0x7E) || // #x20 <= . <= #x7E
|
||||||
|
(b[i] == 0xC2 && b[i+1] >= 0xA0) || // #0xA0 <= . <= #xD7FF
|
||||||
|
(b[i] > 0xC2 && b[i] < 0xED) ||
|
||||||
|
(b[i] == 0xED && b[i+1] < 0xA0) ||
|
||||||
|
(b[i] == 0xEE) ||
|
||||||
|
(b[i] == 0xEF && // #xE000 <= . <= #xFFFD
|
||||||
|
!(b[i+1] == 0xBB && b[i+2] == 0xBF) && // && . != #xFEFF
|
||||||
|
!(b[i+1] == 0xBF && (b[i+2] == 0xBE || b[i+2] == 0xBF))))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the character at the specified position is NUL.
|
||||||
|
func is_z(b []byte, i int) bool {
|
||||||
|
return b[i] == 0x00
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the beginning of the buffer is a BOM.
|
||||||
|
func is_bom(b []byte, i int) bool {
|
||||||
|
return b[0] == 0xEF && b[1] == 0xBB && b[2] == 0xBF
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the character at the specified position is space.
|
||||||
|
func is_space(b []byte, i int) bool {
|
||||||
|
return b[i] == ' '
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the character at the specified position is tab.
|
||||||
|
func is_tab(b []byte, i int) bool {
|
||||||
|
return b[i] == '\t'
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the character at the specified position is blank (space or tab).
|
||||||
|
func is_blank(b []byte, i int) bool {
|
||||||
|
//return is_space(b, i) || is_tab(b, i)
|
||||||
|
return b[i] == ' ' || b[i] == '\t'
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the character at the specified position is a line break.
|
||||||
|
func is_break(b []byte, i int) bool {
|
||||||
|
return (b[i] == '\r' || // CR (#xD)
|
||||||
|
b[i] == '\n' || // LF (#xA)
|
||||||
|
b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85)
|
||||||
|
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028)
|
||||||
|
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9) // PS (#x2029)
|
||||||
|
}
|
||||||
|
|
||||||
|
func is_crlf(b []byte, i int) bool {
|
||||||
|
return b[i] == '\r' && b[i+1] == '\n'
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the character is a line break or NUL.
|
||||||
|
func is_breakz(b []byte, i int) bool {
|
||||||
|
//return is_break(b, i) || is_z(b, i)
|
||||||
|
return ( // is_break:
|
||||||
|
b[i] == '\r' || // CR (#xD)
|
||||||
|
b[i] == '\n' || // LF (#xA)
|
||||||
|
b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85)
|
||||||
|
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028)
|
||||||
|
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029)
|
||||||
|
// is_z:
|
||||||
|
b[i] == 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the character is a line break, space, or NUL.
|
||||||
|
func is_spacez(b []byte, i int) bool {
|
||||||
|
//return is_space(b, i) || is_breakz(b, i)
|
||||||
|
return ( // is_space:
|
||||||
|
b[i] == ' ' ||
|
||||||
|
// is_breakz:
|
||||||
|
b[i] == '\r' || // CR (#xD)
|
||||||
|
b[i] == '\n' || // LF (#xA)
|
||||||
|
b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85)
|
||||||
|
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028)
|
||||||
|
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029)
|
||||||
|
b[i] == 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the character is a line break, space, tab, or NUL.
|
||||||
|
func is_blankz(b []byte, i int) bool {
|
||||||
|
//return is_blank(b, i) || is_breakz(b, i)
|
||||||
|
return ( // is_blank:
|
||||||
|
b[i] == ' ' || b[i] == '\t' ||
|
||||||
|
// is_breakz:
|
||||||
|
b[i] == '\r' || // CR (#xD)
|
||||||
|
b[i] == '\n' || // LF (#xA)
|
||||||
|
b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85)
|
||||||
|
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028)
|
||||||
|
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029)
|
||||||
|
b[i] == 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine the width of the character.
|
||||||
|
func width(b byte) int {
|
||||||
|
// Don't replace these by a switch without first
|
||||||
|
// confirming that it is being inlined.
|
||||||
|
if b&0x80 == 0x00 {
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
if b&0xE0 == 0xC0 {
|
||||||
|
return 2
|
||||||
|
}
|
||||||
|
if b&0xF0 == 0xE0 {
|
||||||
|
return 3
|
||||||
|
}
|
||||||
|
if b&0xF8 == 0xF0 {
|
||||||
|
return 4
|
||||||
|
}
|
||||||
|
return 0
|
||||||
|
|
||||||
|
}
|