Mirror of https://github.com/LBRYFoundation/tracker.git
Merge pull request #338 from mrd0ll4r/fix-parsing

Fix and optimize query parsing, make parsing errors static

Commit 7ea4b3dc7a
2 changed files with 85 additions and 73 deletions
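
The rewrite drops the hand-rolled index bookkeeping in favor of a splitting strategy similar to net/url's ParseQuery (the diff's own comment calls it "basically url.parseQuery"): cut the raw query on '&' or ';', split each segment on the first '=', and unescape key and value separately. The sketch below is a minimal, standalone rendering of that strategy, not the project's code; the name splitQueryParams and the plain map[string]string result are invented for illustration (the real parser also special-cases info_hash and returns typed client errors).

package main

import (
	"fmt"
	"net/url"
	"strings"
)

// splitQueryParams is a hypothetical, stripped-down illustration of the
// approach: walk the raw query once, cutting key=value pairs on '&' or ';'
// and unescaping each side, instead of tracking key/value indices by hand.
func splitQueryParams(query string) (map[string]string, error) {
	params := make(map[string]string)
	for query != "" {
		pair := query
		if i := strings.IndexAny(pair, "&;"); i >= 0 {
			pair, query = pair[:i], pair[i+1:]
		} else {
			query = ""
		}
		if pair == "" {
			continue // tolerate empty segments such as "a=1&&b=2"
		}
		key, value := pair, ""
		if i := strings.Index(pair, "="); i >= 0 {
			key, value = pair[:i], pair[i+1:]
		}
		key, err := url.QueryUnescape(key)
		if err != nil {
			return nil, err
		}
		value, err = url.QueryUnescape(value)
		if err != nil {
			return nil, err
		}
		params[strings.ToLower(key)] = value
	}
	return params, nil
}

func main() {
	m, err := splitQueryParams("uploaded=0&downloaded=0;left=4294967295&compact=1")
	fmt.Println(m, err)
}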
@@ -5,6 +5,8 @@ import (
 	"net/url"
 	"strconv"
 	"strings"
+
+	log "github.com/Sirupsen/logrus"
 )
 
 // Params is used to fetch (optional) request parameters from an Announce.
@@ -39,6 +41,10 @@ var ErrKeyNotFound = errors.New("query: value for the provided key does not exist")
 // with invalid length.
 var ErrInvalidInfohash = ClientError("provided invalid infohash")
 
+// ErrInvalidQueryEscape is returned when a query string contains invalid
+// escapes.
+var ErrInvalidQueryEscape = ClientError("invalid query escape")
+
 // QueryParams parses a URL Query and implements the Params interface with some
 // additional helpers.
 type QueryParams struct {
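
The hunk above is where parsing errors become static: the rewritten parseQuery in the next hunk returns the fixed ClientError ErrInvalidQueryEscape (with a debug log) instead of net/url's dynamic unescape error, whose message embeds the offending escape and would, per the comments added below, inflate the number of Prometheus time series the frontends record. A tiny standalone illustration of the underlying failure, assuming nothing beyond the standard library:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Malformed percent-escapes (as in the "info_hash=%0%a" test query) make
	// url.QueryUnescape return an error that quotes the bad escape sequence,
	// so every distinct bad input would yield a distinct error string.
	_, err := url.QueryUnescape("%0%a")
	fmt.Println(err)
}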
@@ -88,69 +94,55 @@ func ParseURLData(urlData string) (*QueryParams, error) {
 
 // parseQuery parses a URL query into QueryParams.
 // The query is expected to exclude the delimiting '?'.
-func parseQuery(rawQuery string) (*QueryParams, error) {
-	var (
-		keyStart, keyEnd int
-		valStart, valEnd int
-
-		onKey = true
-
-		q = &QueryParams{
-			query:      rawQuery,
-			infoHashes: nil,
-			params:     make(map[string]string),
-		}
-	)
-
-	for i, length := 0, len(rawQuery); i < length; i++ {
-		separator := rawQuery[i] == '&' || rawQuery[i] == ';'
-		last := i == length-1
-
-		if separator || last {
-			if onKey && !last {
-				keyStart = i + 1
-				continue
-			}
-
-			if last && !separator && !onKey {
-				valEnd = i
-			}
-
-			keyStr, err := url.QueryUnescape(rawQuery[keyStart : keyEnd+1])
-			if err != nil {
-				return nil, err
-			}
-
-			var valStr string
-
-			if valEnd > 0 {
-				valStr, err = url.QueryUnescape(rawQuery[valStart : valEnd+1])
-				if err != nil {
-					return nil, err
-				}
-			}
-
-			if keyStr == "info_hash" {
-				if len(valStr) != 20 {
-					return nil, ErrInvalidInfohash
-				}
-				q.infoHashes = append(q.infoHashes, InfoHashFromString(valStr))
-			} else {
-				q.params[strings.ToLower(keyStr)] = valStr
-			}
-
-			valEnd = 0
-			onKey = true
-			keyStart = i + 1
-
-		} else if rawQuery[i] == '=' {
-			onKey = false
-			valStart = i + 1
-			valEnd = 0
-		} else if onKey {
-			keyEnd = i
-		} else {
-			valEnd = i
+func parseQuery(query string) (q *QueryParams, err error) {
+	// This is basically url.parseQuery, but with a map[string]string
+	// instead of map[string][]string for the values.
+	q = &QueryParams{
+		query:      query,
+		infoHashes: nil,
+		params:     make(map[string]string),
+	}
+
+	for query != "" {
+		key := query
+		if i := strings.IndexAny(key, "&;"); i >= 0 {
+			key, query = key[:i], key[i+1:]
+		} else {
+			query = ""
+		}
+		if key == "" {
+			continue
+		}
+		value := ""
+		if i := strings.Index(key, "="); i >= 0 {
+			key, value = key[:i], key[i+1:]
+		}
+		key, err = url.QueryUnescape(key)
+		if err != nil {
+			// QueryUnescape returns an error like "invalid escape: '%x'".
+			// But frontends record these errors to prometheus, which generates
+			// a lot of time series.
+			// We log it here for debugging instead.
+			log.WithFields(log.Fields{"error": err}).Debug("failed to unescape query param key")
+			return nil, ErrInvalidQueryEscape
+		}
+		value, err = url.QueryUnescape(value)
+		if err != nil {
+			// QueryUnescape returns an error like "invalid escape: '%x'".
+			// But frontends record these errors to prometheus, which generates
+			// a lot of time series.
+			// We log it here for debugging instead.
+			log.WithFields(log.Fields{"error": err}).Debug("failed to unescape query param value")
+			return nil, ErrInvalidQueryEscape
+		}
+
+		if key == "info_hash" {
+			if len(value) != 20 {
+				return nil, ErrInvalidInfohash
+			}
+			q.infoHashes = append(q.infoHashes, InfoHashFromString(value))
+		} else {
+			q.params[strings.ToLower(key)] = value
 		}
 	}
 
@@ -27,6 +27,12 @@ var (
 	InvalidQueries = []string{
 		"/announce?" + "info_hash=%0%a",
 	}
+
+	// See https://github.com/chihaya/chihaya/issues/334.
+	shouldNotPanicQueries = []string{
+		"/annnounce?" + "info_hash=" + testPeerID + "&a",
+		"/annnounce?" + "info_hash=" + testPeerID + "&=b?",
+	}
 )
 
 func mapArrayEqual(boxed map[string][]string, unboxed map[string]string) bool {
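
The shouldNotPanicQueries inputs added above are regression cases for chihaya issue #334 (linked in the comment): a trailing bare key ("&a") and an empty key with a stray value ("&=b?") are exactly the shapes that could trip the old index-based parser, and the new test in the next hunk only asserts that ParseURLData survives them without panicking.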
@@ -84,26 +90,40 @@ func TestParseInvalidURLData(t *testing.T) {
 	}
 }
 
+func TestParseShouldNotPanicURLData(t *testing.T) {
+	for _, parseStr := range shouldNotPanicQueries {
+		ParseURLData(parseStr)
+	}
+}
+
 func BenchmarkParseQuery(b *testing.B) {
+	announceStrings := make([]string, 0)
+	for i := range ValidAnnounceArguments {
+		announceStrings = append(announceStrings, ValidAnnounceArguments[i].Encode())
+	}
+	b.ResetTimer()
 	for bCount := 0; bCount < b.N; bCount++ {
-		for parseIndex, parseStr := range ValidAnnounceArguments {
-			parsedQueryObj, err := parseQuery(parseStr.Encode())
-			if err != nil {
-				b.Error(err, parseIndex)
-				b.Log(parsedQueryObj)
-			}
+		i := bCount % len(announceStrings)
+		parsedQueryObj, err := parseQuery(announceStrings[i])
+		if err != nil {
+			b.Error(err, i)
+			b.Log(parsedQueryObj)
 		}
 	}
 }
 
 func BenchmarkURLParseQuery(b *testing.B) {
+	announceStrings := make([]string, 0)
+	for i := range ValidAnnounceArguments {
+		announceStrings = append(announceStrings, ValidAnnounceArguments[i].Encode())
+	}
+	b.ResetTimer()
 	for bCount := 0; bCount < b.N; bCount++ {
-		for parseIndex, parseStr := range ValidAnnounceArguments {
-			parsedQueryObj, err := url.ParseQuery(parseStr.Encode())
-			if err != nil {
-				b.Error(err, parseIndex)
-				b.Log(parsedQueryObj)
-			}
+		i := bCount % len(announceStrings)
+		parsedQueryObj, err := url.ParseQuery(announceStrings[i])
+		if err != nil {
+			b.Error(err, i)
+			b.Log(parsedQueryObj)
 		}
 	}
 }
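
Both benchmarks now encode the announce arguments once, outside the timed region, and call b.ResetTimer(), so the b.N loop measures only the query parsing itself rather than url.Values.Encode. A generic sketch of that pattern, with hypothetical package and benchmark names and only the standard library:

package params_test

import (
	"net/url"
	"testing"
)

// BenchmarkExample mirrors the shape of the updated benchmarks: inputs are
// built once up front, the timer is reset, and the timed loop cycles through
// the pre-encoded strings.
func BenchmarkExample(b *testing.B) {
	inputs := make([]string, 0, 4)
	for i := 0; i < 4; i++ {
		inputs = append(inputs, url.Values{"left": {"0"}, "compact": {"1"}}.Encode())
	}
	b.ResetTimer()
	for n := 0; n < b.N; n++ {
		if _, err := url.ParseQuery(inputs[n%len(inputs)]); err != nil {
			b.Fatal(err)
		}
	}
}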