package dnsfilter

import (
	"bytes"
	"crypto/sha256"
	"encoding/binary"
	"encoding/hex"
	"encoding/json"
	"fmt"
	"net"
	"net/http"
	"sort"
	"strings"
	"time"

	"github.com/AdguardTeam/dnsproxy/upstream"
	"github.com/AdguardTeam/golibs/cache"
	"github.com/AdguardTeam/golibs/log"
	"github.com/miekg/dns"
	"golang.org/x/net/publicsuffix"
)

// Safe browsing and parental control methods.

const (
	dnsTimeout                = 3 * time.Second
	defaultSafebrowsingServer = `https://dns-family.adguard.com/dns-query`
	defaultParentalServer     = `https://dns-family.adguard.com/dns-query`
	sbTXTSuffix               = `sb.dns.adguard.com.`
	pcTXTSuffix               = `pc.dns.adguard.com.`
)
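
// initSecurityServices initializes the DNS upstreams used by the safe
// browsing and parental control services.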
func (d *DNSFilter) initSecurityServices() error {
	var err error
	d.safeBrowsingServer = defaultSafebrowsingServer
	d.parentalServer = defaultParentalServer
	opts := upstream.Options{
		Timeout: dnsTimeout,
		ServerIPAddrs: []net.IP{
			net.ParseIP("94.140.14.15"),
			net.ParseIP("94.140.15.16"),
			net.ParseIP("2a10:50c0::bad1:ff"),
			net.ParseIP("2a10:50c0::bad2:ff"),
		},
	}

	d.parentalUpstream, err = upstream.AddressToUpstream(d.parentalServer, opts)
	if err != nil {
		return err
	}

	d.safeBrowsingUpstream, err = upstream.AddressToUpstream(d.safeBrowsingServer, opts)
	if err != nil {
		return err
	}

	return nil
}

// The cache value format is:
//
//	expire [4]byte  - expiration time, UNIX seconds, big-endian
//	hash [32]byte   - SHA256 hash
//	...
func (c *sbCtx) setCache(prefix, hashes []byte) {
	d := make([]byte, 4+len(hashes))
	expire := uint(time.Now().Unix()) + c.cacheTime*60
	binary.BigEndian.PutUint32(d[:4], uint32(expire))
	copy(d[4:], hashes)
	c.cache.Set(prefix, d)
	log.Debug("%s: stored in cache: %v", c.svc, prefix)
}

// findInHash returns the 32-byte hash if it's found in c.hashToHost.
func (c *sbCtx) findInHash(val []byte) (hash32 [32]byte, found bool) {
	for i := 4; i < len(val); i += 32 {
		hash := val[i : i+32]

		copy(hash32[:], hash[0:32])

		_, found = c.hashToHost[hash32]
		if found {
			return hash32, found
		}
	}

	return [32]byte{}, false
}
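
// getCached checks the cache for the hashes in c.hashToHost. It returns 1 if
// the host is blocked by a cached entry, -1 if all hash prefixes are cached
// and none of them match, and 0 if some prefixes still have to be requested
// from the upstream, in which case c.hashToHost is reduced to those hashes.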
func (c *sbCtx) getCached() int {
	now := time.Now().Unix()
	hashesToRequest := map[[32]byte]string{}
	for k, v := range c.hashToHost {
		key := k[0:2]
		val := c.cache.Get(key)
		if val == nil || now >= int64(binary.BigEndian.Uint32(val)) {
			hashesToRequest[k] = v
			continue
		}
		if hash32, found := c.findInHash(val); found {
			log.Debug("%s: found in cache: %s: blocked by %v", c.svc, c.host, hash32)
			return 1
		}
	}

	if len(hashesToRequest) == 0 {
		log.Debug("%s: found in cache: %s: not blocked", c.svc, c.host)
		return -1
	}

	c.hashToHost = hashesToRequest
	return 0
}
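
// sbCtx is the context of a single safe browsing or parental control check.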
type sbCtx struct {
	host       string
	svc        string
	hashToHost map[[32]byte]string
	cache      cache.Cache
	cacheTime  uint
}
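
// hostnameToHashes returns the SHA256 hashes of the hostname (truncated to
// its last four labels) and of its parent domains, excluding the public
// (ICANN) suffix, mapped to the corresponding domain names.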
func hostnameToHashes(host string) map[[32]byte]string {
	hashes := map[[32]byte]string{}
	tld, icann := publicsuffix.PublicSuffix(host)
	if !icann {
		// private suffixes like cloudfront.net
		tld = ""
	}
	curhost := host

	nDots := 0
	for i := len(curhost) - 1; i >= 0; i-- {
		if curhost[i] == '.' {
			nDots++
			if nDots == 4 {
				curhost = curhost[i+1:] // "xxx.a.b.c.d" -> "a.b.c.d"
				break
			}
		}
	}

	for {
		if curhost == "" {
			// we've reached end of string
			break
		}
		if tld != "" && curhost == tld {
			// we've reached the TLD, don't hash it
			break
		}

		sum := sha256.Sum256([]byte(curhost))
		hashes[sum] = curhost

		pos := strings.IndexByte(curhost, byte('.'))
		if pos < 0 {
			break
		}
		curhost = curhost[pos+1:]
	}
	return hashes
}

// getQuestion converts the hash prefixes in c.hashToHost into a DNS TXT
// question for the safe browsing or parental control service, e.g.
// "0a1b.2c3d.sb.dns.adguard.com.".
func (c *sbCtx) getQuestion() string {
	b := &strings.Builder{}
	encoder := hex.NewEncoder(b)

	for hash := range c.hashToHost {
		// Ignore errors, since strings.(*Builder).Write never returns
		// errors.
		//
		// TODO(e.burkov, a.garipov): Find out and document why exactly
		// this slice.
		_, _ = encoder.Write(hash[0:2])
		_, _ = b.WriteRune('.')
	}

	if c.svc == "SafeBrowsing" {
		// See comment above.
		_, _ = b.WriteString(sbTXTSuffix)
		return b.String()
	}

	// See comment above.
	_, _ = b.WriteString(pcTXTSuffix)
	return b.String()
}

// processTXT parses the hashes from the TXT response. It returns true if one
// of them matches c.hashToHost, along with all the hashes received.
func (c *sbCtx) processTXT(resp *dns.Msg) (bool, [][]byte) {
	matched := false
	hashes := [][]byte{}
	for _, a := range resp.Answer {
		txt, ok := a.(*dns.TXT)
		if !ok {
			continue
		}
		log.Debug("%s: received hashes for %s: %v", c.svc, c.host, txt.Txt)

		for _, t := range txt.Txt {
			if len(t) != 32*2 {
				continue
			}
			hash, err := hex.DecodeString(t)
			if err != nil {
				continue
			}

			hashes = append(hashes, hash)

			if !matched {
				var hash32 [32]byte
				copy(hash32[:], hash)
				hashHost, ok := c.hashToHost[hash32]
				if ok {
					log.Debug("%s: matched %s by %s/%s", c.svc, c.host, hashHost, t)
					matched = true
				}
			}
		}
	}

	return matched, hashes
}
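
// storeCache groups the received hashes by their 2-byte prefix and stores
// each group in the cache. Requested prefixes for which no hashes were
// received are cached as empty entries.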
func (c *sbCtx) storeCache(hashes [][]byte) {
	sort.Slice(hashes, func(a, b int) bool {
		return bytes.Compare(hashes[a], hashes[b]) < 0
	})

	var curData []byte
	var prevPrefix []byte
	for i, hash := range hashes {
		prefix := hash[0:2]
		if !bytes.Equal(prefix, prevPrefix) {
			if i != 0 {
				c.setCache(prevPrefix, curData)
				curData = nil
			}
			prevPrefix = hashes[i][0:2]
		}
		curData = append(curData, hash...)
	}

	if len(prevPrefix) != 0 {
		c.setCache(prevPrefix, curData)
	}

	for hash := range c.hashToHost {
		prefix := hash[0:2]
		val := c.cache.Get(prefix)
		if val == nil {
			c.setCache(prefix, nil)
		}
	}
}
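
// check looks up the host using the safe browsing or parental control service
// described by c, consulting the cache before querying upstream u. It returns
// r if the host is blocked and an empty Result otherwise.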
func check(c *sbCtx, r Result, u upstream.Upstream) (Result, error) {
	c.hashToHost = hostnameToHashes(c.host)
	switch c.getCached() {
	case -1:
		return Result{}, nil
	case 1:
		return r, nil
	}

	question := c.getQuestion()

	log.Tracef("%s: checking %s: %s", c.svc, c.host, question)
	req := (&dns.Msg{}).SetQuestion(question, dns.TypeTXT)

	resp, err := u.Exchange(req)
	if err != nil {
		return Result{}, err
	}

	matched, receivedHashes := c.processTXT(resp)

	c.storeCache(receivedHashes)
	if matched {
		return r, nil
	}

	return Result{}, nil
}
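
// checkSafeBrowsing checks if the host is blocked by the safe browsing
// service.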
func (d *DNSFilter) checkSafeBrowsing(host string) (Result, error) {
	if log.GetLevel() >= log.DEBUG {
		timer := log.StartTimer()
		defer timer.LogElapsed("SafeBrowsing lookup for %s", host)
	}
	ctx := &sbCtx{
		host:      host,
		svc:       "SafeBrowsing",
		cache:     gctx.safebrowsingCache,
		cacheTime: d.Config.CacheTime,
	}
	res := Result{
		IsFiltered: true,
		Reason:     FilteredSafeBrowsing,
		Rules: []*ResultRule{{
			Text: "adguard-malware-shavar",
		}},
	}
	return check(ctx, res, d.safeBrowsingUpstream)
}
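
// checkParental checks if the host is blocked by the parental control
// service.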
func (d *DNSFilter) checkParental(host string) (Result, error) {
	if log.GetLevel() >= log.DEBUG {
		timer := log.StartTimer()
		defer timer.LogElapsed("Parental lookup for %s", host)
	}
	ctx := &sbCtx{
		host:      host,
		svc:       "Parental",
		cache:     gctx.parentalCache,
		cacheTime: d.Config.CacheTime,
	}
	res := Result{
		IsFiltered: true,
		Reason:     FilteredParental,
		Rules: []*ResultRule{{
			Text: "parental CATEGORY_BLACKLISTED",
		}},
	}
	return check(ctx, res, d.parentalUpstream)
}
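
// httpError logs the formatted message and writes it to w along with the
// given HTTP status code.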
func httpError(r *http.Request, w http.ResponseWriter, code int, format string, args ...interface{}) {
	text := fmt.Sprintf(format, args...)
	log.Info("DNSFilter: %s %s: %s", r.Method, r.URL, text)
	http.Error(w, text, code)
}
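
// handleSafeBrowsingEnable is the handler of the POST
// /control/safebrowsing/enable HTTP API.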
func (d *DNSFilter) handleSafeBrowsingEnable(w http.ResponseWriter, r *http.Request) {
	d.Config.SafeBrowsingEnabled = true
	d.Config.ConfigModified()
}
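
// handleSafeBrowsingDisable is the handler of the POST
// /control/safebrowsing/disable HTTP API.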
func (d *DNSFilter) handleSafeBrowsingDisable(w http.ResponseWriter, r *http.Request) {
	d.Config.SafeBrowsingEnabled = false
	d.Config.ConfigModified()
}
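
// handleSafeBrowsingStatus is the handler of the GET
// /control/safebrowsing/status HTTP API.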
func (d *DNSFilter) handleSafeBrowsingStatus(w http.ResponseWriter, r *http.Request) {
	data := map[string]interface{}{
		"enabled": d.Config.SafeBrowsingEnabled,
	}
	jsonVal, err := json.Marshal(data)
	if err != nil {
		httpError(r, w, http.StatusInternalServerError, "Unable to marshal status json: %s", err)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	_, err = w.Write(jsonVal)
	if err != nil {
		httpError(r, w, http.StatusInternalServerError, "Unable to write response json: %s", err)
		return
	}
}
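
// handleParentalEnable is the handler of the POST /control/parental/enable
// HTTP API.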
func (d *DNSFilter) handleParentalEnable(w http.ResponseWriter, r *http.Request) {
	d.Config.ParentalEnabled = true
	d.Config.ConfigModified()
}
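
// handleParentalDisable is the handler of the POST /control/parental/disable
// HTTP API.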
func (d *DNSFilter) handleParentalDisable(w http.ResponseWriter, r *http.Request) {
	d.Config.ParentalEnabled = false
	d.Config.ConfigModified()
}
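
// handleParentalStatus is the handler of the GET /control/parental/status
// HTTP API.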
func (d *DNSFilter) handleParentalStatus(w http.ResponseWriter, r *http.Request) {
	data := map[string]interface{}{
		"enabled": d.Config.ParentalEnabled,
	}
	jsonVal, err := json.Marshal(data)
	if err != nil {
		httpError(r, w, http.StatusInternalServerError, "Unable to marshal status json: %s", err)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	_, err = w.Write(jsonVal)
	if err != nil {
		httpError(r, w, http.StatusInternalServerError, "Unable to write response json: %s", err)
		return
	}
}
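
// registerSecurityHandlers registers the HTTP handlers for the safe browsing,
// parental control, and safe search settings.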
func (d *DNSFilter) registerSecurityHandlers() {
	d.Config.HTTPRegister("POST", "/control/safebrowsing/enable", d.handleSafeBrowsingEnable)
	d.Config.HTTPRegister("POST", "/control/safebrowsing/disable", d.handleSafeBrowsingDisable)
	d.Config.HTTPRegister("GET", "/control/safebrowsing/status", d.handleSafeBrowsingStatus)

	d.Config.HTTPRegister("POST", "/control/parental/enable", d.handleParentalEnable)
	d.Config.HTTPRegister("POST", "/control/parental/disable", d.handleParentalDisable)
	d.Config.HTTPRegister("GET", "/control/parental/status", d.handleParentalStatus)

	d.Config.HTTPRegister("POST", "/control/safesearch/enable", d.handleSafeSearchEnable)
	d.Config.HTTPRegister("POST", "/control/safesearch/disable", d.handleSafeSearchDisable)
	d.Config.HTTPRegister("GET", "/control/safesearch/status", d.handleSafeSearchStatus)
}