2022-09-23 13:23:35 +03:00
|
|
|
package filtering
|
2019-08-21 14:39:37 +03:00
|
|
|
|
|
|
|
import (
|
|
|
|
"encoding/json"
|
|
|
|
"fmt"
|
2019-11-06 15:13:31 +03:00
|
|
|
"net"
|
2019-08-21 14:39:37 +03:00
|
|
|
"net/http"
|
2019-09-04 14:12:00 +03:00
|
|
|
"net/url"
|
2019-08-21 14:39:37 +03:00
|
|
|
"os"
|
2020-03-05 14:37:43 +03:00
|
|
|
"path/filepath"
|
2023-05-03 19:52:06 +03:00
|
|
|
"sync"
|
2019-09-04 14:12:00 +03:00
|
|
|
"time"
|
2019-08-21 14:39:37 +03:00
|
|
|
|
2021-12-16 20:54:59 +03:00
|
|
|
"github.com/AdguardTeam/AdGuardHome/internal/aghhttp"
|
2022-08-17 20:40:47 +03:00
|
|
|
"github.com/AdguardTeam/golibs/errors"
|
2019-08-21 14:39:37 +03:00
|
|
|
"github.com/AdguardTeam/golibs/log"
|
2019-11-06 15:13:31 +03:00
|
|
|
"github.com/miekg/dns"
|
2023-03-29 19:09:54 +03:00
|
|
|
"golang.org/x/exp/slices"
|
2019-08-21 14:39:37 +03:00
|
|
|
)
|
|
|
|
|
2021-03-15 14:19:04 +03:00
|
|
|
// validateFilterURL validates the filter list URL or file name.
|
|
|
|
func validateFilterURL(urlStr string) (err error) {
|
2023-03-29 19:09:54 +03:00
|
|
|
defer func() { err = errors.Annotate(err, "checking filter: %w") }()
|
|
|
|
|
2021-03-15 14:19:04 +03:00
|
|
|
if filepath.IsAbs(urlStr) {
|
|
|
|
_, err = os.Stat(urlStr)
|
|
|
|
if err != nil {
|
2023-03-29 19:09:54 +03:00
|
|
|
// Don't wrap the error since it's informative enough as is.
|
|
|
|
return err
|
2021-03-15 14:19:04 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
2020-03-05 14:37:43 +03:00
|
|
|
}
|
|
|
|
|
2023-03-29 19:09:54 +03:00
|
|
|
u, err := url.ParseRequestURI(urlStr)
|
2019-08-21 14:39:37 +03:00
|
|
|
if err != nil {
|
2023-03-29 19:09:54 +03:00
|
|
|
// Don't wrap the error since it's informative enough as is.
|
|
|
|
return err
|
|
|
|
} else if s := u.Scheme; s != aghhttp.SchemeHTTP && s != aghhttp.SchemeHTTPS {
|
|
|
|
return &url.Error{
|
|
|
|
Op: "Check scheme",
|
|
|
|
URL: urlStr,
|
|
|
|
Err: fmt.Errorf("only %v allowed", []string{aghhttp.SchemeHTTP, aghhttp.SchemeHTTPS}),
|
|
|
|
}
|
2019-08-21 14:39:37 +03:00
|
|
|
}
|
2021-03-15 14:19:04 +03:00
|
|
|
|
|
|
|
return nil
|
2019-09-04 14:12:00 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
// filterAddJSON is the JSON request body of the filtering/add_url HTTP API.
type filterAddJSON struct {
	// Name is the human-readable name of the filter list.
	Name string `json:"name"`

	// URL is the URL or the absolute file path of the filter list; it is
	// validated by validateFilterURL.
	URL string `json:"url"`

	// Whitelist, if true, adds the list to the allowlist filters instead of
	// the blocklist ones.
	Whitelist bool `json:"whitelist"`
}
|
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
func (d *DNSFilter) handleFilteringAddURL(w http.ResponseWriter, r *http.Request) {
|
2019-09-04 14:12:00 +03:00
|
|
|
fj := filterAddJSON{}
|
|
|
|
err := json.NewDecoder(r.Body).Decode(&fj)
|
2019-08-21 14:39:37 +03:00
|
|
|
if err != nil {
|
2021-12-16 20:54:59 +03:00
|
|
|
aghhttp.Error(r, w, http.StatusBadRequest, "Failed to parse request body json: %s", err)
|
|
|
|
|
2019-08-21 14:39:37 +03:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2021-03-15 14:19:04 +03:00
|
|
|
err = validateFilterURL(fj.URL)
|
|
|
|
if err != nil {
|
2022-08-17 20:40:47 +03:00
|
|
|
aghhttp.Error(r, w, http.StatusBadRequest, "%s", err)
|
2021-03-15 14:19:04 +03:00
|
|
|
|
2019-08-21 14:39:37 +03:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// Check for duplicates
|
2022-09-23 13:23:35 +03:00
|
|
|
if d.filterExists(fj.URL) {
|
2023-03-29 19:09:54 +03:00
|
|
|
err = errFilterExists
|
|
|
|
aghhttp.Error(r, w, http.StatusBadRequest, "Filter with URL %q: %s", fj.URL, err)
|
2021-12-16 20:54:59 +03:00
|
|
|
|
2019-08-21 14:39:37 +03:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// Set necessary properties
|
2022-09-23 13:23:35 +03:00
|
|
|
filt := FilterYAML{
|
2019-09-04 14:12:00 +03:00
|
|
|
Enabled: true,
|
|
|
|
URL: fj.URL,
|
|
|
|
Name: fj.Name,
|
2020-02-26 19:58:25 +03:00
|
|
|
white: fj.Whitelist,
|
2022-10-21 20:14:43 +03:00
|
|
|
Filter: Filter{
|
|
|
|
ID: assignUniqueFilterID(),
|
|
|
|
},
|
2019-09-04 14:12:00 +03:00
|
|
|
}
|
2019-08-21 14:39:37 +03:00
|
|
|
|
|
|
|
// Download the filter contents
|
2022-09-23 13:23:35 +03:00
|
|
|
ok, err := d.update(&filt)
|
2019-08-21 14:39:37 +03:00
|
|
|
if err != nil {
|
2021-12-16 20:54:59 +03:00
|
|
|
aghhttp.Error(
|
|
|
|
r,
|
|
|
|
w,
|
|
|
|
http.StatusBadRequest,
|
2023-07-07 18:27:33 +03:00
|
|
|
"Couldn't fetch filter from URL %q: %s",
|
2021-12-16 20:54:59 +03:00
|
|
|
filt.URL,
|
|
|
|
err,
|
|
|
|
)
|
|
|
|
|
2019-08-21 14:39:37 +03:00
|
|
|
return
|
|
|
|
}
|
2021-12-16 20:54:59 +03:00
|
|
|
|
2019-08-21 14:39:37 +03:00
|
|
|
if !ok {
|
2021-12-16 20:54:59 +03:00
|
|
|
aghhttp.Error(
|
|
|
|
r,
|
|
|
|
w,
|
|
|
|
http.StatusBadRequest,
|
2023-03-29 19:09:54 +03:00
|
|
|
"Filter with URL %q is invalid (maybe it points to blank page?)",
|
2021-12-16 20:54:59 +03:00
|
|
|
filt.URL,
|
|
|
|
)
|
|
|
|
|
2019-08-21 14:39:37 +03:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2021-12-16 20:54:59 +03:00
|
|
|
// URL is assumed valid so append it to filters, update config, write new
|
|
|
|
// file and reload it to engines.
|
2023-03-29 19:09:54 +03:00
|
|
|
err = d.filterAdd(filt)
|
|
|
|
if err != nil {
|
|
|
|
aghhttp.Error(r, w, http.StatusBadRequest, "Filter with URL %q: %s", filt.URL, err)
|
2021-12-16 20:54:59 +03:00
|
|
|
|
2019-08-21 14:39:37 +03:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
d.ConfigModified()
|
|
|
|
d.EnableFilters(true)
|
2019-08-21 14:39:37 +03:00
|
|
|
|
2020-03-17 15:00:40 +03:00
|
|
|
_, err = fmt.Fprintf(w, "OK %d rules\n", filt.RulesCount)
|
2019-08-21 14:39:37 +03:00
|
|
|
if err != nil {
|
2021-12-16 20:54:59 +03:00
|
|
|
aghhttp.Error(r, w, http.StatusInternalServerError, "Couldn't write body: %s", err)
|
2019-08-21 14:39:37 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
// handleFilteringRemoveURL is the handler for the POST
// /control/filtering/remove_url HTTP API.  It removes the filter list with
// the given URL from either the blocklist or the allowlist filters, renames
// its cached file out of the way, and reloads the filtering engines.
func (d *DNSFilter) handleFilteringRemoveURL(w http.ResponseWriter, r *http.Request) {
	type request struct {
		// URL is the URL of the filter list to remove.
		URL string `json:"url"`
		// Whitelist selects the allowlist filters instead of the blocklist
		// ones.
		Whitelist bool `json:"whitelist"`
	}

	req := request{}
	err := json.NewDecoder(r.Body).Decode(&req)
	if err != nil {
		aghhttp.Error(r, w, http.StatusBadRequest, "failed to parse request body json: %s", err)

		return
	}

	// deleted keeps the removed entry so that its rule count can be reported
	// below; it stays zero-valued when nothing matched.
	var deleted FilterYAML
	// Perform the lookup, the file rename, and the slice deletion under the
	// filters lock; the closure keeps the locked region small and lets defer
	// release the lock on every early return.
	func() {
		d.filtersMu.Lock()
		defer d.filtersMu.Unlock()

		filters := &d.Filters
		if req.Whitelist {
			filters = &d.WhitelistFilters
		}

		delIdx := slices.IndexFunc(*filters, func(flt FilterYAML) bool {
			return flt.URL == req.URL
		})
		if delIdx == -1 {
			// Not found: log and fall through to the common response path.
			log.Error("deleting filter with url %q: %s", req.URL, errFilterNotExist)

			return
		}

		deleted = (*filters)[delIdx]
		p := deleted.Path(d.DataDir)
		// Keep the old contents around as "<name>.old"; a missing file is
		// not an error.
		err = os.Rename(p, p+".old")
		if err != nil && !errors.Is(err, os.ErrNotExist) {
			log.Error("deleting filter %d: renaming file %q: %s", deleted.ID, p, err)

			return
		}

		*filters = slices.Delete(*filters, delIdx, delIdx+1)

		log.Info("deleted filter %d", deleted.ID)
	}()

	d.ConfigModified()
	d.EnableFilters(true)

	// NOTE: The old files "filter.txt.old" aren't deleted. It's not really
	// necessary, but will require the additional complicated code to run
	// after enableFilters is done.
	//
	// TODO(a.garipov): Make sure the above comment is true.

	_, err = fmt.Fprintf(w, "OK %d rules\n", deleted.RulesCount)
	if err != nil {
		aghhttp.Error(r, w, http.StatusInternalServerError, "couldn't write body: %s", err)
	}
}
|
|
|
|
|
2022-08-17 20:40:47 +03:00
|
|
|
// filterURLReqData contains the new properties of a filter list in a
// filtering/set_url request.
type filterURLReqData struct {
	// Name is the new human-readable name of the filter list.
	Name string `json:"name"`

	// URL is the new URL of the filter list; validated by validateFilterURL.
	URL string `json:"url"`

	// Enabled is the new enabled state of the filter list.
	Enabled bool `json:"enabled"`
}
|
2019-08-21 14:39:37 +03:00
|
|
|
|
2019-11-06 15:56:29 +03:00
|
|
|
// filterURLReq is the JSON request body of the filtering/set_url HTTP API.
type filterURLReq struct {
	// Data holds the new properties to apply to the filter list.
	Data *filterURLReqData `json:"data"`

	// URL is the current URL identifying the filter list to modify.
	URL string `json:"url"`

	// Whitelist selects the allowlist filters instead of the blocklist ones.
	Whitelist bool `json:"whitelist"`
}
|
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
func (d *DNSFilter) handleFilteringSetURL(w http.ResponseWriter, r *http.Request) {
|
2019-11-06 15:56:29 +03:00
|
|
|
fj := filterURLReq{}
|
2019-09-04 14:12:00 +03:00
|
|
|
err := json.NewDecoder(r.Body).Decode(&fj)
|
|
|
|
if err != nil {
|
2022-09-23 13:23:35 +03:00
|
|
|
aghhttp.Error(r, w, http.StatusBadRequest, "decoding request: %s", err)
|
2021-12-16 20:54:59 +03:00
|
|
|
|
2019-08-21 14:39:37 +03:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-08-17 20:40:47 +03:00
|
|
|
if fj.Data == nil {
|
2022-09-23 13:23:35 +03:00
|
|
|
aghhttp.Error(r, w, http.StatusBadRequest, "%s", errors.Error("data is absent"))
|
2022-08-17 20:40:47 +03:00
|
|
|
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2021-03-15 14:19:04 +03:00
|
|
|
err = validateFilterURL(fj.Data.URL)
|
|
|
|
if err != nil {
|
2022-09-23 13:23:35 +03:00
|
|
|
aghhttp.Error(r, w, http.StatusBadRequest, "invalid url: %s", err)
|
2021-03-15 14:19:04 +03:00
|
|
|
|
2019-08-21 14:39:37 +03:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
filt := FilterYAML{
|
2019-11-06 15:56:29 +03:00
|
|
|
Enabled: fj.Data.Enabled,
|
|
|
|
Name: fj.Data.Name,
|
|
|
|
URL: fj.Data.URL,
|
|
|
|
}
|
2022-09-23 13:23:35 +03:00
|
|
|
|
2022-10-21 20:14:43 +03:00
|
|
|
restart, err := d.filterSetProperties(fj.URL, filt, fj.Whitelist)
|
|
|
|
if err != nil {
|
|
|
|
aghhttp.Error(r, w, http.StatusBadRequest, err.Error())
|
2022-09-23 13:23:35 +03:00
|
|
|
|
2019-11-06 15:56:29 +03:00
|
|
|
return
|
|
|
|
}
|
2019-08-21 14:39:37 +03:00
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
d.ConfigModified()
|
2020-03-11 19:17:46 +03:00
|
|
|
if restart {
|
2022-09-23 13:23:35 +03:00
|
|
|
d.EnableFilters(true)
|
2019-11-06 15:56:29 +03:00
|
|
|
}
|
2019-08-21 14:39:37 +03:00
|
|
|
}
|
|
|
|
|
2022-09-29 19:04:26 +03:00
|
|
|
// filteringRulesReq is the JSON structure for setting custom filtering rules.
type filteringRulesReq struct {
	// Rules is the complete new set of user-defined filtering rules.
	Rules []string `json:"rules"`
}
|
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
func (d *DNSFilter) handleFilteringSetRules(w http.ResponseWriter, r *http.Request) {
|
2022-09-29 19:04:26 +03:00
|
|
|
if aghhttp.WriteTextPlainDeprecated(w, r) {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
req := &filteringRulesReq{}
|
|
|
|
err := json.NewDecoder(r.Body).Decode(req)
|
2019-08-21 14:39:37 +03:00
|
|
|
if err != nil {
|
2022-09-29 19:04:26 +03:00
|
|
|
aghhttp.Error(r, w, http.StatusBadRequest, "reading req: %s", err)
|
2021-12-16 20:54:59 +03:00
|
|
|
|
2019-08-21 14:39:37 +03:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-09-29 19:04:26 +03:00
|
|
|
d.UserRules = req.Rules
|
2022-09-23 13:23:35 +03:00
|
|
|
d.ConfigModified()
|
|
|
|
d.EnableFilters(true)
|
2019-09-04 14:12:00 +03:00
|
|
|
}
|
2019-08-21 14:39:37 +03:00
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
func (d *DNSFilter) handleFilteringRefresh(w http.ResponseWriter, r *http.Request) {
|
2020-02-28 12:40:16 +03:00
|
|
|
type Req struct {
|
|
|
|
White bool `json:"whitelist"`
|
|
|
|
}
|
2020-02-26 19:58:25 +03:00
|
|
|
var err error
|
|
|
|
|
2020-02-28 12:40:16 +03:00
|
|
|
req := Req{}
|
|
|
|
err = json.NewDecoder(r.Body).Decode(&req)
|
|
|
|
if err != nil {
|
2021-12-16 20:54:59 +03:00
|
|
|
aghhttp.Error(r, w, http.StatusBadRequest, "json decode: %s", err)
|
|
|
|
|
2020-02-28 12:40:16 +03:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
var ok bool
|
2023-03-29 19:09:54 +03:00
|
|
|
resp := struct {
|
|
|
|
Updated int `json:"updated"`
|
|
|
|
}{}
|
2022-09-23 13:23:35 +03:00
|
|
|
resp.Updated, _, ok = d.tryRefreshFilters(!req.White, req.White, true)
|
|
|
|
if !ok {
|
|
|
|
aghhttp.Error(
|
|
|
|
r,
|
|
|
|
w,
|
|
|
|
http.StatusInternalServerError,
|
|
|
|
"filters update procedure is already running",
|
|
|
|
)
|
2021-12-16 20:54:59 +03:00
|
|
|
|
2019-10-10 17:12:32 +03:00
|
|
|
return
|
|
|
|
}
|
2020-02-26 19:58:25 +03:00
|
|
|
|
2022-10-04 14:35:10 +03:00
|
|
|
_ = aghhttp.WriteJSONResponse(w, r, resp)
|
2019-09-04 14:12:00 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
// filterJSON is the REST API representation of a single filter list.
type filterJSON struct {
	// URL is the URL or file path of the filter list.
	URL string `json:"url"`

	// Name is the human-readable name of the filter list.
	Name string `json:"name"`

	// LastUpdated is the RFC 3339 timestamp of the last update; empty when
	// the list has never been updated.
	LastUpdated string `json:"last_updated,omitempty"`

	// ID is the unique ID of the filter list.
	ID int64 `json:"id"`

	// RulesCount is the number of rules in the filter list.
	RulesCount uint32 `json:"rules_count"`

	// Enabled shows whether the filter list is used.
	Enabled bool `json:"enabled"`
}
|
|
|
|
|
|
|
|
// filteringConfig is the JSON representation of the whole filtering
// configuration, used by the filtering/status and filtering/config HTTP APIs.
type filteringConfig struct {
	// Filters is the list of blocklist filters.
	Filters []filterJSON `json:"filters"`

	// WhitelistFilters is the list of allowlist filters.
	WhitelistFilters []filterJSON `json:"whitelist_filters"`

	// UserRules is the list of user-defined filtering rules.
	UserRules []string `json:"user_rules"`

	// Interval is the filters update interval.
	Interval uint32 `json:"interval"` // in hours

	// Enabled shows whether filtering is enabled.
	Enabled bool `json:"enabled"`
}
|
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
func filterToJSON(f FilterYAML) filterJSON {
|
2020-02-26 19:58:25 +03:00
|
|
|
fj := filterJSON{
|
|
|
|
ID: f.ID,
|
|
|
|
Enabled: f.Enabled,
|
|
|
|
URL: f.URL,
|
|
|
|
Name: f.Name,
|
|
|
|
RulesCount: uint32(f.RulesCount),
|
|
|
|
}
|
|
|
|
|
|
|
|
if !f.LastUpdated.IsZero() {
|
|
|
|
fj.LastUpdated = f.LastUpdated.Format(time.RFC3339)
|
|
|
|
}
|
|
|
|
|
|
|
|
return fj
|
2019-09-04 14:12:00 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
// Get filtering configuration
|
2022-09-23 13:23:35 +03:00
|
|
|
func (d *DNSFilter) handleFilteringStatus(w http.ResponseWriter, r *http.Request) {
|
2019-09-04 14:12:00 +03:00
|
|
|
resp := filteringConfig{}
|
2022-09-23 13:23:35 +03:00
|
|
|
d.filtersMu.RLock()
|
|
|
|
resp.Enabled = d.FilteringEnabled
|
|
|
|
resp.Interval = d.FiltersUpdateIntervalHours
|
|
|
|
for _, f := range d.Filters {
|
2020-02-26 19:58:25 +03:00
|
|
|
fj := filterToJSON(f)
|
2019-09-04 14:12:00 +03:00
|
|
|
resp.Filters = append(resp.Filters, fj)
|
2019-08-21 14:39:37 +03:00
|
|
|
}
|
2022-09-23 13:23:35 +03:00
|
|
|
for _, f := range d.WhitelistFilters {
|
2020-02-26 19:58:25 +03:00
|
|
|
fj := filterToJSON(f)
|
|
|
|
resp.WhitelistFilters = append(resp.WhitelistFilters, fj)
|
|
|
|
}
|
2022-09-23 13:23:35 +03:00
|
|
|
resp.UserRules = d.UserRules
|
|
|
|
d.filtersMu.RUnlock()
|
2019-08-21 14:39:37 +03:00
|
|
|
|
2022-10-04 14:35:10 +03:00
|
|
|
_ = aghhttp.WriteJSONResponse(w, r, resp)
|
2019-08-21 14:39:37 +03:00
|
|
|
}
|
|
|
|
|
2019-09-04 14:12:00 +03:00
|
|
|
// Set filtering configuration
|
2022-09-23 13:23:35 +03:00
|
|
|
func (d *DNSFilter) handleFilteringConfig(w http.ResponseWriter, r *http.Request) {
|
2019-09-04 14:12:00 +03:00
|
|
|
req := filteringConfig{}
|
|
|
|
err := json.NewDecoder(r.Body).Decode(&req)
|
2019-08-21 14:39:37 +03:00
|
|
|
if err != nil {
|
2021-12-16 20:54:59 +03:00
|
|
|
aghhttp.Error(r, w, http.StatusBadRequest, "json decode: %s", err)
|
|
|
|
|
2019-08-21 14:39:37 +03:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
if !ValidateUpdateIvl(req.Interval) {
|
2021-12-16 20:54:59 +03:00
|
|
|
aghhttp.Error(r, w, http.StatusBadRequest, "Unsupported interval")
|
|
|
|
|
2019-09-04 14:12:00 +03:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2021-05-24 14:48:42 +03:00
|
|
|
func() {
|
2022-09-23 13:23:35 +03:00
|
|
|
d.filtersMu.Lock()
|
|
|
|
defer d.filtersMu.Unlock()
|
2021-05-24 14:48:42 +03:00
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
d.FilteringEnabled = req.Enabled
|
|
|
|
d.FiltersUpdateIntervalHours = req.Interval
|
2021-05-24 14:48:42 +03:00
|
|
|
}()
|
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
d.ConfigModified()
|
|
|
|
d.EnableFilters(true)
|
2019-08-21 14:39:37 +03:00
|
|
|
}
|
|
|
|
|
2020-12-17 13:32:46 +03:00
|
|
|
// checkHostRespRule is a single matched rule in a check_host response.
type checkHostRespRule struct {
	// Text is the text of the matched rule.
	Text string `json:"text"`

	// FilterListID is the ID of the filter list containing the rule.
	FilterListID int64 `json:"filter_list_id"`
}
|
|
|
|
|
2019-11-06 15:13:31 +03:00
|
|
|
// checkHostResp is the JSON response of the filtering/check_host HTTP API.
type checkHostResp struct {
	// Reason is the string form of the filtering result reason.
	Reason string `json:"reason"`

	// Rule is the text of the matched rule.
	//
	// Deprecated: Use Rules[*].Text.
	Rule string `json:"rule"`

	// Rules contains all matched rules.
	Rules []*checkHostRespRule `json:"rules"`

	// for FilteredBlockedService:
	SvcName string `json:"service_name"`

	// for Rewrite:
	CanonName string   `json:"cname"`    // CNAME value
	IPList    []net.IP `json:"ip_addrs"` // list of IP addresses

	// FilterID is the ID of the rule's filter list.
	//
	// Deprecated: Use Rules[*].FilterListID.
	FilterID int64 `json:"filter_id"`
}
|
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
func (d *DNSFilter) handleCheckHost(w http.ResponseWriter, r *http.Request) {
|
|
|
|
host := r.URL.Query().Get("name")
|
2019-11-06 15:13:31 +03:00
|
|
|
|
2023-06-14 15:08:57 +03:00
|
|
|
setts := d.Settings()
|
2019-11-06 15:13:31 +03:00
|
|
|
setts.FilteringEnabled = true
|
2021-10-20 19:52:13 +03:00
|
|
|
setts.ProtectionEnabled = true
|
2022-09-23 13:23:35 +03:00
|
|
|
|
2023-06-14 15:08:57 +03:00
|
|
|
d.ApplyBlockedServices(setts)
|
|
|
|
result, err := d.CheckHost(host, dns.TypeA, setts)
|
2019-11-06 15:13:31 +03:00
|
|
|
if err != nil {
|
2021-12-16 20:54:59 +03:00
|
|
|
aghhttp.Error(
|
|
|
|
r,
|
|
|
|
w,
|
|
|
|
http.StatusInternalServerError,
|
|
|
|
"couldn't apply filtering: %s: %s",
|
|
|
|
host,
|
|
|
|
err,
|
|
|
|
)
|
|
|
|
|
2019-11-06 15:13:31 +03:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
rulesLen := len(result.Rules)
|
|
|
|
resp := checkHostResp{
|
|
|
|
Reason: result.Reason.String(),
|
|
|
|
SvcName: result.ServiceName,
|
|
|
|
CanonName: result.CanonName,
|
|
|
|
IPList: result.IPList,
|
|
|
|
Rules: make([]*checkHostRespRule, len(result.Rules)),
|
|
|
|
}
|
2020-12-17 13:32:46 +03:00
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
if rulesLen > 0 {
|
2020-12-21 17:48:07 +03:00
|
|
|
resp.FilterID = result.Rules[0].FilterListID
|
|
|
|
resp.Rule = result.Rules[0].Text
|
|
|
|
}
|
|
|
|
|
2020-12-17 13:32:46 +03:00
|
|
|
for i, r := range result.Rules {
|
|
|
|
resp.Rules[i] = &checkHostRespRule{
|
|
|
|
FilterListID: r.FilterListID,
|
|
|
|
Text: r.Text,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-10-04 14:35:10 +03:00
|
|
|
_ = aghhttp.WriteJSONResponse(w, r, resp)
|
2019-11-06 15:13:31 +03:00
|
|
|
}
|
|
|
|
|
2023-05-03 19:52:06 +03:00
|
|
|
// setProtectedBool sets the value of a boolean pointer under a lock. l must
|
|
|
|
// protect the value under ptr.
|
|
|
|
//
|
|
|
|
// TODO(e.burkov): Make it generic?
|
|
|
|
func setProtectedBool(mu *sync.RWMutex, ptr *bool, val bool) {
|
|
|
|
mu.Lock()
|
|
|
|
defer mu.Unlock()
|
|
|
|
|
|
|
|
*ptr = val
|
|
|
|
}
|
|
|
|
|
|
|
|
// protectedBool gets the value of a boolean pointer under a read lock. l must
|
|
|
|
// protect the value under ptr.
|
|
|
|
//
|
|
|
|
// TODO(e.burkov): Make it generic?
|
|
|
|
func protectedBool(mu *sync.RWMutex, ptr *bool) (val bool) {
|
|
|
|
mu.RLock()
|
|
|
|
defer mu.RUnlock()
|
|
|
|
|
|
|
|
return *ptr
|
|
|
|
}
|
|
|
|
|
|
|
|
// handleSafeBrowsingEnable is the handler for the POST
// /control/safebrowsing/enable HTTP API.  It turns safe browsing on and
// persists the configuration.
func (d *DNSFilter) handleSafeBrowsingEnable(w http.ResponseWriter, r *http.Request) {
	setProtectedBool(&d.confLock, &d.Config.SafeBrowsingEnabled, true)
	d.Config.ConfigModified()
}
|
|
|
|
|
|
|
|
// handleSafeBrowsingDisable is the handler for the POST
// /control/safebrowsing/disable HTTP API.  It turns safe browsing off and
// persists the configuration.
func (d *DNSFilter) handleSafeBrowsingDisable(w http.ResponseWriter, r *http.Request) {
	setProtectedBool(&d.confLock, &d.Config.SafeBrowsingEnabled, false)
	d.Config.ConfigModified()
}
|
|
|
|
|
|
|
|
// handleSafeBrowsingStatus is the handler for the GET
|
|
|
|
// /control/safebrowsing/status HTTP API.
|
|
|
|
func (d *DNSFilter) handleSafeBrowsingStatus(w http.ResponseWriter, r *http.Request) {
|
|
|
|
resp := &struct {
|
|
|
|
Enabled bool `json:"enabled"`
|
|
|
|
}{
|
|
|
|
Enabled: protectedBool(&d.confLock, &d.Config.SafeBrowsingEnabled),
|
|
|
|
}
|
|
|
|
|
|
|
|
_ = aghhttp.WriteJSONResponse(w, r, resp)
|
|
|
|
}
|
|
|
|
|
|
|
|
// handleParentalEnable is the handler for the POST /control/parental/enable
// HTTP API.  It turns parental control on and persists the configuration.
func (d *DNSFilter) handleParentalEnable(w http.ResponseWriter, r *http.Request) {
	setProtectedBool(&d.confLock, &d.Config.ParentalEnabled, true)
	d.Config.ConfigModified()
}
|
|
|
|
|
|
|
|
// handleParentalDisable is the handler for the POST /control/parental/disable
// HTTP API.  It turns parental control off and persists the configuration.
func (d *DNSFilter) handleParentalDisable(w http.ResponseWriter, r *http.Request) {
	setProtectedBool(&d.confLock, &d.Config.ParentalEnabled, false)
	d.Config.ConfigModified()
}
|
|
|
|
|
|
|
|
// handleParentalStatus is the handler for the GET /control/parental/status
|
|
|
|
// HTTP API.
|
|
|
|
func (d *DNSFilter) handleParentalStatus(w http.ResponseWriter, r *http.Request) {
|
|
|
|
resp := &struct {
|
|
|
|
Enabled bool `json:"enabled"`
|
|
|
|
}{
|
|
|
|
Enabled: protectedBool(&d.confLock, &d.Config.ParentalEnabled),
|
|
|
|
}
|
|
|
|
|
|
|
|
_ = aghhttp.WriteJSONResponse(w, r, resp)
|
|
|
|
}
|
|
|
|
|
2019-09-04 14:12:00 +03:00
|
|
|
// RegisterFilteringHandlers - register handlers
|
2022-09-23 13:23:35 +03:00
|
|
|
func (d *DNSFilter) RegisterFilteringHandlers() {
|
|
|
|
registerHTTP := d.HTTPRegister
|
|
|
|
if registerHTTP == nil {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
registerHTTP(http.MethodPost, "/control/safebrowsing/enable", d.handleSafeBrowsingEnable)
|
|
|
|
registerHTTP(http.MethodPost, "/control/safebrowsing/disable", d.handleSafeBrowsingDisable)
|
|
|
|
registerHTTP(http.MethodGet, "/control/safebrowsing/status", d.handleSafeBrowsingStatus)
|
|
|
|
|
|
|
|
registerHTTP(http.MethodPost, "/control/parental/enable", d.handleParentalEnable)
|
|
|
|
registerHTTP(http.MethodPost, "/control/parental/disable", d.handleParentalDisable)
|
|
|
|
registerHTTP(http.MethodGet, "/control/parental/status", d.handleParentalStatus)
|
|
|
|
|
|
|
|
registerHTTP(http.MethodPost, "/control/safesearch/enable", d.handleSafeSearchEnable)
|
|
|
|
registerHTTP(http.MethodPost, "/control/safesearch/disable", d.handleSafeSearchDisable)
|
|
|
|
registerHTTP(http.MethodGet, "/control/safesearch/status", d.handleSafeSearchStatus)
|
2023-03-23 15:25:58 +03:00
|
|
|
registerHTTP(http.MethodPut, "/control/safesearch/settings", d.handleSafeSearchSettings)
|
2022-09-23 13:23:35 +03:00
|
|
|
|
|
|
|
registerHTTP(http.MethodGet, "/control/rewrite/list", d.handleRewriteList)
|
|
|
|
registerHTTP(http.MethodPost, "/control/rewrite/add", d.handleRewriteAdd)
|
2023-05-12 13:04:19 +03:00
|
|
|
registerHTTP(http.MethodPut, "/control/rewrite/update", d.handleRewriteUpdate)
|
2022-09-23 13:23:35 +03:00
|
|
|
registerHTTP(http.MethodPost, "/control/rewrite/delete", d.handleRewriteDelete)
|
|
|
|
|
2022-10-27 15:46:25 +03:00
|
|
|
registerHTTP(http.MethodGet, "/control/blocked_services/services", d.handleBlockedServicesIDs)
|
|
|
|
registerHTTP(http.MethodGet, "/control/blocked_services/all", d.handleBlockedServicesAll)
|
2022-09-23 13:23:35 +03:00
|
|
|
registerHTTP(http.MethodGet, "/control/blocked_services/list", d.handleBlockedServicesList)
|
|
|
|
registerHTTP(http.MethodPost, "/control/blocked_services/set", d.handleBlockedServicesSet)
|
|
|
|
|
|
|
|
registerHTTP(http.MethodGet, "/control/filtering/status", d.handleFilteringStatus)
|
|
|
|
registerHTTP(http.MethodPost, "/control/filtering/config", d.handleFilteringConfig)
|
|
|
|
registerHTTP(http.MethodPost, "/control/filtering/add_url", d.handleFilteringAddURL)
|
|
|
|
registerHTTP(http.MethodPost, "/control/filtering/remove_url", d.handleFilteringRemoveURL)
|
|
|
|
registerHTTP(http.MethodPost, "/control/filtering/set_url", d.handleFilteringSetURL)
|
|
|
|
registerHTTP(http.MethodPost, "/control/filtering/refresh", d.handleFilteringRefresh)
|
|
|
|
registerHTTP(http.MethodPost, "/control/filtering/set_rules", d.handleFilteringSetRules)
|
|
|
|
registerHTTP(http.MethodGet, "/control/filtering/check_host", d.handleCheckHost)
|
2019-09-04 14:12:00 +03:00
|
|
|
}
|
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
// ValidateUpdateIvl returns false if i is not a valid filters update interval.
|
|
|
|
func ValidateUpdateIvl(i uint32) bool {
|
2019-09-04 14:12:00 +03:00
|
|
|
return i == 0 || i == 1 || i == 12 || i == 1*24 || i == 3*24 || i == 7*24
|
2019-08-21 14:39:37 +03:00
|
|
|
}
|