2022-09-23 13:23:35 +03:00
|
|
|
package filtering
|
2019-08-21 14:39:37 +03:00
|
|
|
|
|
|
|
import (
|
|
|
|
"encoding/json"
|
|
|
|
"fmt"
|
2019-11-06 15:13:31 +03:00
|
|
|
"net"
|
2019-08-21 14:39:37 +03:00
|
|
|
"net/http"
|
2019-09-04 14:12:00 +03:00
|
|
|
"net/url"
|
2019-08-21 14:39:37 +03:00
|
|
|
"os"
|
2020-03-05 14:37:43 +03:00
|
|
|
"path/filepath"
|
2019-09-04 14:12:00 +03:00
|
|
|
"time"
|
2019-08-21 14:39:37 +03:00
|
|
|
|
2021-12-16 20:54:59 +03:00
|
|
|
"github.com/AdguardTeam/AdGuardHome/internal/aghhttp"
|
2022-08-17 20:40:47 +03:00
|
|
|
"github.com/AdguardTeam/golibs/errors"
|
2019-08-21 14:39:37 +03:00
|
|
|
"github.com/AdguardTeam/golibs/log"
|
2019-11-06 15:13:31 +03:00
|
|
|
"github.com/miekg/dns"
|
2019-08-21 14:39:37 +03:00
|
|
|
)
|
|
|
|
|
2021-03-15 14:19:04 +03:00
|
|
|
// validateFilterURL validates the filter list URL or file name.
|
|
|
|
func validateFilterURL(urlStr string) (err error) {
|
|
|
|
if filepath.IsAbs(urlStr) {
|
|
|
|
_, err = os.Stat(urlStr)
|
|
|
|
if err != nil {
|
|
|
|
return fmt.Errorf("checking filter file: %w", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
2020-03-05 14:37:43 +03:00
|
|
|
}
|
|
|
|
|
2021-03-15 14:19:04 +03:00
|
|
|
url, err := url.ParseRequestURI(urlStr)
|
2019-08-21 14:39:37 +03:00
|
|
|
if err != nil {
|
2021-03-15 14:19:04 +03:00
|
|
|
return fmt.Errorf("checking filter url: %w", err)
|
2019-08-21 14:39:37 +03:00
|
|
|
}
|
2021-03-15 14:19:04 +03:00
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
if s := url.Scheme; s != aghhttp.SchemeHTTP && s != aghhttp.SchemeHTTPS {
|
2021-03-15 14:19:04 +03:00
|
|
|
return fmt.Errorf("checking filter url: invalid scheme %q", s)
|
2019-08-21 14:39:37 +03:00
|
|
|
}
|
2021-03-15 14:19:04 +03:00
|
|
|
|
|
|
|
return nil
|
2019-09-04 14:12:00 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
// filterAddJSON is the JSON request body for the filter list adding HTTP API.
type filterAddJSON struct {
	Name      string `json:"name"`
	URL       string `json:"url"`
	Whitelist bool   `json:"whitelist"`
}
|
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
// handleFilteringAddURL is the handler for the
// "POST /control/filtering/add_url" HTTP API.  It validates the new filter
// list URL, downloads its contents, appends it to the configured filters, and
// reloads the filtering engines.
func (d *DNSFilter) handleFilteringAddURL(w http.ResponseWriter, r *http.Request) {
	fj := filterAddJSON{}
	err := json.NewDecoder(r.Body).Decode(&fj)
	if err != nil {
		aghhttp.Error(r, w, http.StatusBadRequest, "Failed to parse request body json: %s", err)

		return
	}

	err = validateFilterURL(fj.URL)
	if err != nil {
		err = fmt.Errorf("invalid url: %s", err)
		aghhttp.Error(r, w, http.StatusBadRequest, "%s", err)

		return
	}

	// Check for duplicates.
	if d.filterExists(fj.URL) {
		aghhttp.Error(r, w, http.StatusBadRequest, "Filter URL already added -- %s", fj.URL)

		return
	}

	// Set necessary properties.
	filt := FilterYAML{
		Enabled: true,
		URL:     fj.URL,
		Name:    fj.Name,
		white:   fj.Whitelist,
	}
	filt.ID = assignUniqueFilterID()

	// Download the filter contents.
	ok, err := d.update(&filt)
	if err != nil {
		aghhttp.Error(
			r,
			w,
			http.StatusBadRequest,
			"Couldn't fetch filter from url %s: %s",
			filt.URL,
			err,
		)

		return
	}

	// A successful update with ok == false means the downloaded data didn't
	// look like a filter list.
	if !ok {
		aghhttp.Error(
			r,
			w,
			http.StatusBadRequest,
			"Filter at the url %s is invalid (maybe it points to blank page?)",
			filt.URL,
		)

		return
	}

	// URL is assumed valid so append it to filters, update config, write new
	// file and reload it to engines.
	if !d.filterAdd(filt) {
		aghhttp.Error(r, w, http.StatusBadRequest, "Filter URL already added -- %s", filt.URL)

		return
	}

	d.ConfigModified()
	d.EnableFilters(true)

	_, err = fmt.Fprintf(w, "OK %d rules\n", filt.RulesCount)
	if err != nil {
		aghhttp.Error(r, w, http.StatusInternalServerError, "Couldn't write body: %s", err)
	}
}
|
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
// handleFilteringRemoveURL is the handler for the
// "POST /control/filtering/remove_url" HTTP API.  It removes the filter list
// with the given URL from either the blocklist or allowlist filters, renames
// its file on disk, and reloads the filtering engines.
func (d *DNSFilter) handleFilteringRemoveURL(w http.ResponseWriter, r *http.Request) {
	type request struct {
		URL       string `json:"url"`
		Whitelist bool   `json:"whitelist"`
	}

	req := request{}
	err := json.NewDecoder(r.Body).Decode(&req)
	if err != nil {
		aghhttp.Error(r, w, http.StatusBadRequest, "failed to parse request body json: %s", err)

		return
	}

	// Choose which list of filters the URL is removed from.
	d.filtersMu.Lock()
	filters := &d.Filters
	if req.Whitelist {
		filters = &d.WhitelistFilters
	}

	// Keep every filter whose URL doesn't match; remember the removed one so
	// its rules count can be reported, and move its file out of the way.
	var deleted FilterYAML
	var newFilters []FilterYAML
	for _, flt := range *filters {
		if flt.URL != req.URL {
			newFilters = append(newFilters, flt)

			continue
		}

		deleted = flt
		path := flt.Path(d.DataDir)
		// Renaming to ".old" rather than deleting; failure is only logged.
		err = os.Rename(path, path+".old")
		if err != nil {
			log.Error("deleting filter %q: %s", path, err)
		}
	}

	*filters = newFilters
	d.filtersMu.Unlock()

	d.ConfigModified()
	d.EnableFilters(true)

	// NOTE: The old files "filter.txt.old" aren't deleted.  It's not really
	// necessary, but will require the additional complicated code to run
	// after enableFilters is done.
	//
	// TODO(a.garipov): Make sure the above comment is true.

	_, err = fmt.Fprintf(w, "OK %d rules\n", deleted.RulesCount)
	if err != nil {
		aghhttp.Error(r, w, http.StatusInternalServerError, "couldn't write body: %s", err)
	}
}
|
|
|
|
|
2022-08-17 20:40:47 +03:00
|
|
|
// filterURLReqData contains the new properties for a filter list in a
// filterURLReq.
type filterURLReqData struct {
	Name    string `json:"name"`
	URL     string `json:"url"`
	Enabled bool   `json:"enabled"`
}
|
2019-08-21 14:39:37 +03:00
|
|
|
|
2019-11-06 15:56:29 +03:00
|
|
|
// filterURLReq is the JSON request body for the filter URL setting HTTP API.
// URL identifies the existing filter; Data carries its new properties.
type filterURLReq struct {
	Data      *filterURLReqData `json:"data"`
	URL       string            `json:"url"`
	Whitelist bool              `json:"whitelist"`
}
|
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
func (d *DNSFilter) handleFilteringSetURL(w http.ResponseWriter, r *http.Request) {
|
2019-11-06 15:56:29 +03:00
|
|
|
fj := filterURLReq{}
|
2019-09-04 14:12:00 +03:00
|
|
|
err := json.NewDecoder(r.Body).Decode(&fj)
|
|
|
|
if err != nil {
|
2022-09-23 13:23:35 +03:00
|
|
|
aghhttp.Error(r, w, http.StatusBadRequest, "decoding request: %s", err)
|
2021-12-16 20:54:59 +03:00
|
|
|
|
2019-08-21 14:39:37 +03:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-08-17 20:40:47 +03:00
|
|
|
if fj.Data == nil {
|
2022-09-23 13:23:35 +03:00
|
|
|
aghhttp.Error(r, w, http.StatusBadRequest, "%s", errors.Error("data is absent"))
|
2022-08-17 20:40:47 +03:00
|
|
|
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2021-03-15 14:19:04 +03:00
|
|
|
err = validateFilterURL(fj.Data.URL)
|
|
|
|
if err != nil {
|
2022-09-23 13:23:35 +03:00
|
|
|
aghhttp.Error(r, w, http.StatusBadRequest, "invalid url: %s", err)
|
2021-03-15 14:19:04 +03:00
|
|
|
|
2019-08-21 14:39:37 +03:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
filt := FilterYAML{
|
2019-11-06 15:56:29 +03:00
|
|
|
Enabled: fj.Data.Enabled,
|
|
|
|
Name: fj.Data.Name,
|
|
|
|
URL: fj.Data.URL,
|
|
|
|
}
|
2022-09-23 13:23:35 +03:00
|
|
|
status := d.filterSetProperties(fj.URL, filt, fj.Whitelist)
|
2019-11-06 15:56:29 +03:00
|
|
|
if (status & statusFound) == 0 {
|
2022-09-23 13:23:35 +03:00
|
|
|
aghhttp.Error(r, w, http.StatusBadRequest, "URL doesn't exist")
|
|
|
|
|
2019-08-21 14:39:37 +03:00
|
|
|
return
|
|
|
|
}
|
2019-11-06 15:56:29 +03:00
|
|
|
if (status & statusURLExists) != 0 {
|
2022-09-23 13:23:35 +03:00
|
|
|
aghhttp.Error(r, w, http.StatusBadRequest, "URL already exists")
|
|
|
|
|
2019-11-06 15:56:29 +03:00
|
|
|
return
|
|
|
|
}
|
2019-08-21 14:39:37 +03:00
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
d.ConfigModified()
|
2022-08-17 20:40:47 +03:00
|
|
|
|
|
|
|
restart := (status & statusEnabledChanged) != 0
|
2020-03-11 19:17:46 +03:00
|
|
|
if (status&statusUpdateRequired) != 0 && fj.Data.Enabled {
|
2022-09-23 13:23:35 +03:00
|
|
|
// download new filter and apply its rules.
|
|
|
|
nUpdated := d.refreshFilters(!fj.Whitelist, fj.Whitelist, false)
|
2020-03-11 19:17:46 +03:00
|
|
|
// if at least 1 filter has been updated, refreshFilters() restarts the filtering automatically
|
|
|
|
// if not - we restart the filtering ourselves
|
|
|
|
restart = false
|
|
|
|
if nUpdated == 0 {
|
|
|
|
restart = true
|
|
|
|
}
|
|
|
|
}
|
2022-08-17 20:40:47 +03:00
|
|
|
|
2020-03-11 19:17:46 +03:00
|
|
|
if restart {
|
2022-09-23 13:23:35 +03:00
|
|
|
d.EnableFilters(true)
|
2019-11-06 15:56:29 +03:00
|
|
|
}
|
2019-08-21 14:39:37 +03:00
|
|
|
}
|
|
|
|
|
2022-09-29 19:04:26 +03:00
|
|
|
// filteringRulesReq is the JSON structure for setting custom filtering rules.
type filteringRulesReq struct {
	Rules []string `json:"rules"`
}
|
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
func (d *DNSFilter) handleFilteringSetRules(w http.ResponseWriter, r *http.Request) {
|
2022-09-29 19:04:26 +03:00
|
|
|
if aghhttp.WriteTextPlainDeprecated(w, r) {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
req := &filteringRulesReq{}
|
|
|
|
err := json.NewDecoder(r.Body).Decode(req)
|
2019-08-21 14:39:37 +03:00
|
|
|
if err != nil {
|
2022-09-29 19:04:26 +03:00
|
|
|
aghhttp.Error(r, w, http.StatusBadRequest, "reading req: %s", err)
|
2021-12-16 20:54:59 +03:00
|
|
|
|
2019-08-21 14:39:37 +03:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-09-29 19:04:26 +03:00
|
|
|
d.UserRules = req.Rules
|
2022-09-23 13:23:35 +03:00
|
|
|
d.ConfigModified()
|
|
|
|
d.EnableFilters(true)
|
2019-09-04 14:12:00 +03:00
|
|
|
}
|
2019-08-21 14:39:37 +03:00
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
func (d *DNSFilter) handleFilteringRefresh(w http.ResponseWriter, r *http.Request) {
|
2020-02-28 12:40:16 +03:00
|
|
|
type Req struct {
|
|
|
|
White bool `json:"whitelist"`
|
|
|
|
}
|
2020-02-26 19:58:25 +03:00
|
|
|
type Resp struct {
|
|
|
|
Updated int `json:"updated"`
|
|
|
|
}
|
|
|
|
resp := Resp{}
|
|
|
|
var err error
|
|
|
|
|
2020-02-28 12:40:16 +03:00
|
|
|
req := Req{}
|
|
|
|
err = json.NewDecoder(r.Body).Decode(&req)
|
|
|
|
if err != nil {
|
2021-12-16 20:54:59 +03:00
|
|
|
aghhttp.Error(r, w, http.StatusBadRequest, "json decode: %s", err)
|
|
|
|
|
2020-02-28 12:40:16 +03:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
var ok bool
|
|
|
|
resp.Updated, _, ok = d.tryRefreshFilters(!req.White, req.White, true)
|
|
|
|
if !ok {
|
|
|
|
aghhttp.Error(
|
|
|
|
r,
|
|
|
|
w,
|
|
|
|
http.StatusInternalServerError,
|
|
|
|
"filters update procedure is already running",
|
|
|
|
)
|
2021-12-16 20:54:59 +03:00
|
|
|
|
2019-10-10 17:12:32 +03:00
|
|
|
return
|
|
|
|
}
|
2020-02-26 19:58:25 +03:00
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
w.Header().Set("Content-Type", "application/json")
|
|
|
|
|
|
|
|
err = json.NewEncoder(w).Encode(resp)
|
2020-02-26 19:58:25 +03:00
|
|
|
if err != nil {
|
2021-12-16 20:54:59 +03:00
|
|
|
aghhttp.Error(r, w, http.StatusInternalServerError, "json encode: %s", err)
|
|
|
|
|
2020-02-26 19:58:25 +03:00
|
|
|
return
|
|
|
|
}
|
2019-09-04 14:12:00 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
// filterJSON is the JSON representation of a single filter list in the
// filtering HTTP API.
type filterJSON struct {
	URL         string `json:"url"`
	Name        string `json:"name"`
	LastUpdated string `json:"last_updated,omitempty"`
	ID          int64  `json:"id"`
	RulesCount  uint32 `json:"rules_count"`
	Enabled     bool   `json:"enabled"`
}
|
|
|
|
|
|
|
|
// filteringConfig is the JSON structure of the filtering configuration used by
// the status and config HTTP APIs.
type filteringConfig struct {
	Filters          []filterJSON `json:"filters"`
	WhitelistFilters []filterJSON `json:"whitelist_filters"`
	UserRules        []string     `json:"user_rules"`
	Interval         uint32       `json:"interval"` // in hours
	Enabled          bool         `json:"enabled"`
}
|
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
func filterToJSON(f FilterYAML) filterJSON {
|
2020-02-26 19:58:25 +03:00
|
|
|
fj := filterJSON{
|
|
|
|
ID: f.ID,
|
|
|
|
Enabled: f.Enabled,
|
|
|
|
URL: f.URL,
|
|
|
|
Name: f.Name,
|
|
|
|
RulesCount: uint32(f.RulesCount),
|
|
|
|
}
|
|
|
|
|
|
|
|
if !f.LastUpdated.IsZero() {
|
|
|
|
fj.LastUpdated = f.LastUpdated.Format(time.RFC3339)
|
|
|
|
}
|
|
|
|
|
|
|
|
return fj
|
2019-09-04 14:12:00 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
// Get filtering configuration
|
2022-09-23 13:23:35 +03:00
|
|
|
func (d *DNSFilter) handleFilteringStatus(w http.ResponseWriter, r *http.Request) {
|
2019-09-04 14:12:00 +03:00
|
|
|
resp := filteringConfig{}
|
2022-09-23 13:23:35 +03:00
|
|
|
d.filtersMu.RLock()
|
|
|
|
resp.Enabled = d.FilteringEnabled
|
|
|
|
resp.Interval = d.FiltersUpdateIntervalHours
|
|
|
|
for _, f := range d.Filters {
|
2020-02-26 19:58:25 +03:00
|
|
|
fj := filterToJSON(f)
|
2019-09-04 14:12:00 +03:00
|
|
|
resp.Filters = append(resp.Filters, fj)
|
2019-08-21 14:39:37 +03:00
|
|
|
}
|
2022-09-23 13:23:35 +03:00
|
|
|
for _, f := range d.WhitelistFilters {
|
2020-02-26 19:58:25 +03:00
|
|
|
fj := filterToJSON(f)
|
|
|
|
resp.WhitelistFilters = append(resp.WhitelistFilters, fj)
|
|
|
|
}
|
2022-09-23 13:23:35 +03:00
|
|
|
resp.UserRules = d.UserRules
|
|
|
|
d.filtersMu.RUnlock()
|
2019-08-21 14:39:37 +03:00
|
|
|
|
2019-09-04 14:12:00 +03:00
|
|
|
jsonVal, err := json.Marshal(resp)
|
|
|
|
if err != nil {
|
2021-12-16 20:54:59 +03:00
|
|
|
aghhttp.Error(r, w, http.StatusInternalServerError, "json encode: %s", err)
|
|
|
|
|
2019-08-21 14:39:37 +03:00
|
|
|
return
|
|
|
|
}
|
2019-09-04 14:12:00 +03:00
|
|
|
w.Header().Set("Content-Type", "application/json")
|
|
|
|
_, err = w.Write(jsonVal)
|
|
|
|
if err != nil {
|
2021-12-16 20:54:59 +03:00
|
|
|
aghhttp.Error(r, w, http.StatusInternalServerError, "http write: %s", err)
|
2019-09-04 14:12:00 +03:00
|
|
|
}
|
2019-08-21 14:39:37 +03:00
|
|
|
}
|
|
|
|
|
2019-09-04 14:12:00 +03:00
|
|
|
// handleFilteringConfig is the handler for the
// "POST /control/filtering/config" HTTP API.  It sets the general filtering
// configuration: enabled state and update interval.
func (d *DNSFilter) handleFilteringConfig(w http.ResponseWriter, r *http.Request) {
	req := filteringConfig{}
	err := json.NewDecoder(r.Body).Decode(&req)
	if err != nil {
		aghhttp.Error(r, w, http.StatusBadRequest, "json decode: %s", err)

		return
	}

	if !ValidateUpdateIvl(req.Interval) {
		aghhttp.Error(r, w, http.StatusBadRequest, "Unsupported interval")

		return
	}

	// Update the settings under the lock; the closure makes sure the mutex is
	// released before the filters are reloaded below.
	func() {
		d.filtersMu.Lock()
		defer d.filtersMu.Unlock()

		d.FilteringEnabled = req.Enabled
		d.FiltersUpdateIntervalHours = req.Interval
	}()

	d.ConfigModified()
	d.EnableFilters(true)
}
|
|
|
|
|
2020-12-17 13:32:46 +03:00
|
|
|
// checkHostRespRule is a single matched rule within a checkHostResp.
type checkHostRespRule struct {
	Text         string `json:"text"`
	FilterListID int64  `json:"filter_list_id"`
}
|
|
|
|
|
2019-11-06 15:13:31 +03:00
|
|
|
// checkHostResp is the JSON response for the check-host HTTP API.
type checkHostResp struct {
	Reason string `json:"reason"`

	// Rule is the text of the matched rule.
	//
	// Deprecated: Use Rules[*].Text.
	Rule string `json:"rule"`

	Rules []*checkHostRespRule `json:"rules"`

	// for FilteredBlockedService:
	SvcName string `json:"service_name"`

	// for Rewrite:
	CanonName string   `json:"cname"`    // CNAME value
	IPList    []net.IP `json:"ip_addrs"` // list of IP addresses

	// FilterID is the ID of the rule's filter list.
	//
	// Deprecated: Use Rules[*].FilterListID.
	FilterID int64 `json:"filter_id"`
}
|
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
func (d *DNSFilter) handleCheckHost(w http.ResponseWriter, r *http.Request) {
|
|
|
|
host := r.URL.Query().Get("name")
|
2019-11-06 15:13:31 +03:00
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
setts := d.GetConfig()
|
2019-11-06 15:13:31 +03:00
|
|
|
setts.FilteringEnabled = true
|
2021-10-20 19:52:13 +03:00
|
|
|
setts.ProtectionEnabled = true
|
2022-09-23 13:23:35 +03:00
|
|
|
|
|
|
|
d.ApplyBlockedServices(&setts, nil)
|
|
|
|
result, err := d.CheckHost(host, dns.TypeA, &setts)
|
2019-11-06 15:13:31 +03:00
|
|
|
if err != nil {
|
2021-12-16 20:54:59 +03:00
|
|
|
aghhttp.Error(
|
|
|
|
r,
|
|
|
|
w,
|
|
|
|
http.StatusInternalServerError,
|
|
|
|
"couldn't apply filtering: %s: %s",
|
|
|
|
host,
|
|
|
|
err,
|
|
|
|
)
|
|
|
|
|
2019-11-06 15:13:31 +03:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
rulesLen := len(result.Rules)
|
|
|
|
resp := checkHostResp{
|
|
|
|
Reason: result.Reason.String(),
|
|
|
|
SvcName: result.ServiceName,
|
|
|
|
CanonName: result.CanonName,
|
|
|
|
IPList: result.IPList,
|
|
|
|
Rules: make([]*checkHostRespRule, len(result.Rules)),
|
|
|
|
}
|
2020-12-17 13:32:46 +03:00
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
if rulesLen > 0 {
|
2020-12-21 17:48:07 +03:00
|
|
|
resp.FilterID = result.Rules[0].FilterListID
|
|
|
|
resp.Rule = result.Rules[0].Text
|
|
|
|
}
|
|
|
|
|
2020-12-17 13:32:46 +03:00
|
|
|
for i, r := range result.Rules {
|
|
|
|
resp.Rules[i] = &checkHostRespRule{
|
|
|
|
FilterListID: r.FilterListID,
|
|
|
|
Text: r.Text,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
w.Header().Set("Content-Type", "application/json")
|
|
|
|
err = json.NewEncoder(w).Encode(resp)
|
2019-11-06 15:13:31 +03:00
|
|
|
if err != nil {
|
2022-09-23 13:23:35 +03:00
|
|
|
aghhttp.Error(r, w, http.StatusInternalServerError, "encoding response: %s", err)
|
2019-11-06 15:13:31 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-09-04 14:12:00 +03:00
|
|
|
// RegisterFilteringHandlers registers the filtering-related HTTP API handlers
// if an HTTP registering function has been configured.
func (d *DNSFilter) RegisterFilteringHandlers() {
	registerHTTP := d.HTTPRegister
	if registerHTTP == nil {
		// No registering function means the HTTP API isn't used.
		return
	}

	registerHTTP(http.MethodPost, "/control/safebrowsing/enable", d.handleSafeBrowsingEnable)
	registerHTTP(http.MethodPost, "/control/safebrowsing/disable", d.handleSafeBrowsingDisable)
	registerHTTP(http.MethodGet, "/control/safebrowsing/status", d.handleSafeBrowsingStatus)

	registerHTTP(http.MethodPost, "/control/parental/enable", d.handleParentalEnable)
	registerHTTP(http.MethodPost, "/control/parental/disable", d.handleParentalDisable)
	registerHTTP(http.MethodGet, "/control/parental/status", d.handleParentalStatus)

	registerHTTP(http.MethodPost, "/control/safesearch/enable", d.handleSafeSearchEnable)
	registerHTTP(http.MethodPost, "/control/safesearch/disable", d.handleSafeSearchDisable)
	registerHTTP(http.MethodGet, "/control/safesearch/status", d.handleSafeSearchStatus)

	registerHTTP(http.MethodGet, "/control/rewrite/list", d.handleRewriteList)
	registerHTTP(http.MethodPost, "/control/rewrite/add", d.handleRewriteAdd)
	registerHTTP(http.MethodPost, "/control/rewrite/delete", d.handleRewriteDelete)

	registerHTTP(http.MethodGet, "/control/blocked_services/services", d.handleBlockedServicesAvailableServices)
	registerHTTP(http.MethodGet, "/control/blocked_services/list", d.handleBlockedServicesList)
	registerHTTP(http.MethodPost, "/control/blocked_services/set", d.handleBlockedServicesSet)

	registerHTTP(http.MethodGet, "/control/filtering/status", d.handleFilteringStatus)
	registerHTTP(http.MethodPost, "/control/filtering/config", d.handleFilteringConfig)
	registerHTTP(http.MethodPost, "/control/filtering/add_url", d.handleFilteringAddURL)
	registerHTTP(http.MethodPost, "/control/filtering/remove_url", d.handleFilteringRemoveURL)
	registerHTTP(http.MethodPost, "/control/filtering/set_url", d.handleFilteringSetURL)
	registerHTTP(http.MethodPost, "/control/filtering/refresh", d.handleFilteringRefresh)
	registerHTTP(http.MethodPost, "/control/filtering/set_rules", d.handleFilteringSetRules)
	registerHTTP(http.MethodGet, "/control/filtering/check_host", d.handleCheckHost)
}
|
|
|
|
|
2022-09-23 13:23:35 +03:00
|
|
|
// ValidateUpdateIvl returns false if i is not a valid filters update interval.
// The allowed values, in hours, are: zero, one, twelve, and one, three, or
// seven days.
func ValidateUpdateIvl(i uint32) bool {
	switch i {
	case 0, 1, 12, 24, 72, 168:
		return true
	default:
		return false
	}
}
|