Replace Subquery by Filter
This commit is contained in:
parent a32535d9be
commit 89f4c123cd
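In short: a query's single optional subquery (subquery: Option<(Whitespace, Subquery)>) becomes an ordered chain of filters (filters: Vec<(Whitespace, Filter)>), and the runner now threads the query result through each filter in turn. The sketch below illustrates that control flow with simplified stand-in types; it is not the crate's real API (the actual AST types and the eval_filter signature are in the diff that follows).

// Standalone sketch with simplified stand-in types (not hurl's real AST):
// a query result is threaded through an ordered list of filters, and a
// missing value anywhere in the chain short-circuits the rest.
#[derive(Debug, PartialEq)]
enum Value {
    String(String),
    Integer(i64),
    List(Vec<Value>),
}

enum Filter {
    Count,
    Regex(String), // pattern; the real filter uses the `regex` crate
}

fn eval_filter(filter: &Filter, value: &Value) -> Option<Value> {
    match (filter, value) {
        (Filter::Count, Value::List(items)) => Some(Value::Integer(items.len() as i64)),
        (Filter::Regex(pattern), Value::String(s)) => {
            // Simplified: a substring check stands in for capture-group extraction.
            if s.contains(pattern) {
                Some(Value::String(s.clone()))
            } else {
                None
            }
        }
        _ => None,
    }
}

fn apply_filters(filters: &[Filter], mut value: Option<Value>) -> Option<Value> {
    for filter in filters {
        value = eval_filter(filter, value.as_ref()?);
    }
    value
}

fn main() {
    let list = Value::List(vec![Value::Integer(1), Value::Integer(2)]);
    assert_eq!(apply_filters(&[Filter::Count], Some(list)), Some(Value::Integer(2)));

    let text = Value::String("Hello Bob!".to_string());
    assert_eq!(
        apply_filters(&[Filter::Regex("Bob".to_string())], Some(text)),
        Some(Value::String("Hello Bob!".to_string()))
    );
}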
@@ -183,7 +183,7 @@ pub mod tests {
},
},
subquery: None,
filters: vec![],
},
line_terminator0: LineTerminator {
space0: whitespace.clone(),

@@ -173,8 +173,6 @@ pub enum RunnerError {
QueryInvalidJson,
NoQueryResult,
SubqueryInvalidInput(String),
// Predicate
PredicateType,
PredicateValue(Value),

@@ -206,6 +204,10 @@ pub enum RunnerError {
UnauthorizedFileAccess {
path: PathBuf,
},
// Filter
FilterMissingInput {},
FilterInvalidInput(String),
}
// endregion

@@ -37,7 +37,6 @@ impl Error for runner::Error {
RunnerError::QueryInvalidJson { .. } => "Invalid JSON".to_string(),
RunnerError::QueryInvalidJsonpathExpression { .. } => "Invalid JSONPath".to_string(),
RunnerError::PredicateType { .. } => "Assert - inconsistent predicate type".to_string(),
RunnerError::SubqueryInvalidInput { .. } => "Subquery error".to_string(),
RunnerError::InvalidDecoding { .. } => "Invalid decoding".to_string(),
RunnerError::InvalidCharset { .. } => "Invalid charset".to_string(),
RunnerError::AssertFailure { .. } => "Assert failure".to_string(),

@@ -47,6 +46,8 @@ impl Error for runner::Error {
RunnerError::CouldNotUncompressResponse(..) => "Decompression error".to_string(),
RunnerError::InvalidJson { .. } => "Invalid JSON".to_string(),
RunnerError::UnauthorizedFileAccess { .. } => "Unauthorized file access".to_string(),
RunnerError::FilterMissingInput { .. } => "Filter Error".to_string(),
RunnerError::FilterInvalidInput { .. } => "Filter Error".to_string(),
}
}

@@ -98,9 +99,6 @@ impl Error for runner::Error {
RunnerError::PredicateType { .. } => {
"predicate type inconsistent with value return by query".to_string()
}
RunnerError::SubqueryInvalidInput(t) => {
format!("type <{}> from query result and subquery do not match", t)
}
RunnerError::InvalidDecoding { charset } => {
format!("the body can not be decoded with charset '{}'", charset)
}

@@ -142,6 +140,12 @@ impl Error for runner::Error {
path.to_str().unwrap()
)
}
RunnerError::FilterMissingInput { .. } => {
"Missing value to apply the filter".to_string()
}
RunnerError::FilterInvalidInput(message) => {
format!("Invalid Filter Input <{}>", message)
}
}
}
}
packages/hurl/src/runner/filter.rs (new file, 186 lines)
@@ -0,0 +1,186 @@
use crate::runner::template::eval_template;
use crate::runner::{Error, RunnerError, Value};
use hurl_core::ast::{Filter, FilterValue, RegexValue, SourceInfo};
use regex::Regex;
use std::collections::HashMap;

pub fn eval_filter(
    filter: &Filter,
    value: &Value,
    variables: &HashMap<String, Value>,
) -> Result<Option<Value>, Error> {
    match &filter.value {
        FilterValue::Regex {
            value: regex_value, ..
        } => eval_regex(value, regex_value, variables, &filter.source_info),
        FilterValue::Count {} => eval_count(value, &filter.source_info),
        FilterValue::EscapeUrl { .. } => todo!(),
        FilterValue::UnEscapeUrl { .. } => todo!(),
    }
}

fn eval_regex(
    value: &Value,
    regex_value: &RegexValue,
    variables: &HashMap<String, Value>,
    source_info: &SourceInfo,
) -> Result<Option<Value>, Error> {
    let re = match regex_value {
        RegexValue::Template(t) => {
            let value = eval_template(t, variables)?;
            match Regex::new(value.as_str()) {
                Ok(re) => re,
                Err(_) => {
                    return Err(Error {
                        source_info: t.source_info.clone(),
                        inner: RunnerError::InvalidRegex(),
                        assert: false,
                    })
                }
            }
        }
        RegexValue::Regex(re) => re.inner.clone(),
    };

    match value {
        Value::String(s) => match re.captures(s.as_str()) {
            Some(captures) => match captures.get(1) {
                Some(v) => Ok(Some(Value::String(v.as_str().to_string()))),
                None => Ok(None),
            },
            None => Ok(None),
        },
        v => Err(Error {
            source_info: source_info.clone(),
            inner: RunnerError::FilterInvalidInput(v._type()),
            assert: false,
        }),
    }
}

fn eval_count(value: &Value, source_info: &SourceInfo) -> Result<Option<Value>, Error> {
    match value {
        Value::List(values) => Ok(Some(Value::Integer(values.len() as i64))),
        Value::Bytes(values) => Ok(Some(Value::Integer(values.len() as i64))),
        Value::Nodeset(size) => Ok(Some(Value::Integer(*size as i64))),
        v => Err(Error {
            source_info: source_info.clone(),
            inner: RunnerError::FilterInvalidInput(v._type()),
            assert: false,
        }),
    }
}

#[cfg(test)]
pub mod tests {
    use super::*;
    use hurl_core::ast::{FilterValue, SourceInfo, Template, TemplateElement, Whitespace};

    #[test]
    pub fn filter_count() {
        // count
        let filter = Filter {
            source_info: SourceInfo::new(1, 1, 1, 6),
            value: FilterValue::Count {},
        };
        let variables = HashMap::new();

        assert_eq!(
            eval_filter(
                &filter,
                &Value::List(vec![
                    Value::Integer(1),
                    Value::Integer(2),
                    Value::Integer(2)
                ]),
                &variables,
            )
            .unwrap(),
            Some(Value::Integer(3))
        );

        let error = eval_filter(&filter, &Value::Bool(true), &variables)
            .err()
            .unwrap();
        assert_eq!(error.source_info, SourceInfo::new(1, 1, 1, 6));
        assert_eq!(
            error.inner,
            RunnerError::FilterInvalidInput("boolean".to_string())
        );
    }

    #[test]
    fn test_filter_regex() {
        // regex "Hello (.*)!"
        let variables = HashMap::new();
        let whitespace = Whitespace {
            value: String::from(""),
            source_info: SourceInfo::new(0, 0, 0, 0),
        };
        let filter = Filter {
            source_info: SourceInfo::new(1, 1, 1, 20),
            value: FilterValue::Regex {
                space0: whitespace,
                value: RegexValue::Template(Template {
                    quotes: false,
                    elements: vec![TemplateElement::String {
                        value: "Hello (.*)!".to_string(),
                        encoded: "Hello (.*)!".to_string(),
                    }],
                    source_info: SourceInfo::new(1, 7, 1, 20),
                }),
            },
        };
        assert_eq!(
            eval_filter(
                &filter,
                &Value::String("Hello Bob!".to_string()),
                &variables,
            )
            .unwrap()
            .unwrap(),
            Value::String("Bob".to_string())
        );

        let error = eval_filter(&filter, &Value::Bool(true), &variables)
            .err()
            .unwrap();
        assert_eq!(error.source_info, SourceInfo::new(1, 1, 1, 20));
        assert_eq!(
            error.inner,
            RunnerError::FilterInvalidInput("boolean".to_string())
        );
    }

    #[test]
    fn test_filter_invalid_regex() {
        let variables = HashMap::new();
        let whitespace = Whitespace {
            value: String::from(""),
            source_info: SourceInfo::new(0, 0, 0, 0),
        };
        let filter = Filter {
            source_info: SourceInfo::new(1, 1, 1, 20),
            value: FilterValue::Regex {
                space0: whitespace,
                value: RegexValue::Template(Template {
                    quotes: false,
                    elements: vec![TemplateElement::String {
                        value: "???".to_string(),
                        encoded: "???".to_string(),
                    }],
                    source_info: SourceInfo::new(1, 7, 1, 20),
                }),
            },
        };
        let error = eval_filter(
            &filter,
            &Value::String("Hello Bob!".to_string()),
            &variables,
        )
        .err()
        .unwrap();
        assert_eq!(error.source_info, SourceInfo::new(1, 7, 1, 20));
        assert_eq!(error.inner, RunnerError::InvalidRegex {});
    }
}
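Worth noting from eval_regex above: the filter returns the first capture group of the match (captures.get(1)), not the whole match, and yields None when the pattern does not match or has no capture group. A standalone check of that convention with the `regex` crate (which this new module already depends on); the snippet is illustrative and sits outside the Hurl AST.

use regex::Regex;

fn main() {
    // Mirrors the "Hello (.*)!" test above: group 1 is returned, not the full match.
    let re = Regex::new("Hello (.*)!").unwrap();
    let captured = re
        .captures("Hello Bob!")
        .and_then(|caps| caps.get(1))
        .map(|m| m.as_str().to_string());
    assert_eq!(captured, Some("Bob".to_string()));

    // No match at all yields None, just as the filter does.
    assert!(re.captures("Goodbye!").is_none());
}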
@@ -37,6 +37,7 @@ mod core;
mod entry;
mod error;
mod expr;
mod filter;
mod hurl_file;
mod json;
mod multipart;

@@ -46,7 +47,6 @@ mod query;
mod request;
mod response;
mod runner_options;
mod subquery;
mod template;
mod value;
mod xpath;

@@ -25,7 +25,7 @@ use super::value::Value;
use super::xpath;
use crate::http;
use crate::jsonpath;
use crate::runner::subquery::eval_subquery;
use crate::runner::filter::eval_filter;
use hurl_core::ast::*;
use sha2::Digest;

@@ -36,20 +36,19 @@ pub fn eval_query(
variables: &HashMap<String, Value>,
http_response: &http::Response,
) -> QueryResult {
let value = eval_query_value(query, variables, http_response)?;
if let Some((_, subquery)) = &query.subquery {
if let Some(value) = &value {
eval_subquery(subquery, value, variables)
let mut value = eval_query_value(query, variables, http_response)?;
for (_, filter) in &query.filters {
if let Some(existing_value) = value {
value = eval_filter(filter, &existing_value, variables)?;
} else {
Err(Error {
source_info: subquery.source_info.clone(),
inner: RunnerError::SubqueryInvalidInput("none".to_string()),
return Err(Error {
source_info: filter.source_info.clone(),
inner: RunnerError::CouldNotResolveProxyName,
assert: false,
})
});
}
} else {
Ok(value)
}
Ok(value)
}
pub fn eval_query_value(
query: &Query,

@@ -355,7 +354,7 @@ pub mod tests {
source_info: SourceInfo::new(1, 7, 1, 10),
},
},
subquery: None,
filters: vec![],
}
}

@@ -377,7 +376,7 @@ pub mod tests {
source_info: SourceInfo::new(0, 0, 0, 0),
},
},
subquery: None,
filters: vec![],
}
}

@@ -399,7 +398,7 @@ pub mod tests {
source_info: SourceInfo::new(0, 0, 0, 0),
},
},
subquery: None,
filters: vec![],
}
}

@@ -444,7 +443,7 @@ pub mod tests {
source_info: SourceInfo::new(1, 10, 1, 19),
},
},
subquery: None,
filters: vec![],
}
}

@@ -467,7 +466,7 @@ pub mod tests {
source_info: SourceInfo::new(1, 10, 1, 18),
},
},
subquery: None,
filters: vec![],
}
}

@@ -490,16 +489,16 @@ pub mod tests {
source_info: SourceInfo::new(1, 10, 1, 18),
},
},
subquery: Some((
filters: vec![(
Whitespace {
value: "".to_string(),
source_info: SourceInfo::new(0, 0, 0, 0),
},
Subquery {
Filter {
source_info: SourceInfo::new(0, 0, 0, 0),
value: SubqueryValue::Count {},
value: FilterValue::Count {},
},
)),
)],
}
}

@@ -522,7 +521,7 @@ pub mod tests {
source_info: SourceInfo::new(1, 10, 1, 18),
},
},
subquery: None,
filters: vec![],
}
}

@@ -544,7 +543,7 @@ pub mod tests {
source_info: SourceInfo::new(1, 7, 1, 26),
}),
},
subquery: None,
filters: vec![],
}
}

@@ -566,7 +565,7 @@ pub mod tests {
source_info: SourceInfo::new(1, 7, 1, 10),
}),
},
subquery: None,
filters: vec![],
}
}

@@ -578,7 +577,7 @@ pub mod tests {
&Query {
source_info: SourceInfo::new(0, 0, 0, 0),
value: QueryValue::Status {},
subquery: None
filters: vec![]
},
&variables,
&http::hello_http_response(),

@@ -609,7 +608,7 @@ pub mod tests {
source_info: SourceInfo::new(2, 8, 2, 14),
},
},
subquery: None,
filters: vec![],
};
// let error = query_header.eval(http::hello_http_response()).err().unwrap();
// assert_eq!(error.source_info.start, Pos { line: 1, column: 8 });

@@ -640,7 +639,7 @@ pub mod tests {
source_info: SourceInfo::new(1, 8, 1, 16),
},
},
subquery: None,
filters: vec![],
};
assert_eq!(
eval_query(&query_header, &variables, &http::hello_http_response())

@@ -688,7 +687,7 @@ pub mod tests {
attribute: None,
},
},
subquery: None,
filters: vec![],
};
assert_eq!(
eval_query(&query, &variables, &response.clone())

@@ -718,7 +717,7 @@ pub mod tests {
}),
},
},
subquery: None,
filters: vec![],
};
assert_eq!(
eval_query(&query, &variables, &response.clone())

@@ -748,7 +747,7 @@ pub mod tests {
}),
},
},
subquery: None,
filters: vec![],
};
assert_eq!(
eval_query(&query, &variables, &response.clone())

@@ -778,7 +777,7 @@ pub mod tests {
}),
},
},
subquery: None,
filters: vec![],
};
assert_eq!(eval_query(&query, &variables, &response).unwrap(), None);
}

@@ -869,7 +868,7 @@ pub mod tests {
&Query {
source_info: SourceInfo::new(0, 0, 0, 0),
value: QueryValue::Body {},
subquery: None
filters: vec![]
},
&variables,
&http::hello_http_response(),

@@ -882,7 +881,7 @@ pub mod tests {
&Query {
source_info: SourceInfo::new(1, 1, 1, 2),
value: QueryValue::Body {},
subquery: None,
filters: vec![],
},
&variables,
&http::bytes_http_response(),

@@ -941,7 +940,7 @@ pub mod tests {
source_info: SourceInfo::new(1, 7, 1, 10),
},
},
subquery: None,
filters: vec![],
};
let error = eval_query(&query, &variables, &http::xml_two_users_http_response())
.err()

@@ -996,7 +995,7 @@ pub mod tests {
source_info: SourceInfo::new(0, 0, 0, 0),
},
},
subquery: None,
filters: vec![],
}
}

@@ -1037,7 +1036,7 @@ pub mod tests {
source_info: SourceInfo::new(1, 10, 1, 13),
},
},
subquery: None,
filters: vec![],
};
let error = eval_query(&jsonpath_query, &variables, &json_http_response())

@@ -1152,7 +1151,7 @@ pub mod tests {
&Query {
source_info: SourceInfo::new(0, 0, 0, 0),
value: QueryValue::Bytes {},
subquery: None
filters: vec![]
},
&variables,
&http::hello_http_response(),

@@ -1171,7 +1170,7 @@ pub mod tests {
&Query {
source_info: SourceInfo::new(0, 0, 0, 0),
value: QueryValue::Sha256 {},
subquery: None
filters: vec![]
},
&variables,
&http::Response {

@@ -301,7 +301,7 @@ pub struct Assert {
pub struct Query {
pub source_info: SourceInfo,
pub value: QueryValue,
pub subquery: Option<(Whitespace, Subquery)>,
pub filters: Vec<(Whitespace, Filter)>,
}

#[derive(Clone, Debug, PartialEq, Eq)]

@@ -386,21 +386,6 @@ impl CookieAttributeName {
}
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Subquery {
pub source_info: SourceInfo,
pub value: SubqueryValue,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum SubqueryValue {
Regex {
space0: Whitespace,
value: RegexValue,
},
Count {},
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Predicate {
pub not: bool,

@@ -810,3 +795,20 @@ pub enum VariableValue {
Float(Float),
String(Template),
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Filter {
pub source_info: SourceInfo,
pub value: FilterValue,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum FilterValue {
Count {},
Regex {
space0: Whitespace,
value: RegexValue,
},
EscapeUrl {},
UnEscapeUrl {},
}

@@ -549,9 +549,9 @@ impl Htmlable for Capture {
impl Htmlable for Query {
fn to_html(&self) -> String {
let mut buffer = self.value.clone().to_html();
if let Some((space, subquery)) = self.clone().subquery {
for (space, filter) in self.clone().filters {
buffer.push_str(space.to_html().as_str());
buffer.push_str(subquery.to_html().as_str());
buffer.push_str(filter.to_html().as_str());
}
buffer
}

@@ -637,26 +637,6 @@ impl Htmlable for RegexValue {
}
}

impl Htmlable for Subquery {
fn to_html(&self) -> String {
let mut buffer = String::from("");
match self.value.clone() {
SubqueryValue::Regex { value, space0 } => {
buffer.push_str("<span class=\"subquery-type\">regex</span>");
buffer.push_str(space0.to_html().as_str());
// buffer.push_str(
//     format!("<span class=\"string\">\"{}\"</span>", value.to_html()).as_str(),
// );
buffer.push_str(value.to_html().as_str());
}
SubqueryValue::Count {} => {
buffer.push_str("<span class=\"subquery-type\">count</span>")
}
}
buffer
}
}

impl Htmlable for CookiePath {
fn to_html(&self) -> String {
let mut buffer = String::from("");

@@ -1067,6 +1047,31 @@ impl Htmlable for Expr {
}
}

impl Htmlable for Filter {
fn to_html(&self) -> String {
self.value.to_html()
}
}

impl Htmlable for FilterValue {
fn to_html(&self) -> String {
match self {
FilterValue::Count {} => "<span class=\"filter-type\">count</span>".to_string(),
FilterValue::Regex { space0, value } => {
let mut buffer = "".to_string();
buffer.push_str("<span class=\"filter-type\">regex</span>");
buffer.push_str(space0.to_html().as_str());
buffer.push_str(value.to_html().as_str());
buffer
}
FilterValue::EscapeUrl {} => "<span class=\"filter-type\">escapeUrl</span>".to_string(),
FilterValue::UnEscapeUrl {} => {
"<span class=\"filter-type\">unescapeUrl</span>".to_string()
}
}
}
}

fn add_line_terminators(buffer: &mut String, line_terminators: Vec<LineTerminator>) {
for line_terminator in line_terminators {
buffer.push_str("<span class=\"line\">");

@@ -41,7 +41,7 @@ span.line:before {
color: teal;
}

.subquery-type {
.filter-type {
color: darkblue;
}
packages/hurl_core/src/parser/filter.rs (new file, 101 lines)
@@ -0,0 +1,101 @@
use crate::ast::{Filter, FilterValue, SourceInfo};
use crate::parser::combinators::choice;
use crate::parser::primitives::{one_or_more_spaces, try_literal};
use crate::parser::query::regex_value;
use crate::parser::{Error, ParseError, ParseResult, Reader};

pub fn filter(reader: &mut Reader) -> ParseResult<'static, Filter> {
    let start = reader.state.pos.clone();
    let value = choice(
        vec![
            count_filter,
            regex_filter,
            escape_url_filter,
            unescape_url_filter,
        ],
        reader,
    )
    .map_err(|e| {
        if e.recoverable {
            Error {
                pos: e.pos,
                recoverable: e.recoverable,
                inner: ParseError::Expecting {
                    value: "filter".to_string(),
                },
            }
        } else {
            e
        }
    })?;
    let end = reader.state.pos.clone();
    let source_info = SourceInfo { start, end };
    Ok(Filter { source_info, value })
}

fn count_filter(reader: &mut Reader) -> ParseResult<'static, FilterValue> {
    try_literal("count", reader)?;
    Ok(FilterValue::Count {})
}

fn regex_filter(reader: &mut Reader) -> ParseResult<'static, FilterValue> {
    try_literal("regex", reader)?;
    let space0 = one_or_more_spaces(reader)?;
    let value = regex_value(reader)?;
    Ok(FilterValue::Regex { space0, value })
}

fn escape_url_filter(reader: &mut Reader) -> ParseResult<'static, FilterValue> {
    try_literal("escapeUrl", reader)?;
    Ok(FilterValue::EscapeUrl {})
}

fn unescape_url_filter(reader: &mut Reader) -> ParseResult<'static, FilterValue> {
    try_literal("unescapeUrl", reader)?;
    Ok(FilterValue::UnEscapeUrl {})
}

#[cfg(test)]
mod tests {
    use crate::ast::Pos;
    use crate::parser::ParseError;

    use super::*;

    #[test]
    fn test_count() {
        let mut reader = Reader::init("count");
        assert_eq!(
            filter(&mut reader).unwrap(),
            Filter {
                source_info: SourceInfo::new(1, 1, 1, 6),
                value: FilterValue::Count {},
            }
        );
    }

    #[test]
    fn test_error() {
        let mut reader = Reader::init("xcount");
        let err = filter(&mut reader).err().unwrap();
        assert_eq!(
            err.inner,
            ParseError::Expecting {
                value: "filter".to_string()
            }
        );
        assert_eq!(err.pos, Pos { line: 1, column: 1 });
        assert!(err.recoverable);

        let mut reader = Reader::init("regex 1");
        let err = filter(&mut reader).err().unwrap();
        assert_eq!(
            err.inner,
            ParseError::Expecting {
                value: "\" or /".to_string()
            }
        );
        assert_eq!(err.pos, Pos { line: 1, column: 7 });
        assert!(!err.recoverable);
    }
}
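The parser above leans on the crate's choice/try_literal combinators: each keyword parser fails with a recoverable error when its literal is absent, so choice can backtrack and try the next alternative, and the updated query() parser further below uses the same recoverable flag to decide whether a trailing word is a filter or simply not part of the query. Below is a self-contained sketch of that pattern using simplified stand-in types; hurl_core's actual Reader and Error carry positions and more state.

// Simplified stand-in types, not hurl_core's real parser machinery.
struct Reader {
    input: String,
    pos: usize,
}

#[derive(Debug)]
struct ParseError {
    recoverable: bool,
}

fn try_literal(lit: &str, reader: &mut Reader) -> Result<(), ParseError> {
    if reader.input[reader.pos..].starts_with(lit) {
        reader.pos += lit.len();
        Ok(())
    } else {
        // Recoverable: the caller may backtrack and try another alternative.
        Err(ParseError { recoverable: true })
    }
}

fn choice<T>(
    parsers: &[fn(&mut Reader) -> Result<T, ParseError>],
    reader: &mut Reader,
) -> Result<T, ParseError> {
    for &parse in parsers {
        let saved = reader.pos;
        match parse(reader) {
            Ok(value) => return Ok(value),
            Err(e) if e.recoverable => reader.pos = saved, // backtrack, try the next branch
            Err(e) => return Err(e),                       // hard error: stop parsing
        }
    }
    Err(ParseError { recoverable: true })
}

fn count_filter(reader: &mut Reader) -> Result<&'static str, ParseError> {
    try_literal("count", reader)?;
    Ok("count")
}

fn escape_url_filter(reader: &mut Reader) -> Result<&'static str, ParseError> {
    try_literal("escapeUrl", reader)?;
    Ok("escapeUrl")
}

fn main() {
    let mut reader = Reader { input: "escapeUrl".to_string(), pos: 0 };
    let kind = choice(&[count_filter, escape_url_filter], &mut reader).unwrap();
    assert_eq!(kind, "escapeUrl");
}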
@@ -40,6 +40,7 @@ mod cookiepath;
mod error;
mod expr;
mod filename;
mod filter;
mod json;
mod parsers;
mod predicate;

@@ -49,7 +50,6 @@ mod query;
mod reader;
mod sections;
mod string;
mod subquery;
mod template;
mod url;
mod xml;

@@ -16,6 +16,7 @@
*
*/
use crate::ast::*;
use crate::parser::filter::filter;
use crate::parser::{Error, ParseError};
use super::combinators::*;

@@ -23,36 +24,38 @@ use super::cookiepath::cookiepath;
use super::primitives::*;
use super::reader::Reader;
use super::string::*;
use super::subquery::subquery;
use super::ParseResult;

pub fn query(reader: &mut Reader) -> ParseResult<'static, Query> {
let start = reader.state.pos.clone();
let value = query_value(reader)?;
let end = reader.state.pos.clone();
let save = reader.state.clone();
let space = zero_or_more_spaces(reader)?;
let optional_subquery = if space.value.is_empty() {
reader.state = save;
None
} else {
match subquery(reader) {
Ok(q) => Some((space, q)),
let mut filters = vec![];
loop {
let save = reader.state.clone();
let space = zero_or_more_spaces(reader)?;
if space.value.is_empty() {
break;
}
match filter(reader) {
Ok(f) => {
filters.push((space, f));
}
Err(e) => {
if e.recoverable {
reader.state = save;
None
break;
} else {
return Err(e);
}
}
}
};
}
let end = reader.state.pos.clone();
Ok(Query {
source_info: SourceInfo { start, end },
value,
subquery: optional_subquery,
filters,
})
}

@@ -214,7 +217,7 @@ mod tests {
Query {
source_info: SourceInfo::new(1, 1, 1, 7),
value: QueryValue::Status {},
subquery: None,
filters: vec![],
}
);
}

@@ -227,7 +230,7 @@ mod tests {
Query {
source_info: SourceInfo::new(1, 1, 1, 7),
value: QueryValue::Status {},
subquery: None,
filters: vec![],
}
);
}

@@ -372,4 +375,27 @@ mod tests {
},
);
}

#[test]
fn test_query_with_filters() {
let mut reader = Reader::init("body unescapeUrl ");
assert_eq!(
query(&mut reader).unwrap(),
Query {
source_info: SourceInfo::new(1, 1, 1, 17),
value: QueryValue::Body {},
filters: vec![(
Whitespace {
value: " ".to_string(),
source_info: SourceInfo::new(1, 5, 1, 6)
},
Filter {
source_info: SourceInfo::new(1, 6, 1, 17),
value: FilterValue::UnEscapeUrl {},
}
)],
}
);
assert_eq!(reader.state.cursor, 16);
}
}

@@ -729,7 +729,7 @@ mod tests {
source_info: SourceInfo::new(2, 8, 2, 18),
},
},
subquery: None,
filters: vec![],
},
space1: Whitespace {
value: String::from(" "),

@@ -1026,12 +1026,12 @@ mod tests {
quotes: false,
elements: vec![TemplateElement::String {
value: "toto".to_string(),
encoded: "toto".to_string()
encoded: "toto".to_string(),
}],
source_info: SourceInfo {
start: Pos { line: 1, column: 1 },
end: Pos { line: 1, column: 5 },
}
},
})
);
let mut reader = Reader::init("\"123\"");

@@ -1041,12 +1041,12 @@ mod tests {
quotes: true,
elements: vec![TemplateElement::String {
value: "123".to_string(),
encoded: "123".to_string()
encoded: "123".to_string(),
}],
source_info: SourceInfo {
start: Pos { line: 1, column: 1 },
end: Pos { line: 1, column: 6 },
}
},
})
);
}

@@ -1177,20 +1177,20 @@ mod tests {
source_info: SourceInfo::new(1, 13, 1, 23),
},
},
subquery: None,
filters: vec![],
}
);
}

#[test]
fn test_capture_with_subquery() {
fn test_capture_with_filter() {
let mut reader = Reader::init("token: header \"Location\" regex \"token=(.*)\"");
let capture0 = capture(&mut reader).unwrap();

assert_eq!(
capture0.query,
Query {
source_info: SourceInfo::new(1, 8, 1, 25),
source_info: SourceInfo::new(1, 8, 1, 44),
value: QueryValue::Header {
space0: Whitespace {
value: String::from(" "),

@@ -1206,14 +1206,14 @@ mod tests {
},
},
subquery: Some((
filters: vec![(
Whitespace {
value: " ".to_string(),
source_info: SourceInfo::new(1, 25, 1, 26),
},
Subquery {
Filter {
source_info: SourceInfo::new(1, 26, 1, 44),
value: SubqueryValue::Regex {
value: FilterValue::Regex {
space0: Whitespace {
value: " ".to_string(),
source_info: SourceInfo::new(1, 31, 1, 32),

@@ -1228,14 +1228,14 @@ mod tests {
}),
},
}
)),
)],
}
);
assert_eq!(reader.state.cursor, 43);
}

#[test]
fn test_capture_with_subquery_error() {
fn test_capture_with_filter_error() {
let mut reader = Reader::init("token: header \"Location\" regex ");
let error = capture(&mut reader).err().unwrap();
assert_eq!(

@@ -1294,7 +1294,7 @@ mod tests {
source_info: SourceInfo::new(1, 8, 1, 18),
},
},
subquery: None,
filters: vec![],
}
);
}

@@ -268,8 +268,9 @@ impl ToJson for Assert {
impl ToJson for Query {
fn to_json(&self) -> JValue {
let mut attributes = query_value_attributes(&self.value);
if let Some((_, subquery)) = self.subquery.clone() {
attributes.push(("subquery".to_string(), subquery.to_json()));
if !self.filters.is_empty() {
let filters = JValue::List(self.filters.iter().map(|(_, f)| f.to_json()).collect());
attributes.push(("filters".to_string(), filters));
}
JValue::Object(attributes)
}

@@ -327,28 +328,6 @@ fn query_value_attributes(query_value: &QueryValue) -> Vec<(String, JValue)> {
attributes
}

impl ToJson for Subquery {
fn to_json(&self) -> JValue {
self.value.to_json()
}
}

impl ToJson for SubqueryValue {
fn to_json(&self) -> JValue {
let mut attributes = vec![];
match self {
SubqueryValue::Regex { value, .. } => {
attributes.push(("type".to_string(), JValue::String("regex".to_string())));
attributes.push(("expr".to_string(), value.to_json()));
}
SubqueryValue::Count { .. } => {
attributes.push(("type".to_string(), JValue::String("count".to_string())));
}
}
JValue::Object(attributes)
}
}

impl ToJson for RegexValue {
fn to_json(&self) -> JValue {
match self {

@@ -519,6 +498,37 @@ impl ToJson for hurl_core::ast::JsonListElement {
}
}

impl ToJson for Filter {
fn to_json(&self) -> JValue {
self.value.to_json()
}
}

impl ToJson for FilterValue {
fn to_json(&self) -> JValue {
let mut attributes = vec![];
match self {
FilterValue::Regex { value, .. } => {
attributes.push(("type".to_string(), JValue::String("regex".to_string())));
attributes.push(("expr".to_string(), value.to_json()));
}
FilterValue::Count { .. } => {
attributes.push(("type".to_string(), JValue::String("count".to_string())));
}
FilterValue::EscapeUrl { .. } => {
attributes.push(("type".to_string(), JValue::String("escapeUrl".to_string())));
}
FilterValue::UnEscapeUrl { .. } => {
attributes.push((
"type".to_string(),
JValue::String("unescapeUrl".to_string()),
));
}
}
JValue::Object(attributes)
}
}

#[cfg(test)]
pub mod tests {
use super::*;

@@ -666,7 +676,7 @@ pub mod tests {
source_info: SourceInfo::new(0, 0, 0, 0),
},
},
subquery: None,
filters: vec![],
}
}

@@ -114,5 +114,12 @@ pub fn format_token(token: Token, color: bool) -> String {
}
}
Token::Keyword(value) => value,
Token::FilterType(value) => {
if color {
value.cyan().to_string()
} else {
value
}
}
}
}

@@ -26,6 +26,7 @@ pub enum Token {
SectionHeader(String),
QueryType(String),
PredicateType(String),
FilterType(String),
Not(String),
Keyword(String),

@@ -397,9 +398,9 @@ impl Tokenizable for Assert {
impl Tokenizable for Query {
fn tokenize(&self) -> Vec<Token> {
let mut tokens: Vec<Token> = self.value.clone().tokenize();
if let Some((space, subquery)) = &self.subquery {
for (space, filter) in &self.filters {
tokens.append(&mut space.clone().tokenize());
tokens.append(&mut subquery.tokenize());
tokens.append(&mut filter.tokenize());
}
tokens
}

@@ -484,23 +485,6 @@ impl Tokenizable for CookieAttribute {
}
}

impl Tokenizable for Subquery {
fn tokenize(&self) -> Vec<Token> {
let mut tokens: Vec<Token> = vec![];
match self.value.clone() {
SubqueryValue::Regex { space0, value } => {
tokens.push(Token::QueryType(String::from("regex")));
tokens.append(&mut space0.tokenize());
tokens.append(&mut value.tokenize());
}
SubqueryValue::Count { .. } => {
tokens.push(Token::QueryType(String::from("count")));
}
}
tokens
}
}

impl Tokenizable for Predicate {
fn tokenize(&self) -> Vec<Token> {
let mut tokens: Vec<Token> = vec![];

@@ -1087,3 +1071,20 @@ impl Tokenizable for VeryVerboseOption {
tokens
}
}

impl Tokenizable for Filter {
fn tokenize(&self) -> Vec<Token> {
match self.value.clone() {
FilterValue::Regex { space0, value } => {
let mut tokens: Vec<Token> = vec![];
tokens.push(Token::FilterType(String::from("regex")));
tokens.append(&mut space0.tokenize());
tokens.append(&mut value.tokenize());
tokens
}
FilterValue::Count { .. } => vec![Token::FilterType(String::from("count"))],
FilterValue::EscapeUrl { .. } => vec![Token::FilterType(String::from("escapeUrl"))],
FilterValue::UnEscapeUrl { .. } => vec![Token::FilterType(String::from("unescapeUrl"))],
}
}
}

@@ -254,10 +254,11 @@ impl Lintable<Query> for Query {
Query {
source_info: SourceInfo::new(0, 0, 0, 0),
value: self.value.lint(),
subquery: self
.subquery
.clone()
.map(|(_, s)| (one_whitespace(), s.lint())),
filters: self
.filters
.iter()
.map(|(_, f)| (one_whitespace(), f.lint()))
.collect(),
}
}
}

@@ -314,19 +315,6 @@ impl Lintable<QueryValue> for QueryValue {
}
}

impl Lintable<Subquery> for Subquery {
fn errors(&self) -> Vec<Error> {
let errors = vec![];
errors
}

fn lint(&self) -> Subquery {
let source_info = SourceInfo::new(0, 0, 0, 0);
let value = self.value.lint();
Subquery { source_info, value }
}
}

impl Lintable<RegexValue> for RegexValue {
fn errors(&self) -> Vec<Error> {
let errors = vec![];

@@ -340,22 +328,6 @@ impl Lintable<RegexValue> for RegexValue {
}
}
}
impl Lintable<SubqueryValue> for SubqueryValue {
fn errors(&self) -> Vec<Error> {
let errors = vec![];
errors
}

fn lint(&self) -> SubqueryValue {
match self {
SubqueryValue::Regex { value, .. } => SubqueryValue::Regex {
space0: one_whitespace(),
value: value.lint(),
},
SubqueryValue::Count {} => SubqueryValue::Count {},
}
}
}

impl Lintable<CookieAttribute> for CookieAttribute {
fn errors(&self) -> Vec<Error> {

@@ -820,6 +792,37 @@ impl Lintable<EntryOption> for EntryOption {
}
}

impl Lintable<Filter> for Filter {
fn errors(&self) -> Vec<Error> {
let errors = vec![];
errors
}

fn lint(&self) -> Filter {
Filter {
source_info: SourceInfo::new(0, 0, 0, 0),
value: self.value.lint(),
}
}
}

impl Lintable<FilterValue> for FilterValue {
fn errors(&self) -> Vec<Error> {
let errors = vec![];
errors
}

fn lint(&self) -> FilterValue {
match self {
FilterValue::Regex { value, .. } => FilterValue::Regex {
space0: one_whitespace(),
value: value.lint(),
},
f => f.clone(),
}
}
}

#[cfg(test)]
mod tests {
use super::*;