in core/unittest/processor/ProcessorFilterNativeUnittest.cpp [270:731]
void ProcessorFilterNativeUnittest::TestBaseFilter() {
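// Covers ProcessorFilterNative driven by a ConditionExp filter: flat and nested
// and/or/not expressions, events that pass or fail the filter, and malformed
// configurations that Init() is expected to reject.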
// case 1: flat "and" of two regex conditions
{
Json::Value root;
root["operator"] = "and";
Json::Value operands1;
operands1["key"] = "key1";
operands1["exp"] = ".*value1";
operands1["type"] = "regex";
Json::Value operands2;
operands2["key"] = "key2";
operands2["exp"] = "value2.*";
operands2["type"] = "regex";
root["operands"].append(operands1);
root["operands"].append(operands2);
Json::Value config;
config["ConditionExp"] = root;
config["DiscardingNonUTF8"] = true;
// run function
ProcessorFilterNative& processor = *(new ProcessorFilterNative);
ProcessorInstance processorInstance(&processor, getPluginMeta());
APSARA_TEST_TRUE_FATAL(processorInstance.Init(config, mContext));
// case 1 : all fields are present, but only the second event matches both expressions
auto sourceBuffer1 = std::make_shared<SourceBuffer>();
PipelineEventGroup eventGroup1(sourceBuffer1);
std::string inJson = R"({
"events" :
[
{
"contents" :
{
"key1" : "value1xxxxx",
"key2" : "value2xxxxx"
},
"timestampNanosecond" : 0,
"timestamp" : 12345678901,
"type" : 1
},
{
"contents" :
{
"key1" : "abcdeavalue1",
"key2" : "value2xxxxx"
},
"timestampNanosecond" : 0,
"timestamp" : 12345678901,
"type" : 1
}
]
})";
eventGroup1.FromJsonString(inJson);
// run function
std::vector<PipelineEventGroup> eventGroupList1;
eventGroupList1.emplace_back(std::move(eventGroup1));
processorInstance.Process(eventGroupList1);
std::string outJson = eventGroupList1[0].ToJsonString();
// judge result
std::string expectJson = R"({
"events" :
[
{
"contents" :
{
"key1" : "abcdeavalue1",
"key2" : "value2xxxxx"
},
"timestamp" : 12345678901,
"timestampNanosecond" : 0,
"type" : 1
}
]
})";
APSARA_TEST_STREQ_FATAL(CompactJson(expectJson).c_str(), CompactJson(outJson).c_str());
// case 2 : NOT all fields exist, so the event is discarded
auto sourceBuffer2 = std::make_shared<SourceBuffer>();
PipelineEventGroup eventGroup2(sourceBuffer2);
inJson = R"({
"events" :
[
{
"contents" :
{
"key1" : "abcvalue1"
},
"timestampNanosecond" : 0,
"timestamp" : 12345678901,
"type" : 1
}
]
})";
eventGroup2.FromJsonString(inJson);
// run function
std::vector<PipelineEventGroup> eventGroupList2;
eventGroupList2.emplace_back(std::move(eventGroup2));
processorInstance.Process(eventGroupList2);
outJson = eventGroupList2[0].ToJsonString();
// judge result
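// every event was filtered out, so the group serializes to "null"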
APSARA_TEST_STREQ_FATAL("null", CompactJson(outJson).c_str());
}
// case 2: nested boolean expression (a and not d) and (b or c)
{
// a: int, b: string, c: ip, d: date
const char* jsonStr
    = "{\n"
      "  \"operator\": \"and\",\n"
      "  \"operands\": [\n"
      "    {\n"
      "      \"operator\": \"and\",\n"
      "      \"operands\": [\n"
      "        {\n"
      "          \"type\": \"regex\",\n"
      "          \"key\": \"a\",\n"
      "          \"exp\": \"\\\\d+\"\n"
      "        },\n"
      "        {\n"
      "          \"operator\": \"not\",\n"
      "          \"operands\": [\n"
      "            {\n"
      "              \"type\": \"regex\",\n"
      "              \"key\": \"d\",\n"
      "              \"exp\": \"20\\\\d{1,2}-\\\\d{1,2}-\\\\d{1,2}\"\n"
      "            }\n"
      "          ]\n"
      "        }\n"
      "      ]\n"
      "    },\n"
      "    {\n"
      "      \"operator\": \"or\",\n"
      "      \"operands\": [\n"
      "        {\n"
      "          \"type\": \"regex\",\n"
      "          \"key\": \"b\",\n"
      "          \"exp\": \"\\\\S+\"\n"
      "        },\n"
      "        {\n"
      "          \"type\": \"regex\",\n"
      "          \"key\": \"c\",\n"
      "          \"exp\": \"((2[0-4]\\\\d|25[0-5]|[01]?\\\\d\\\\d?)\\\\.){3}(2[0-4]\\\\d|25[0-5]|[01]?\\\\d\\\\d?)\"\n"
      "        }\n"
      "      ]\n"
      "    }\n"
      "  ]\n"
      "}";
Json::Reader reader;
Json::Value rootNode;
APSARA_TEST_TRUE_FATAL(reader.parse(jsonStr, rootNode));
// init
Json::Value config;
// (a and not d) and (b or c)
config["ConditionExp"] = rootNode;
config["DiscardingNonUTF8"] = true;
ProcessorFilterNative& processor = *(new ProcessorFilterNative);
ProcessorInstance processorInstance(&processor, getPluginMeta());
APSARA_TEST_TRUE_FATAL(processorInstance.Init(config, mContext));
auto sourceBuffer1 = std::make_shared<SourceBuffer>();
PipelineEventGroup eventGroup1(sourceBuffer1);
std::string inJson = R"({
"events" :
[
{
"contents" :
{
"a" : "100",
"b" : "xxx",
"c" : "192.168.1.1",
"d" : "2008-08-08"
},
"timestampNanosecond" : 0,
"timestamp" : 12345678901,
"type" : 1
},
{
"contents" :
{
"a" : "100",
"b" : "xxx",
"c" : "888.168.1.1",
"d" : "1999-1-1"
},
"timestampNanosecond" : 0,
"timestamp" : 12345678901,
"type" : 1
},
{
"contents" :
{
"a" : "aaa",
"b" : "xxx",
"c" : "8.8.8.8",
"d" : "2222-22-22"
},
"timestampNanosecond" : 0,
"timestamp" : 12345678901,
"type" : 1
}
]
})";
eventGroup1.FromJsonString(inJson);
// run function
std::vector<PipelineEventGroup> eventGroupList1;
eventGroupList1.emplace_back(std::move(eventGroup1));
processorInstance.Process(eventGroupList1);
std::string outJson = eventGroupList1[0].ToJsonString();
// judge result
std::string expectJson = R"({
"events" :
[
{
"contents" :
{
"a" : "100",
"b" : "xxx",
"c" : "888.168.1.1",
"d" : "1999-1-1"
},
"timestamp" : 12345678901,
"timestampNanosecond" : 0,
"type" : 1
}
]
})";
APSARA_TEST_STREQ_FATAL(CompactJson(expectJson).c_str(), CompactJson(outJson).c_str());
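// 2 of the 3 input events should have been discarded by the filter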
APSARA_TEST_EQUAL_FATAL(
2, processorInstance.mInEventsTotal->GetValue() - processorInstance.mOutEventsTotal->GetValue());
}
{
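// well-formed "and" expression with two regex operands; Init should succeed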
const char* jsonStr = "{\n"
" \"operator\": \"and\",\n"
" \"operands\": [\n"
" {\n"
" \"type\": \"regex\",\n"
" \"key\": \"a\",\n"
" \"exp\": \"regex1\"\n"
" },\n"
" {\n"
" \"type\": \"regex\",\n"
" \"key\": \"b\",\n"
" \"exp\": \"regex2\"\n"
" }\n"
" ]\n"
"}";
Json::Reader reader;
Json::Value rootNode;
APSARA_TEST_TRUE_FATAL(reader.parse(jsonStr, rootNode));
// init
Json::Value config;
config["ConditionExp"] = rootNode;
config["DiscardingNonUTF8"] = true;
ProcessorFilterNative& processor = *(new ProcessorFilterNative);
ProcessorInstance processorInstance(&processor, getPluginMeta());
APSARA_TEST_TRUE_FATAL(processorInstance.Init(config, mContext));
}
{
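// a single regex condition with no operator node; Init should succeed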
const char* jsonStr = "{\n"
" \"key\": \"a\",\n"
" \"exp\": \"xxx\",\n"
" \"type\": \"regex\"\n"
"}";
Json::Reader reader;
Json::Value rootNode;
APSARA_TEST_TRUE_FATAL(reader.parse(jsonStr, rootNode));
// init
Json::Value config;
config["ConditionExp"] = rootNode;
config["DiscardingNonUTF8"] = true;
ProcessorFilterNative& processor = *(new ProcessorFilterNative);
ProcessorInstance processorInstance(&processor, getPluginMeta());
APSARA_TEST_TRUE_FATAL(processorInstance.Init(config, mContext));
}
{
// unary "not" operator with a single operand; Init should succeed
const char* jsonStr = "{\n"
" \"operator\": \"not\",\n"
" \"operands\": [\n"
" {\n"
" \"type\": \"regex\",\n"
" \"key\": \"a\",\n"
" \"exp\": \"regex1\"\n"
" }\n"
" ]\n"
"}";
Json::Reader reader;
Json::Value rootNode;
APSARA_TEST_TRUE_FATAL(reader.parse(jsonStr, rootNode));
// init
Json::Value config;
config["ConditionExp"] = rootNode;
config["DiscardingNonUTF8"] = true;
ProcessorFilterNative& processor = *(new ProcessorFilterNative);
ProcessorInstance processorInstance(&processor, getPluginMeta());
APSARA_TEST_TRUE_FATAL(processorInstance.Init(config, mContext));
}
{
// the operand for key "b" is missing its "exp" regex; Init should fail
const char* jsonStr = "{\n"
" \"operator\": \"and\",\n"
" \"operands\": [\n"
" {\n"
" \"type\": \"regex\",\n"
" \"key\": \"a\",\n"
" \"exp\": \"regex1\"\n"
" },\n"
" {\n"
" \"operator\": \"or\",\n"
" \"operands\": [\n"
" {\n"
" \"type\": \"regex\",\n"
" \"key\": \"b\"\n"
" },\n"
" {\n"
" \"type\": \"regex\",\n"
" \"key\": \"c\",\n"
" \"exp\": \"regex3\"\n"
" }\n"
" ]\n"
" }\n"
" ]\n"
"}";
Json::Reader reader;
Json::Value rootNode;
APSARA_TEST_TRUE_FATAL(reader.parse(jsonStr, rootNode));
// init
Json::Value config;
config["ConditionExp"] = rootNode;
config["DiscardingNonUTF8"] = true;
ProcessorFilterNative& processor = *(new ProcessorFilterNative);
ProcessorInstance processorInstance(&processor, getPluginMeta());
APSARA_TEST_TRUE_FATAL(!processorInstance.Init(config, mContext));
}
{
// the nested "or" operator is missing its right operand; Init should fail
const char* jsonStr = "{\n"
" \"operator\": \"and\",\n"
" \"operands\": [\n"
" {\n"
" \"type\": \"regex\",\n"
" \"key\": \"a\",\n"
" \"exp\": \"regex1\"\n"
" },\n"
" {\n"
" \"operator\": \"or\",\n"
" \"operands\": [\n"
" {\n"
" \"type\": \"regex\",\n"
" \"key\": \"b\",\n"
" \"exp\": \"regex2\"\n"
" }\n"
" ]\n"
" }\n"
" ]\n"
"}";
Json::Reader reader;
Json::Value rootNode;
APSARA_TEST_TRUE_FATAL(reader.parse(jsonStr, rootNode));
// init
Json::Value config;
config["ConditionExp"] = rootNode;
config["DiscardingNonUTF8"] = true;
ProcessorFilterNative& processor = *(new ProcessorFilterNative);
ProcessorInstance processorInstance(&processor, getPluginMeta());
APSARA_TEST_TRUE_FATAL(!processorInstance.Init(config, mContext));
}
{
// the nested node has "operands" but no "operator"; Init should fail
const char* jsonStr = "{\n"
" \"operator\": \"and\",\n"
" \"operands\": [\n"
" {\n"
" \"type\": \"regex\",\n"
" \"key\": \"a\",\n"
" \"exp\": \"regex1\"\n"
" },\n"
" {\n"
" \"operands\": [\n"
" {\n"
" \"type\": \"regex\",\n"
" \"key\": \"b\",\n"
" \"exp\": \"regex2\"\n"
" },\n"
" {\n"
" \"type\": \"regex\",\n"
" \"key\": \"c\",\n"
" \"exp\": \"regex3\"\n"
" }\n"
" ]\n"
" }\n"
" ]\n"
"}";
Json::Reader reader;
Json::Value rootNode;
APSARA_TEST_TRUE_FATAL(reader.parse(jsonStr, rootNode));
// init
Json::Value config;
config["ConditionExp"] = rootNode;
config["DiscardingNonUTF8"] = true;
ProcessorFilterNative& processor = *(new ProcessorFilterNative);
ProcessorInstance processorInstance(&processor, getPluginMeta());
APSARA_TEST_TRUE_FATAL(!processorInstance.Init(config, mContext));
}
// redundant fields: the root carries both operator/operands and type/key/exp; it is parsed as an operator node
{
const char* jsonStr = "{\n"
" \"operator\": \"and\",\n"
" \"operands\": [\n"
" {\n"
" \"type\": \"regex\",\n"
" \"key\": \"b\",\n"
" \"exp\": \"regex2\"\n"
" },\n"
" {\n"
" \"type\": \"regex\",\n"
" \"key\": \"c\",\n"
" \"exp\": \"regex3\"\n"
" }\n"
" ],\n"
" \"type\": \"regex\",\n"
" \"key\": \"c\",\n"
" \"exp\": \"regex3\"\n"
"}";
Json::Reader reader;
Json::Value rootNode;
APSARA_TEST_TRUE_FATAL(reader.parse(jsonStr, rootNode));
// init
Json::Value config;
config["ConditionExp"] = rootNode;
config["DiscardingNonUTF8"] = true;
ProcessorFilterNative& processor = *(new ProcessorFilterNative);
ProcessorInstance processorInstance(&processor, getPluginMeta());
APSARA_TEST_TRUE_FATAL(processorInstance.Init(config, mContext));
APSARA_TEST_TRUE(processor.mConditionExp->GetNodeType() == OPERATOR_NODE);
}
}