Commit 0e5c63e8 authored by Ben Kelly, committed by Commit Bot

URLPattern: Translate path-to-regexp parser to c++.

This CL translates the path-to-regexp parser from TypeScript to C++.
It is based on the code here:

https://github.com/pillarjs/path-to-regexp/blob/125c43e6481f68cc771a5af22b914acdb8c5ba1f/src/index.ts#L126-L232

We deviate from the path-to-regexp code in some terminology and output
structure.  In particular, we output a list of Part structures with
types of kFixed, kRegex, kSegmentWildcard, or kFullWildcard as discussed
in the design doc:

https://docs.google.com/document/d/17L6b3zlTHtyxQvOAvbK55gQOi5rrJLERwjt_sKXpzqc/edit#heading=h.ymw6rim68920

Note, this CL also deviates from the design doc in that it translates a
more modern version of path-to-regexp than originally planned. That
version uses a "{...}?" style grouping for applying modifiers. We also
do not yet include full support for the "*" wildcard character. As a
result, the grammar in the design doc does not fully apply to this CL.
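
As a rough sketch of the intended usage (mirroring the expectations in
parse_unittest.cc added by this CL):

  // Hypothetical caller code; not part of this CL.
  absl::StatusOr<Pattern> result = Parse("/foo/:bar");
  if (result.ok()) {
    // Per parse_unittest.cc the expected parts are:
    //   Part(PartType::kFixed, "/foo", Modifier::kNone)
    //   Part(PartType::kSegmentWildcard, /*name=*/"bar", /*prefix=*/"/",
    //        /*value=*/"", /*suffix=*/"", Modifier::kNone)
    const std::vector<Part>& parts = result.value().PartList();
  }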

Bug: 1141510
Change-Id: I1c3e4ef76587496301d2171fa9f76b51754dce0d
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2518206
Commit-Queue: Ben Kelly <wanderview@chromium.org>
Reviewed-by: Jeremy Roman <jbroman@chromium.org>
Cr-Commit-Position: refs/heads/master@{#826968}
parent 2c7f8a5d
......@@ -15,9 +15,12 @@ component("liburlpattern") {
sources = [
"parse.cc",
"parse.h",
"pattern.cc",
"pattern.h",
"tokenize.cc",
"tokenize.h",
"utils.cc",
"utils.h",
]
}
......@@ -30,8 +33,9 @@ test("liburlpattern_unittests") {
# Note, also update the local modifications in README.chromium.
sources = [
"liburlpattern_unittest.cc",
"parse_unittest.cc",
"tokenize_unittest.cc",
"utils_unittest.cc",
]
testonly = true
}
......@@ -19,6 +19,12 @@ Local Modifications:
third_party/liburlpattern/BUILD.gn
third_party/liburlpattern/parse.h
third_party/liburlpattern/parse.cc
third_party/liburlpattern/parse_unittest.cc
third_party/liburlpattern/pattern.h
third_party/liburlpattern/pattern.cc
third_party/liburlpattern/tokenize.h
third_party/liburlpattern/tokenize.cc
third_party/liburlpattern/tokenize_unittest.cc
third_party/liburlpattern/utils.h
third_party/liburlpattern/utils.cc
third_party/liburlpattern/utils_unittest.cc
......@@ -5,19 +5,312 @@
#include "third_party/liburlpattern/parse.h"
#include "third_party/abseil-cpp/absl/base/macros.h"
#include "third_party/abseil-cpp/absl/strings/str_format.h"
#include "third_party/liburlpattern/pattern.h"
#include "third_party/liburlpattern/tokenize.h"
#include "third_party/liburlpattern/utils.h"
// The following code is a translation from the path-to-regexp typescript at:
//
// https://github.com/pillarjs/path-to-regexp/blob/125c43e6481f68cc771a5af22b914acdb8c5ba1f/src/index.ts#L126-L232
namespace liburlpattern {
absl::StatusOr<Pattern> Parse(absl::string_view pattern) {
namespace {
// The "full wildcard" regex pattern. This regex value is treated specially
// resulting in a kFullWildcard Part instead of a kRegex Part.
static const char* kWildcardRegex = ".*";
// Helper class that tracks the parser state.
class State {
public:
State(std::vector<Token> token_list, absl::string_view delimiter_list)
: token_list_(std::move(token_list)),
segment_wildcard_regex_(
absl::StrFormat("[^%s]+?", EscapeString(delimiter_list))) {}
// Return true if there are more tokens to process.
bool HasMoreTokens() const { return index_ < token_list_.size(); }
// Attempt to consume the next Token, but only if it matches the given
// |type|. Returns a pointer to the Token on success or nullptr on failure.
const Token* TryConsume(TokenType type) {
ABSL_ASSERT(index_ < token_list_.size());
TokenType next_type = token_list_[index_].type;
if (next_type != type)
return nullptr;
// The last token should always be kEnd.
if ((index_ + 1) == token_list_.size())
ABSL_ASSERT(token_list_[index_].type == TokenType::kEnd);
return &(token_list_[index_++]);
}
// Consume the next Token requiring it to be the given |type|. If this
// is not possible then return an error.
absl::StatusOr<const Token*> MustConsume(TokenType type) {
ABSL_ASSERT(index_ < token_list_.size());
if (const Token* token = TryConsume(type))
return token;
return absl::InvalidArgumentError(
absl::StrFormat("Unexpected %s at %d, expected %s",
TokenTypeToString(token_list_[index_].type), index_,
TokenTypeToString(type)));
}
// Consume as many sequential kChar and kEscapedChar Tokens as possible
// appending them together into a single string value.
std::string ConsumeText() {
// Unfortunately we cannot use a view here and must copy into a new
// string. This is necessary to flatten escape sequences into
// a single value with other characters.
std::string result;
const Token* token = nullptr;
do {
token = TryConsume(TokenType::kChar);
if (!token)
token = TryConsume(TokenType::kEscapedChar);
if (token)
result.append(token->value.data(), token->value.size());
} while (token);
return result;
}
// Append the given Token value to the pending fixed value. This will
// be converted to a kFixed Part when we reach the end of a run of
// kChar and kEscapedChar tokens.
void AppendToPendingFixedValue(absl::string_view token_value) {
pending_fixed_value_.append(token_value.data(), token_value.size());
}
// Convert the pending fixed value, if any, to a kFixed Part. Has no effect
// if there is no pending value.
void MaybeAddPartFromPendingFixedValue() {
if (pending_fixed_value_.empty())
return;
part_list_.emplace_back(PartType::kFixed, std::move(pending_fixed_value_),
Modifier::kNone);
pending_fixed_value_ = "";
}
// Add a Part for the given set of tokens.
void AddPart(std::string prefix,
const Token* name_token,
const Token* regex_token,
std::string suffix,
const Token* modifier_token) {
// Convert the kModifier Token into a Modifier enum value.
Modifier modifier = Modifier::kNone;
if (modifier_token) {
ABSL_ASSERT(!modifier_token->value.empty());
switch (modifier_token->value[0]) {
case '?':
modifier = Modifier::kOptional;
break;
case '*':
modifier = Modifier::kZeroOrMore;
break;
case '+':
modifier = Modifier::kOneOrMore;
break;
default:
ABSL_ASSERT(false);
break;
}
}
// If there are no name or regex tokens then this is just a fixed string
// grouping; e.g. "{foo}?". The fixed string ends up in the prefix value
// since it consumed the entire text of the grouping. If the prefix value
// is empty then this is an empty "{}" group and we return without adding any
// Part.
if (!name_token && !regex_token) {
ABSL_ASSERT(suffix.empty());
if (!prefix.empty())
part_list_.emplace_back(PartType::kFixed, std::move(prefix), modifier);
return;
}
// Determine the regex value. If there is a kRegex Token, then this is
// explicitly set by that Token. Otherwise a kName Token by itself gets
// an implicit regex value that matches through to the end of the segment.
// This is represented by the |segment_wildcard_regex_| value.
std::string regex_value;
if (regex_token)
regex_value = std::string(regex_token->value);
else
regex_value = segment_wildcard_regex_;
// Next determine the type of the Part. This depends on the regex value
// since we give certain values special treatment with their own type.
// A |segment_wildcard_regex_| is mapped to the kSegmentWildcard type. A
// |kWildcardRegex| is mapped to the kFullWildcard type. Otherwise
// the Part gets the kRegex type.
PartType type = PartType::kRegex;
if (regex_value == segment_wildcard_regex_) {
type = PartType::kSegmentWildcard;
regex_value = "";
} else if (regex_value == kWildcardRegex) {
type = PartType::kFullWildcard;
regex_value = "";
}
// Every kRegex, kSegmentWildcard, and kFullWildcard Part must have a
// group name. If there was a kName Token, then use the explicitly
// set name. Otherwise we generate a numeric based key for the name.
std::string name;
if (name_token)
name = std::string(name_token->value);
else if (regex_token)
name = GenerateKey();
// Finally add the part to the list.
part_list_.emplace_back(type, std::move(name), std::move(prefix),
std::move(regex_value), std::move(suffix),
modifier);
}
Pattern TakeAsPattern() { return Pattern(std::move(part_list_)); }
private:
// Generate a numeric key string to be used for groups that do not
// have an explicit kName Token.
std::string GenerateKey() { return absl::StrFormat("%d", next_key_++); }
// The input list of Token objects to process.
const std::vector<Token> token_list_;
// The special regex value corresponding to the default regex value
// given to a lone kName Token. This is a variable since its value
// is dependent on the |delimiter_list| passed to the constructor.
const std::string segment_wildcard_regex_;
// The output list of Pattern Part objects.
std::vector<Part> part_list_;
// A buffer of kChar and kEscapedChar values that are pending the creation
// of a kFixed Part.
std::string pending_fixed_value_;
// The index of the next Token in |token_list_|.
size_t index_ = 0;
// The next value to use when generating a numeric based name for Parts
// without explicit kName Tokens.
int next_key_ = 0;
};
} // namespace
absl::StatusOr<Pattern> Parse(absl::string_view pattern,
absl::string_view delimiter_list,
absl::string_view prefix_list) {
auto result = Tokenize(pattern);
if (!result.ok())
return result.status();
// TODO: Implement actual pattern parsing.
State state(std::move(result.value()), delimiter_list);
while (state.HasMoreTokens()) {
// Look for the sequence: <prefix char><name><regex><modifier>
// Anywhere from zero to all four of these tokens may be present. For
// example:
// * "/:foo(bar)?" - all four tokens
// * "/" - just a char token
// * ":foo" - just a name token
// * "(bar)" - just a regex token
// * "/:foo" - char and name tokens
// * "/(bar)" - char and regex tokens
// * "/:foo?" - char, name, and modifier tokens
// * "/(bar)?" - char, regex, and modifier tokens
const Token* char_token = state.TryConsume(TokenType::kChar);
const Token* name_token = state.TryConsume(TokenType::kName);
const Token* regex_token = state.TryConsume(TokenType::kRegex);
// If there is a name or regex token then we need to add a Pattern Part
// immediately.
if (name_token || regex_token) {
// Determine if the char token is a valid prefix. Only characters in the
// configured prefix_list are automatically treated as prefixes. A
// kEscapedChar Token is never treated as a prefix.
absl::string_view prefix = char_token ? char_token->value : "";
if (prefix_list.find(prefix) == std::string::npos) {
// This is not a prefix character. Add it to the buffered characters
// to be added as a kFixed Part later.
state.AppendToPendingFixedValue(prefix);
prefix = absl::string_view();
}
// If we have any buffered characters in a pending fixed value, then
// convert them into a kFixed Part now.
state.MaybeAddPartFromPendingFixedValue();
// kName and kRegex tokens can optionally be followed by a modifier.
const Token* modifier_token = state.TryConsume(TokenType::kModifier);
// Add the Part for the name and regex tokens.
state.AddPart(std::string(prefix), name_token, regex_token, /*suffix=*/"",
modifier_token);
continue;
}
// There was neither a kRegex nor a kName token, so consider if we just have a
// fixed string part. A fixed string can consist of kChar or kEscapedChar
// tokens. These just get added to the buffered pending fixed value for
// now. It will get converted to a kFixed Part later.
const Token* fixed_token = char_token;
if (!fixed_token)
fixed_token = state.TryConsume(TokenType::kEscapedChar);
if (fixed_token) {
state.AppendToPendingFixedValue(fixed_token->value);
continue;
}
// There was not a kChar or kEscapedChar token, so we know we are at the end
// of any fixed string. Therefore convert the pending fixed value into a
// kFixed Part now.
state.MaybeAddPartFromPendingFixedValue();
// Look for the sequence:
//
// <open><char prefix><name><regex><char suffix><close><modifier>
//
// The open and close are required, but the other tokens are optional.
// For example:
// * "{a:foo(.*)b}?" - all tokens present
// * "{:foo}?" - just name and modifier tokens
// * "{(.*)}?" - just regex and modifier tokens
// * "{ab}?" - just char and modifier tokens
const Token* open_token = state.TryConsume(TokenType::kOpen);
if (open_token) {
std::string prefix = state.ConsumeText();
const Token* name_token = state.TryConsume(TokenType::kName);
const Token* regex_token = state.TryConsume(TokenType::kRegex);
std::string suffix = state.ConsumeText();
auto result = state.MustConsume(TokenType::kClose);
if (!result.ok())
return result.status();
const Token* modifier_token = state.TryConsume(TokenType::kModifier);
state.AddPart(std::move(prefix), name_token, regex_token,
std::move(suffix), modifier_token);
continue;
}
// We didn't find any tokens allowed by the syntax, so we should be
// at the end of the token list. If there is a syntax error, this
// is where it will typically be caught.
auto result = state.MustConsume(TokenType::kEnd);
if (!result.ok())
return result.status();
}
return Pattern();
return state.TakeAsPattern();
}
} // namespace liburlpattern
......@@ -18,8 +18,16 @@ class Pattern;
// Parse a pattern string and return the result. The input |pattern| must
// consist of ASCII characters. Any non-ASCII characters should be UTF-8
// encoded and % escaped, similar to URLs, prior to calling this function.
// |delimiter_list| contains a list of characters that are considered segment
// separators when matching a kSegmentWildcard Part. This is the behavior you
// get when you specify a name like `:foo` without a custom regular expression.
// The |prefix_list| contains a list of characters to automatically treat
// as a prefix when they appear before a kName or kRegex Token; e.g. by
// default "/:foo" includes the leading "/" as the prefix for the "foo" named
// group.
COMPONENT_EXPORT(LIBURLPATTERN)
absl::StatusOr<Pattern> Parse(absl::string_view pattern);
absl::StatusOr<Pattern> Parse(absl::string_view pattern,
absl::string_view delimiter_list = "/#?",
absl::string_view prefix_list = "./");
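// For illustration only (see parse_unittest.cc): with the defaults above,
// Parse("/foo/:bar") is expected to produce a kFixed Part for "/foo"
// followed by a kSegmentWildcard Part named "bar" whose prefix is the "/"
// preceding the name.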
} // namespace liburlpattern
......
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by an MIT-style license that can be
// found in the LICENSE file or at https://opensource.org/licenses/MIT.
#include "third_party/liburlpattern/parse.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/liburlpattern/pattern.h"
namespace liburlpattern {
void RunParseTest(absl::string_view pattern,
absl::StatusOr<std::vector<Part>> expected) {
auto result = Parse(pattern);
ASSERT_EQ(result.ok(), expected.ok())
<< "parse status '" << result.status() << "' for: " << pattern;
if (!expected.ok()) {
ASSERT_EQ(result.status().code(), expected.status().code())
<< "parse status code for: " << pattern;
EXPECT_NE(result.status().message().find(expected.status().message()),
std::string::npos)
<< "parse message '" << result.status().message()
<< "' does not contain '" << expected.status().message()
<< "' for: " << pattern;
return;
}
const auto& expected_part_list = expected.value();
const auto& part_list = result.value().PartList();
EXPECT_EQ(part_list.size(), expected_part_list.size())
<< "parser should produce expected number of parts for: " << pattern;
for (size_t i = 0; i < part_list.size() && i < expected_part_list.size();
++i) {
EXPECT_EQ(part_list[i], expected_part_list[i])
<< "token at index " << i << " wrong for: " << pattern;
}
}
TEST(ParseTest, EmptyPattern) {
RunParseTest("", std::vector<Part>());
}
TEST(ParseTest, InvalidChar) {
RunParseTest("/foo/ßar", absl::InvalidArgumentError("Invalid character"));
}
TEST(ParseTest, Fixed) {
std::vector<Part> expected_parts = {
Part(PartType::kFixed, "/foo", Modifier::kNone),
};
RunParseTest("/foo", expected_parts);
}
TEST(ParseTest, FixedInGroup) {
std::vector<Part> expected_parts = {
Part(PartType::kFixed, "/foo", Modifier::kNone),
};
RunParseTest("{/foo}", expected_parts);
}
TEST(ParseTest, FixedAndEmptyGroup) {
std::vector<Part> expected_parts = {
Part(PartType::kFixed, "/f", Modifier::kNone),
Part(PartType::kFixed, "oo", Modifier::kNone),
};
RunParseTest("/f{}oo", expected_parts);
}
TEST(ParseTest, FixedInGroupWithOptionalModifier) {
std::vector<Part> expected_parts = {
Part(PartType::kFixed, "/foo", Modifier::kOptional),
};
RunParseTest("{/foo}?", expected_parts);
}
TEST(ParseTest, FixedInGroupWithZeroOrMoreModifier) {
std::vector<Part> expected_parts = {
Part(PartType::kFixed, "/foo", Modifier::kZeroOrMore),
};
RunParseTest("{/foo}*", expected_parts);
}
TEST(ParseTest, FixedInGroupWithOneOrMoreModifier) {
std::vector<Part> expected_parts = {
Part(PartType::kFixed, "/foo", Modifier::kOneOrMore),
};
RunParseTest("{/foo}+", expected_parts);
}
TEST(ParseTest, FixedInEarlyTerminatedGroup) {
RunParseTest("{/foo", absl::InvalidArgumentError("expected CLOSE"));
}
TEST(ParseTest, FixedInUnbalancedGroup) {
RunParseTest("{/foo?", absl::InvalidArgumentError("expected CLOSE"));
}
TEST(ParseTest, FixedWithModifier) {
RunParseTest("/foo?", absl::InvalidArgumentError("Unexpected MODIFIER"));
}
TEST(ParseTest, Regex) {
std::vector<Part> expected_parts = {
Part(PartType::kFixed, "/f", Modifier::kNone),
Part(PartType::kRegex, /*name=*/"0", /*prefix=*/"", "oo", /*suffix=*/"",
Modifier::kNone),
};
RunParseTest("/f(oo)", expected_parts);
}
TEST(ParseTest, RegexInGroup) {
std::vector<Part> expected_parts = {
Part(PartType::kFixed, "/f", Modifier::kNone),
Part(PartType::kRegex, /*name=*/"0", /*prefix=*/"", "oo", /*suffix=*/"",
Modifier::kNone),
};
RunParseTest("/f{(oo)}", expected_parts);
}
TEST(ParseTest, RegexWithPrefixAndSuffixInGroup) {
std::vector<Part> expected_parts = {
Part(PartType::kFixed, "/", Modifier::kNone),
Part(PartType::kRegex, /*name=*/"0", /*prefix=*/"f", "o", /*suffix=*/"o",
Modifier::kNone),
};
RunParseTest("/{f(o)o}", expected_parts);
}
TEST(ParseTest, RegexAndRegexInGroup) {
RunParseTest("/f{(o)(o)}", absl::InvalidArgumentError("expected CLOSE"));
}
TEST(ParseTest, RegexWithPrefix) {
std::vector<Part> expected_parts = {
Part(PartType::kRegex, /*name=*/"0", /*prefix=*/"/", "foo", /*suffix=*/"",
Modifier::kNone),
};
RunParseTest("/(foo)", expected_parts);
}
TEST(ParseTest, RegexWithNameAndPrefix) {
std::vector<Part> expected_parts = {
Part(PartType::kFixed, "/foo", Modifier::kNone),
Part(PartType::kRegex, /*name=*/"bar", /*prefix=*/"/", "[^/]+?",
/*suffix=*/"", Modifier::kNone),
};
RunParseTest("/foo/:bar([^/]+?)", expected_parts);
}
TEST(ParseTest, RegexWithNameAndPrefixInGroup) {
std::vector<Part> expected_parts = {
Part(PartType::kFixed, "/foo/", Modifier::kNone),
Part(PartType::kRegex, /*name=*/"bar", /*prefix=*/"", "[^/]+?",
/*suffix=*/"", Modifier::kNone),
};
RunParseTest("/foo/{:bar([^/]+?)}", expected_parts);
}
TEST(ParseTest, RegexWithModifier) {
std::vector<Part> expected_parts = {
Part(PartType::kRegex, /*name=*/"0", /*prefix=*/"/", "foo",
/*suffix=*/"", Modifier::kOptional),
};
RunParseTest("/(foo)?", expected_parts);
}
TEST(ParseTest, RegexLikeFullWildcard) {
std::vector<Part> expected_parts = {
Part(PartType::kFullWildcard, /*name=*/"0", /*prefix=*/"/", "",
/*suffix=*/"", Modifier::kNone),
};
RunParseTest("/(.*)", expected_parts);
}
TEST(ParseTest, Name) {
std::vector<Part> expected_parts = {
Part(PartType::kFixed, "/foo", Modifier::kNone),
Part(PartType::kSegmentWildcard, /*name=*/"bar", /*prefix=*/"",
/*value=*/"", /*suffix=*/"", Modifier::kNone),
};
RunParseTest("/foo:bar", expected_parts);
}
TEST(ParseTest, NameInGroup) {
std::vector<Part> expected_parts = {
Part(PartType::kFixed, "/foo", Modifier::kNone),
Part(PartType::kSegmentWildcard, /*name=*/"bar", /*prefix=*/"",
/*value=*/"", /*suffix=*/"", Modifier::kNone),
};
RunParseTest("/foo{:bar}", expected_parts);
}
TEST(ParseTest, NameAndNameInGroup) {
RunParseTest("/foo{:bar:baz}", absl::InvalidArgumentError("expected CLOSE"));
}
TEST(ParseTest, NameWithPrefixAndSuffixInGroup) {
std::vector<Part> expected_parts = {
Part(PartType::kFixed, "/foo/", Modifier::kNone),
Part(PartType::kSegmentWildcard, /*name=*/"bar", /*prefix=*/"data_",
/*value=*/"", /*suffix=*/".jpg", Modifier::kNone),
};
RunParseTest("/foo/{data_:bar.jpg}", expected_parts);
}
TEST(ParseTest, NameWithPrefix) {
std::vector<Part> expected_parts = {
Part(PartType::kFixed, "/foo", Modifier::kNone),
Part(PartType::kSegmentWildcard, /*name=*/"bar", /*prefix=*/"/",
/*value=*/"", /*suffix=*/"", Modifier::kNone),
};
RunParseTest("/foo/:bar", expected_parts);
}
TEST(ParseTest, NameWithEscapedPrefix) {
std::vector<Part> expected_parts = {
Part(PartType::kFixed, "/foo/", Modifier::kNone),
Part(PartType::kSegmentWildcard, /*name=*/"bar", /*prefix=*/"",
/*value=*/"", /*suffix=*/"", Modifier::kNone),
};
RunParseTest("/foo\\/:bar", expected_parts);
}
TEST(ParseTest, NameWithCustomRegex) {
std::vector<Part> expected_parts = {
Part(PartType::kFixed, "/foo", Modifier::kNone),
Part(PartType::kRegex, /*name=*/"bar", /*prefix=*/"", "[^/]+?",
/*suffix=*/"", Modifier::kNone),
};
RunParseTest("/foo:bar([^/]+?)", expected_parts);
}
TEST(ParseTest, NameWithModifier) {
std::vector<Part> expected_parts = {
Part(PartType::kSegmentWildcard, /*name=*/"foo", /*prefix=*/"/",
/*value=*/"", /*suffix=*/"", Modifier::kOptional),
};
RunParseTest("/:foo?", expected_parts);
}
} // namespace liburlpattern
// Copyright 2020 The Chromium Authors. All rights reserved.
// Copyright 2014 Blake Embrey (hello@blakeembrey.com)
// Use of this source code is governed by an MIT-style license that can be
// found in the LICENSE file or at https://opensource.org/licenses/MIT.
#include "third_party/liburlpattern/pattern.h"
#include "third_party/abseil-cpp/absl/base/macros.h"
#include "third_party/abseil-cpp/absl/strings/str_format.h"
namespace liburlpattern {
std::ostream& operator<<(std::ostream& o, Part part) {
o << "{ type:" << static_cast<int>(part.type) << ", name:" << part.name
<< ", prefix:" << part.prefix << ", value:" << part.value
<< ", suffix:" << part.suffix
<< ", modifier:" << static_cast<int>(part.modifier) << " }";
return o;
}
Part::Part(PartType t, std::string v, Modifier m)
: type(t), value(std::move(v)), modifier(m) {
ABSL_ASSERT(type == PartType::kFixed);
}
Part::Part(PartType t,
std::string n,
std::string p,
std::string v,
std::string s,
Modifier m)
: type(t),
name(std::move(n)),
prefix(std::move(p)),
value(std::move(v)),
suffix(std::move(s)),
modifier(m) {
ABSL_ASSERT(type != PartType::kFixed);
ABSL_ASSERT(!name.empty());
if (type == PartType::kFullWildcard || type == PartType::kSegmentWildcard)
ABSL_ASSERT(value.empty());
}
Pattern::Pattern(std::vector<Part> part_list)
: part_list_(std::move(part_list)) {}
} // namespace liburlpattern
// Copyright 2020 The Chromium Authors. All rights reserved.
// Copyright 2014 Blake Embrey (hello@blakeembrey.com)
// Use of this source code is governed by an MIT-style license that can be
// found in the LICENSE file or at https://opensource.org/licenses/MIT.
#ifndef THIRD_PARTY_LIBURLPATTERN_PATTERN_H_
#define THIRD_PARTY_LIBURLPATTERN_PATTERN_H_
#include <string>
#include <vector>
#include "base/component_export.h"
// NOTE: This code is a work-in-progress. It is not ready for production use.
namespace liburlpattern {
enum class PartType {
// A fixed, non-variable part of the pattern. Consists of kChar and
// kEscapedChar Tokens.
kFixed,
// A part with a custom regular expression.
kRegex,
// A part that matches any characters up to the next segment separator.
kSegmentWildcard,
// A part that matches any characters up to the end of the input string.
kFullWildcard,
};
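// For illustration (per the expectations in parse_unittest.cc): "/foo"
// parses to a kFixed Part, the "(oo)" in "/f(oo)" to a kRegex Part, the
// ":bar" in "/foo:bar" to a kSegmentWildcard Part, and the "(.*)" in
// "/(.*)" to a kFullWildcard Part.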
enum class Modifier {
// No modifier.
kNone,
// The `?` modifier.
kOptional,
// The `*` modifier.
kZeroOrMore,
// The `+` modifier.
kOneOrMore,
};
// A structure representing one part of a parsed Pattern. A full Pattern
// consists of an ordered sequence of Part objects.
struct COMPONENT_EXPORT(LIBURLPATTERN) Part {
// The type of the Part.
const PartType type = PartType::kFixed;
// The name of the Part. Only kRegex, kSegmentWildcard, and kFullWildcard
// parts may have a |name|. kFixed parts must have an empty |name|.
const std::string name;
// A fixed string prefix that is expected before any regex or wildcard match.
// kFixed parts must have an empty |prefix|.
const std::string prefix;
// The meaning of the |value| depends on the |type| of the Part. For kFixed
// parts the |value| contains the fixed string to match. For kRegex parts
// the |value| contains a regular expression to match. The |value| is empty
// for kSegmentWildcard and kFullWildcard parts since the |type| encodes what
// to match.
const std::string value;
// A fixed string suffix that is expected after any regex or wildcard match.
// kFixed parts must have an empty |suffix|.
const std::string suffix;
// A |modifier| indicating whether the Part is optional and/or repeated. Any
// Part type may have a |modifier|.
const Modifier modifier = Modifier::kNone;
Part(PartType type, std::string value, Modifier modifier);
Part(PartType type,
std::string name,
std::string prefix,
std::string value,
std::string suffix,
Modifier modifier);
Part() = default;
};
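// Example (consistent with parse_unittest.cc): parsing "/:foo?" is expected
// to produce a single Part equal to
// Part(PartType::kSegmentWildcard, /*name=*/"foo", /*prefix=*/"/",
// /*value=*/"", /*suffix=*/"", Modifier::kOptional).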
COMPONENT_EXPORT(LIBURLPATTERN)
inline bool operator==(const Part& lh, const Part& rh) {
return lh.type == rh.type && lh.name == rh.name && lh.prefix == rh.prefix &&
lh.value == rh.value && lh.suffix == rh.suffix && lh.modifier == rh.modifier;
}
inline bool operator!=(const Part& lh, const Part& rh) {
return !(lh == rh);
}
COMPONENT_EXPORT(LIBURLPATTERN)
std::ostream& operator<<(std::ostream& o, Part part);
// This class represents a successfully parsed pattern string. It will contain
// an intermediate representation that can be used either to generate a
// regular expression string or to match directly against input strings.
// Not all patterns are supported for direct matching.
class COMPONENT_EXPORT(LIBURLPATTERN) Pattern {
// TODO: Implement pattern details.
public:
explicit Pattern(std::vector<Part> part_list);
const std::vector<Part>& PartList() const { return part_list_; }
private:
std::vector<Part> part_list_;
};
} // namespace liburlpattern
......
......@@ -7,6 +7,10 @@
#include "third_party/abseil-cpp/absl/strings/str_format.h"
// The following code is a translation from the path-to-regexp typescript at:
//
// https://github.com/pillarjs/path-to-regexp/blob/125c43e6481f68cc771a5af22b914acdb8c5ba1f/src/index.ts#L4-L124
namespace liburlpattern {
namespace {
......@@ -24,16 +28,34 @@ bool IsNameChar(char c) {
} // namespace
const char* TokenTypeToString(TokenType type) {
switch (type) {
case TokenType::kOpen:
return "OPEN";
case TokenType::kClose:
return "CLOSE";
case TokenType::kRegex:
return "REGEX";
case TokenType::kName:
return "NAME";
case TokenType::kChar:
return "CHAR";
case TokenType::kEscapedChar:
return "ESCAPED_CHAR";
case TokenType::kModifier:
return "MODIFIER";
case TokenType::kEnd:
return "END";
}
}
std::ostream& operator<<(std::ostream& o, Token token) {
o << "{ type:" << static_cast<int>(token.type) << ", index:" << token.index
<< ", value:" << token.value << " }";
return o;
}
// Split the input pattern into a list of tokens. Originally translated to
// c++ from:
//
// https://github.com/pillarjs/path-to-regexp/blob/125c43e6481f68cc771a5af22b914acdb8c5ba1f/src/index.ts#L4-L124
// Split the input pattern into a list of tokens.
absl::StatusOr<std::vector<Token>> Tokenize(absl::string_view pattern) {
// Verify that all characters are valid before parsing. This simplifies the
// following logic.
......
......@@ -39,17 +39,19 @@ enum class TokenType {
kEnd,
};
const char* TokenTypeToString(TokenType type);
// Simple structure representing a single lexical token.
struct COMPONENT_EXPORT(LIBURLPATTERN) Token {
// Indicate the token type.
TokenType type = TokenType::kEnd;
const TokenType type = TokenType::kEnd;
// Index of the start of this token in the original pattern string.
size_t index = 0;
const size_t index = 0;
// The value of the token. May be one or many characters depending on type.
// May be zero characters for the kEnd type.
absl::string_view value;
const absl::string_view value;
Token(TokenType t, size_t i, absl::string_view v)
: type(t), index(i), value(v) {}
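// Example (per tokenize_unittest.cc): Tokenize(":Foo_1") is expected to
// yield Token(kName, 0, "Foo_1") followed by Token(kEnd, 6, "").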
......@@ -61,7 +63,6 @@ inline bool operator==(const Token& lh, const Token& rh) {
return lh.type == rh.type && lh.index == rh.index && lh.value == rh.value;
}
COMPONENT_EXPORT(LIBURLPATTERN)
inline bool operator!=(const Token& lh, const Token& rh) {
return !(lh == rh);
}
......
......@@ -8,7 +8,7 @@
namespace liburlpattern {
void RunTokenizerTest(absl::string_view pattern,
void RunTokenizeTest(absl::string_view pattern,
absl::StatusOr<std::vector<Token>> expected) {
auto result = Tokenize(pattern);
ASSERT_EQ(result.ok(), expected.ok()) << "lexer status for: " << pattern;
......@@ -33,7 +33,14 @@ void RunTokenizerTest(absl::string_view pattern,
}
}
TEST(TokenizerTest, Chars) {
TEST(TokenizeTest, EmptyPattern) {
std::vector<Token> expected_tokens = {
Token(TokenType::kEnd, 0, absl::string_view()),
};
RunTokenizeTest("", expected_tokens);
}
TEST(TokenizeTest, Chars) {
std::vector<Token> expected_tokens = {
Token(TokenType::kChar, 0, "/"),
Token(TokenType::kChar, 1, "f"),
......@@ -41,10 +48,10 @@ TEST(TokenizerTest, Chars) {
Token(TokenType::kChar, 3, "o"),
Token(TokenType::kEnd, 4, absl::string_view()),
};
RunTokenizerTest("/foo", expected_tokens);
RunTokenizeTest("/foo", expected_tokens);
}
TEST(TokenizerTest, CharsWithClosingParen) {
TEST(TokenizeTest, CharsWithClosingParen) {
std::vector<Token> expected_tokens = {
Token(TokenType::kChar, 0, "/"),
Token(TokenType::kChar, 1, "f"),
......@@ -53,10 +60,10 @@ TEST(TokenizerTest, CharsWithClosingParen) {
Token(TokenType::kChar, 4, ")"),
Token(TokenType::kEnd, 5, absl::string_view()),
};
RunTokenizerTest("/foo)", expected_tokens);
RunTokenizeTest("/foo)", expected_tokens);
}
TEST(TokenizerTest, EscapedChar) {
TEST(TokenizeTest, EscapedChar) {
std::vector<Token> expected_tokens = {
Token(TokenType::kChar, 0, "/"),
Token(TokenType::kEscapedChar, 1, "f"),
......@@ -64,10 +71,10 @@ TEST(TokenizerTest, EscapedChar) {
Token(TokenType::kChar, 4, "o"),
Token(TokenType::kEnd, 5, absl::string_view()),
};
RunTokenizerTest("/\\foo", expected_tokens);
RunTokenizeTest("/\\foo", expected_tokens);
}
TEST(TokenizerTest, EscapedColon) {
TEST(TokenizeTest, EscapedColon) {
std::vector<Token> expected_tokens = {
Token(TokenType::kChar, 0, "/"),
Token(TokenType::kEscapedChar, 1, ":"),
......@@ -76,10 +83,10 @@ TEST(TokenizerTest, EscapedColon) {
Token(TokenType::kChar, 5, "o"),
Token(TokenType::kEnd, 6, absl::string_view()),
};
RunTokenizerTest("/\\:foo", expected_tokens);
RunTokenizeTest("/\\:foo", expected_tokens);
}
TEST(TokenizerTest, EscapedParen) {
TEST(TokenizeTest, EscapedParen) {
std::vector<Token> expected_tokens = {
Token(TokenType::kChar, 0, "/"),
Token(TokenType::kEscapedChar, 1, "("),
......@@ -89,10 +96,10 @@ TEST(TokenizerTest, EscapedParen) {
Token(TokenType::kEscapedChar, 6, ")"),
Token(TokenType::kEnd, 8, absl::string_view()),
};
RunTokenizerTest("/\\(foo\\)", expected_tokens);
RunTokenizeTest("/\\(foo\\)", expected_tokens);
}
TEST(TokenizerTest, EscapedCurlyBrace) {
TEST(TokenizeTest, EscapedCurlyBrace) {
std::vector<Token> expected_tokens = {
Token(TokenType::kChar, 0, "/"),
Token(TokenType::kEscapedChar, 1, "{"),
......@@ -102,38 +109,38 @@ TEST(TokenizerTest, EscapedCurlyBrace) {
Token(TokenType::kEscapedChar, 6, "}"),
Token(TokenType::kEnd, 8, absl::string_view()),
};
RunTokenizerTest("/\\{foo\\}", expected_tokens);
RunTokenizeTest("/\\{foo\\}", expected_tokens);
}
TEST(TokenizerTest, EscapedCharAtEnd) {
RunTokenizerTest("/foo\\",
TEST(TokenizeTest, EscapedCharAtEnd) {
RunTokenizeTest("/foo\\",
absl::InvalidArgumentError("Trailing escape character"));
}
TEST(TokenizerTest, EscapedInvalidChar) {
TEST(TokenizeTest, EscapedInvalidChar) {
// Use a single byte invalid character since the escape only applies to the
// next byte character.
RunTokenizerTest("\\\xff", absl::InvalidArgumentError("Invalid character"));
RunTokenizeTest("\\\xff", absl::InvalidArgumentError("Invalid character"));
}
TEST(TokenizerTest, Name) {
TEST(TokenizeTest, Name) {
std::vector<Token> expected_tokens = {
Token(TokenType::kName, 0, "Foo_1"),
Token(TokenType::kEnd, 6, absl::string_view()),
};
RunTokenizerTest(":Foo_1", expected_tokens);
RunTokenizeTest(":Foo_1", expected_tokens);
}
TEST(TokenizerTest, NameWithZeroLength) {
RunTokenizerTest("/:/foo",
TEST(TokenizeTest, NameWithZeroLength) {
RunTokenizeTest("/:/foo",
absl::InvalidArgumentError("Missing parameter name"));
}
TEST(TokenizerTest, NameWithInvalidChar) {
RunTokenizerTest("/:fooßar", absl::InvalidArgumentError("Invalid character"));
TEST(TokenizeTest, NameWithInvalidChar) {
RunTokenizeTest("/:fooßar", absl::InvalidArgumentError("Invalid character"));
}
TEST(TokenizerTest, NameAndFileExtension) {
TEST(TokenizeTest, NameAndFileExtension) {
std::vector<Token> expected_tokens = {
Token(TokenType::kName, 0, "foo"),
Token(TokenType::kChar, 4, "."),
......@@ -142,10 +149,10 @@ TEST(TokenizerTest, NameAndFileExtension) {
Token(TokenType::kChar, 7, "g"),
Token(TokenType::kEnd, 8, absl::string_view()),
};
RunTokenizerTest(":foo.jpg", expected_tokens);
RunTokenizeTest(":foo.jpg", expected_tokens);
}
TEST(TokenizerTest, NameInPath) {
TEST(TokenizeTest, NameInPath) {
std::vector<Token> expected_tokens = {
Token(TokenType::kChar, 0, "/"),
Token(TokenType::kName, 1, "foo"),
......@@ -155,91 +162,91 @@ TEST(TokenizerTest, NameInPath) {
Token(TokenType::kChar, 8, "r"),
Token(TokenType::kEnd, 9, absl::string_view()),
};
RunTokenizerTest("/:foo/bar", expected_tokens);
RunTokenizeTest("/:foo/bar", expected_tokens);
}
TEST(TokenizerTest, Regex) {
TEST(TokenizeTest, Regex) {
std::vector<Token> expected_tokens = {
Token(TokenType::kRegex, 0, "foo"),
Token(TokenType::kEnd, 5, absl::string_view()),
};
RunTokenizerTest("(foo)", expected_tokens);
RunTokenizeTest("(foo)", expected_tokens);
}
TEST(TokenizerTest, RegexWithZeroLength) {
RunTokenizerTest("()", absl::InvalidArgumentError("Missing regex"));
TEST(TokenizeTest, RegexWithZeroLength) {
RunTokenizeTest("()", absl::InvalidArgumentError("Missing regex"));
}
TEST(TokenizerTest, RegexWithInvalidChar) {
RunTokenizerTest("(ßar)", absl::InvalidArgumentError("Invalid character"));
TEST(TokenizeTest, RegexWithInvalidChar) {
RunTokenizeTest("(ßar)", absl::InvalidArgumentError("Invalid character"));
}
TEST(TokenizerTest, RegexWithoutClosingParen) {
RunTokenizerTest("(foo", absl::InvalidArgumentError("Unbalanced regex"));
TEST(TokenizeTest, RegexWithoutClosingParen) {
RunTokenizeTest("(foo", absl::InvalidArgumentError("Unbalanced regex"));
}
TEST(TokenizerTest, RegexWithNestedCapturingGroup) {
RunTokenizerTest("(f(oo))", absl::InvalidArgumentError(
TEST(TokenizeTest, RegexWithNestedCapturingGroup) {
RunTokenizeTest("(f(oo))", absl::InvalidArgumentError(
"Unnamed capturing groups are not allowed"));
}
TEST(TokenizerTest, RegexWithNestedNamedCapturingGroup) {
TEST(TokenizeTest, RegexWithNestedNamedCapturingGroup) {
std::vector<Token> expected_tokens = {
Token(TokenType::kRegex, 0, "f(?oo)"),
Token(TokenType::kEnd, 8, absl::string_view()),
};
RunTokenizerTest("(f(?oo))", expected_tokens);
RunTokenizeTest("(f(?oo))", expected_tokens);
}
TEST(TokenizerTest, RegexWithNestedNonCapturingGroup) {
TEST(TokenizeTest, RegexWithNestedNonCapturingGroup) {
std::vector<Token> expected_tokens = {
Token(TokenType::kRegex, 0, "f(?:oo)"),
Token(TokenType::kEnd, 9, absl::string_view()),
};
RunTokenizerTest("(f(?:oo))", expected_tokens);
RunTokenizeTest("(f(?:oo))", expected_tokens);
}
TEST(TokenizerTest, RegexWithAssertion) {
TEST(TokenizeTest, RegexWithAssertion) {
std::vector<Token> expected_tokens = {
Token(TokenType::kRegex, 0, "f(?<y)x"),
Token(TokenType::kEnd, 9, absl::string_view()),
};
RunTokenizerTest("(f(?<y)x)", expected_tokens);
RunTokenizeTest("(f(?<y)x)", expected_tokens);
}
TEST(TokenizerTest, RegexWithNestedUnbalancedGroup) {
RunTokenizerTest("(f(?oo)", absl::InvalidArgumentError("Unbalanced regex"));
TEST(TokenizeTest, RegexWithNestedUnbalancedGroup) {
RunTokenizeTest("(f(?oo)", absl::InvalidArgumentError("Unbalanced regex"));
}
TEST(TokenizerTest, RegexWithTrailingParen) {
RunTokenizerTest("(f(", absl::InvalidArgumentError("Unbalanced regex"));
TEST(TokenizeTest, RegexWithTrailingParen) {
RunTokenizeTest("(f(", absl::InvalidArgumentError("Unbalanced regex"));
}
TEST(TokenizerTest, RegexWithEscapedChar) {
TEST(TokenizeTest, RegexWithEscapedChar) {
std::vector<Token> expected_tokens = {
Token(TokenType::kRegex, 0, "f\\(oo"),
Token(TokenType::kEnd, 7, absl::string_view()),
};
RunTokenizerTest("(f\\(oo)", expected_tokens);
RunTokenizeTest("(f\\(oo)", expected_tokens);
}
TEST(TokenizerTest, RegexWithTrailingEscapedChar) {
RunTokenizerTest("(foo\\",
TEST(TokenizeTest, RegexWithTrailingEscapedChar) {
RunTokenizeTest("(foo\\",
absl::InvalidArgumentError("Trailing escape character"));
}
TEST(TokenizerTest, RegexWithEscapedInvalidChar) {
TEST(TokenizeTest, RegexWithEscapedInvalidChar) {
// Use a single byte invalid character since the escape only applies to the
// next byte character.
RunTokenizerTest("(\\\xff)", absl::InvalidArgumentError("Invalid character"));
RunTokenizeTest("(\\\xff)", absl::InvalidArgumentError("Invalid character"));
}
TEST(TokenizerTest, RegexWithLeadingQuestion) {
RunTokenizerTest("(?foo)",
TEST(TokenizeTest, RegexWithLeadingQuestion) {
RunTokenizeTest("(?foo)",
absl::InvalidArgumentError("Regex cannot start with '?'"));
}
TEST(TokenizerTest, RegexInPath) {
TEST(TokenizeTest, RegexInPath) {
std::vector<Token> expected_tokens = {
Token(TokenType::kChar, 0, "/"),
Token(TokenType::kChar, 1, "f"),
......@@ -253,10 +260,10 @@ TEST(TokenizerTest, RegexInPath) {
Token(TokenType::kChar, 12, "r"),
Token(TokenType::kEnd, 13, absl::string_view()),
};
RunTokenizerTest("/foo/(.*)/bar", expected_tokens);
RunTokenizeTest("/foo/(.*)/bar", expected_tokens);
}
TEST(TokenizerTest, ModifierStar) {
TEST(TokenizeTest, ModifierStar) {
std::vector<Token> expected_tokens = {
Token(TokenType::kChar, 0, "/"),
Token(TokenType::kOpen, 1, "{"),
......@@ -267,10 +274,10 @@ TEST(TokenizerTest, ModifierStar) {
Token(TokenType::kModifier, 6, "*"),
Token(TokenType::kEnd, 7, absl::string_view()),
};
RunTokenizerTest("/{foo}*", expected_tokens);
RunTokenizeTest("/{foo}*", expected_tokens);
}
TEST(TokenizerTest, ModifierPlus) {
TEST(TokenizeTest, ModifierPlus) {
std::vector<Token> expected_tokens = {
Token(TokenType::kChar, 0, "/"),
Token(TokenType::kOpen, 1, "{"),
......@@ -281,10 +288,10 @@ TEST(TokenizerTest, ModifierPlus) {
Token(TokenType::kModifier, 6, "+"),
Token(TokenType::kEnd, 7, absl::string_view()),
};
RunTokenizerTest("/{foo}+", expected_tokens);
RunTokenizeTest("/{foo}+", expected_tokens);
}
TEST(TokenizerTest, ModifierQuestion) {
TEST(TokenizeTest, ModifierQuestion) {
std::vector<Token> expected_tokens = {
Token(TokenType::kChar, 0, "/"),
Token(TokenType::kOpen, 1, "{"),
......@@ -295,10 +302,10 @@ TEST(TokenizerTest, ModifierQuestion) {
Token(TokenType::kModifier, 6, "?"),
Token(TokenType::kEnd, 7, absl::string_view()),
};
RunTokenizerTest("/{foo}?", expected_tokens);
RunTokenizeTest("/{foo}?", expected_tokens);
}
TEST(TokenizerTest, Everything) {
TEST(TokenizeTest, Everything) {
std::vector<Token> expected_tokens = {
Token(TokenType::kChar, 0, "/"),
Token(TokenType::kEscapedChar, 1, "f"),
......@@ -313,7 +320,7 @@ TEST(TokenizerTest, Everything) {
Token(TokenType::kModifier, 21, "*"),
Token(TokenType::kEnd, 22, absl::string_view()),
};
RunTokenizerTest("/\\foo/(a(?.*)){/:bar}*", expected_tokens);
RunTokenizeTest("/\\foo/(a(?.*)){/:bar}*", expected_tokens);
}
} // namespace liburlpattern
// Copyright 2020 The Chromium Authors. All rights reserved.
// Copyright 2014 Blake Embrey (hello@blakeembrey.com)
// Use of this source code is governed by an MIT-style license that can be
// found in the LICENSE file or at https://opensource.org/licenses/MIT.
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/liburlpattern/parse.h"
#include "third_party/liburlpattern/pattern.h"
#include "third_party/liburlpattern/utils.h"
namespace liburlpattern {
TEST(ParseTest, ValidChar) {
auto result = Parse("/foo/bar");
EXPECT_TRUE(result.ok());
}
TEST(ParseTest, InvalidChar) {
auto result = Parse("/foo/ßar");
EXPECT_FALSE(result.ok());
EXPECT_EQ(result.status().code(), absl::StatusCode::kInvalidArgument);
EXPECT_NE(result.status().message().find("Invalid character"),
std::string::npos);
}
std::string EscapeString(absl::string_view input) {
std::string result;
result.reserve(input.size());
const absl::string_view special_characters(".+*?=^!:${}()[]|/\\");
for (auto& c : input) {
if (special_characters.find(c) != std::string::npos)
result += '\\';
result += c;
}
return result;
}
} // namespace liburlpattern
// Copyright 2020 The Chromium Authors. All rights reserved.
// Copyright 2014 Blake Embrey (hello@blakeembrey.com)
// Use of this source code is governed by an MIT-style license that can be
// found in the LICENSE file or at https://opensource.org/licenses/MIT.
#ifndef THIRD_PARTY_LIBURLPATTERN_UTILS_H_
#define THIRD_PARTY_LIBURLPATTERN_UTILS_H_
#include <string>
#include "base/component_export.h"
#include "third_party/abseil-cpp/absl/strings/string_view.h"
namespace liburlpattern {
// Escape an input string so that it may be safely included in a
// regular expression.
COMPONENT_EXPORT(LIBURLPATTERN)
std::string EscapeString(absl::string_view input);
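// For example (see utils_unittest.cc), EscapeString("index.html") returns
// "index\.html" and EscapeString("/foo/bar") returns "\/foo\/bar".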
} // namespace liburlpattern
#endif // THIRD_PARTY_LIBURLPATTERN_UTILS_H_
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by an MIT-style license that can be
// found in the LICENSE file or at https://opensource.org/licenses/MIT.
#include "third_party/liburlpattern/utils.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace liburlpattern {
TEST(UtilsTest, EscapeStringDot) {
EXPECT_EQ(EscapeString("index.html"), "index\\.html");
}
TEST(UtilsTest, EscapeStringPlus) {
EXPECT_EQ(EscapeString("foo+"), "foo\\+");
}
TEST(UtilsTest, EscapeStringStar) {
EXPECT_EQ(EscapeString("foo*"), "foo\\*");
}
TEST(UtilsTest, EscapeStringQuestion) {
EXPECT_EQ(EscapeString("foo?"), "foo\\?");
}
TEST(UtilsTest, EscapeStringEquals) {
EXPECT_EQ(EscapeString("foo=bar"), "foo\\=bar");
}
TEST(UtilsTest, EscapeStringCaret) {
EXPECT_EQ(EscapeString("^foo"), "\\^foo");
}
TEST(UtilsTest, EscapeStringBang) {
EXPECT_EQ(EscapeString("!foo"), "\\!foo");
}
TEST(UtilsTest, EscapeStringColon) {
EXPECT_EQ(EscapeString(":foo"), "\\:foo");
}
TEST(UtilsTest, EscapeStringDollar) {
EXPECT_EQ(EscapeString("foo$"), "foo\\$");
}
TEST(UtilsTest, EscapeStringCurlyBraces) {
EXPECT_EQ(EscapeString("{foo}"), "\\{foo\\}");
}
TEST(UtilsTest, EscapeStringParens) {
EXPECT_EQ(EscapeString("(foo)"), "\\(foo\\)");
}
TEST(UtilsTest, EscapeStringSquareBrackets) {
EXPECT_EQ(EscapeString("[foo]"), "\\[foo\\]");
}
TEST(UtilsTest, EscapeStringPipe) {
EXPECT_EQ(EscapeString("foo|bar"), "foo\\|bar");
}
TEST(UtilsTest, EscapeStringSlash) {
EXPECT_EQ(EscapeString("/foo/bar"), "\\/foo\\/bar");
}
TEST(UtilsTest, EscapeStringBackslash) {
EXPECT_EQ(EscapeString("\\d"), "\\\\d");
}
} // namespace liburlpattern