
Commit 6b59e05

committed Apr 22, 2020
Documentation
1 parent 76fb509 commit 6b59e05

File tree

3 files changed: +12 −3 lines changed

 

README.md  (+3 −3)
@@ -124,10 +124,10 @@ in this format, the matching will be done according to the REP specification.
 
 ## License
 
-Package grobotstxt is licensed under the terms of the
-Apache license. See [LICENSE](LICENSE) for more information.
+Like the original library, package grobotstxt is licensed under the terms of the
+Apache License, Version 2.0. See [LICENSE](LICENSE) for more information.
 
 ## Links
 
-* Original project
+* Original project:
 [Google robots.txt parser and matcher library](https://github.com/google/robotstxt)

robots_cc.go  (+8 −0)
@@ -606,6 +606,8 @@ func (m *RobotsMatcher) init(userAgents []string, path string) {
 	m.userAgents = userAgents
 }
 
+// AgentsAllowed parses the given robots.txt content, matching it against
+// the given userAgents and URI, and returns true if access is allowed.
 func (m *RobotsMatcher) AgentsAllowed(robotsBody string, userAgents []string, uri string) bool {
 	// Line :487
 	// The url is not normalized (escaped, percent encoded) here because the user
@@ -616,15 +618,21 @@ func (m *RobotsMatcher) AgentsAllowed(robotsBody string, userAgents []string, uri string) bool {
 	return !m.disallowed()
 }
 
+// AgentsAllowed parses the given robots.txt content, matching it against
+// the given userAgents and URI, and returns true if access is allowed.
 func AgentsAllowed(robotsBody string, userAgents []string, uri string) bool {
 	return NewRobotsMatcher().AgentsAllowed(robotsBody, userAgents, uri)
 }
 
+// AgentAllowed parses the given robots.txt content, matching it against
+// the given userAgent and URI, and returns true if access is allowed.
 func (m *RobotsMatcher) AgentAllowed(robotsBody string, userAgent string, uri string) bool {
 	// Line :498
 	return m.AgentsAllowed(robotsBody, []string{userAgent}, uri)
}
 
+// AgentAllowed parses the given robots.txt content, matching it against
+// the given userAgent and URI, and returns true if access is allowed.
 func AgentAllowed(robotsBody string, userAgent string, uri string) bool {
 	return NewRobotsMatcher().AgentAllowed(robotsBody, userAgent, uri)
 }
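
For context, a minimal usage sketch of the entry points documented above. The import path github.com/jimsmart/grobotstxt and the robots.txt body are illustrative assumptions, not part of this commit; adjust them to the actual module.

package main

import (
	"fmt"

	"github.com/jimsmart/grobotstxt" // assumed module path
)

func main() {
	// Illustrative robots.txt content.
	robotsBody := "User-agent: FooBot\nDisallow: /private/\n"

	// Single user agent checked against a single URI.
	fmt.Println(grobotstxt.AgentAllowed(robotsBody, "FooBot", "https://example.com/index.html")) // true
	fmt.Println(grobotstxt.AgentAllowed(robotsBody, "FooBot", "https://example.com/private/x"))  // false

	// Several user agents can be checked in one call; the URI is matched
	// against the rule groups that apply to any of the given agents.
	agents := []string{"FooBot", "BarBot"}
	fmt.Println(grobotstxt.AgentsAllowed(robotsBody, agents, "https://example.com/private/x"))
}

As the diff shows, the package-level helpers construct a fresh RobotsMatcher per call, so they are convenient for one-off checks.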

sitemaps.go  (+1 −0)
@@ -9,6 +9,7 @@ func (f *sitemapExtractor) Sitemaps(robotsBody string) []string {
 	return f.sitemaps
 }
 
+// Sitemaps extracts all "Sitemap:" values from the given robots.txt content.
 func Sitemaps(robotsBody string) []string {
 	return (&sitemapExtractor{}).Sitemaps(robotsBody)
 }
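
Likewise, a short sketch of extracting sitemap URLs; the import path and robots.txt body are again illustrative assumptions.

package main

import (
	"fmt"

	"github.com/jimsmart/grobotstxt" // assumed module path
)

func main() {
	// Illustrative robots.txt content with a Sitemap directive.
	robotsBody := "User-agent: *\nDisallow:\nSitemap: https://example.com/sitemap.xml\n"

	for _, url := range grobotstxt.Sitemaps(robotsBody) {
		fmt.Println(url) // https://example.com/sitemap.xml
	}
}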
