# SINCE 0.7.0 \r
web.enableRpcAdministration = false\r
\r
+# Full path to a custom robots.txt file. With this file you can control
+# which parts of your Gitblit server well-behaved robots (crawlers) are
+# allowed to traverse.
+# http://googlewebmastercentral.blogspot.com/2008/06/improving-on-robots-exclusion-protocol.html\r
+#\r
+# SINCE 1.0.0\r
+web.robots.txt = \r
+\r
# If true, the web ui layout will respond and adapt to the browser's dimensions.\r
# if false, the web ui will use a 940px fixed-width layout.\r
# http://twitter.github.com/bootstrap/scaffolding.html#responsive\r
</servlet-mapping> \r
\r
\r
+ <!-- Robots.txt Servlet\r
+ <url-pattern> MUST match: \r
+ * Wicket Filter ignorePaths parameter -->\r
+ <servlet>\r
+ <servlet-name>RobotsTxtServlet</servlet-name>\r
+ <servlet-class>com.gitblit.RobotsTxtServlet</servlet-class>\r
+ </servlet>\r
+ <servlet-mapping>\r
+ <servlet-name>RobotsTxtServlet</servlet-name> \r
+ <url-pattern>/robots.txt</url-pattern>\r
+ </servlet-mapping>\r
+\r
+ \r
<!-- Git Access Restriction Filter\r
<url-pattern> MUST match: \r
* GitServlet\r
* PagesFilter <url-pattern>\r
* PagesServlet <url-pattern>\r
* com.gitblit.Constants.PAGES_PATH -->\r
- <param-value>git/,feed/,zip/,federation/,rpc/,pages/</param-value>\r
+ <param-value>git/,feed/,zip/,federation/,rpc/,pages/,robots.txt</param-value>\r
</init-param>\r
</filter>\r
<filter-mapping>\r
--- /dev/null
+/*\r
+ * Copyright 2012 gitblit.com.\r
+ *\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ * http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ */\r
+package com.gitblit;\r
+\r
+import java.io.File;\r
+import java.io.IOException;\r
+\r
+import javax.servlet.ServletException;\r
+import javax.servlet.http.HttpServlet;\r
+import javax.servlet.http.HttpServletRequest;\r
+import javax.servlet.http.HttpServletResponse;\r
+\r
+import com.gitblit.utils.FileUtils;\r
+import com.gitblit.utils.StringUtils;\r
+\r
+/**
+ * Serves the robots.txt exclusion file for this Gitblit instance.
+ * <p>
+ * On each request the content is read from the file referenced by the
+ * <code>web.robots.txt</code> setting; if the setting is empty or the file
+ * does not exist, an empty document is returned (allowing all crawlers).
+ * 
+ * @author James Moger
+ * 
+ */
+public class RobotsTxtServlet extends HttpServlet {
+
+	private static final long serialVersionUID = 1L;
+
+	public RobotsTxtServlet() {
+		super();
+	}
+
+	@Override
+	protected void doPost(HttpServletRequest request, HttpServletResponse response)
+			throws ServletException, IOException {
+		processRequest(request, response);
+	}
+
+	@Override
+	protected void doGet(HttpServletRequest request, HttpServletResponse response)
+			throws ServletException, IOException {
+		processRequest(request, response);
+	}
+
+	/**
+	 * Writes the configured robots.txt content to the response.
+	 * 
+	 * @param request
+	 *            the servlet request (unused)
+	 * @param response
+	 *            the servlet response the content is written to
+	 * @throws ServletException
+	 * @throws IOException
+	 */
+	protected void processRequest(HttpServletRequest request, HttpServletResponse response)
+			throws ServletException, IOException {
+		// NOTE(review): the original referenced Keys.web.robots.txt, which is
+		// not a legal field access unless Keys.web nests a "robots" class; the
+		// generated constant for setting "web.robots.txt" should be
+		// Keys.web.robotsTxt -- confirm against the generated Keys class.
+		String robotstxt = GitBlit.getString(Keys.web.robotsTxt, null);
+		String content = "";
+		if (!StringUtils.isEmpty(robotstxt)) {
+			File robotsfile = new File(robotstxt);
+			if (robotsfile.exists()) {
+				content = FileUtils.readContent(robotsfile, "\n");
+			}
+		}
+		// robots.txt must be served as plain text per the exclusion protocol
+		response.setContentType("text/plain");
+		response.getWriter().append(content);
+	}
+}