diff --git a/web_no_crawler/README.rst b/web_no_crawler/README.rst new file mode 100644 index 00000000..3f626252 --- /dev/null +++ b/web_no_crawler/README.rst @@ -0,0 +1 @@ +**This file is going to be generated by oca-gen-addon-readme.** diff --git a/web_no_crawler/__init__.py b/web_no_crawler/__init__.py new file mode 100644 index 00000000..37186625 --- /dev/null +++ b/web_no_crawler/__init__.py @@ -0,0 +1,5 @@ +# -*- coding: utf-8 -*- +# Copyright (C) 2018 Ventor, Xpansa Group () +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). + +from . import controllers diff --git a/web_no_crawler/__openerp__.py b/web_no_crawler/__openerp__.py new file mode 100644 index 00000000..b68fa33d --- /dev/null +++ b/web_no_crawler/__openerp__.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Copyright (C) 2018 Ventor, Xpansa Group () +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). + +{ + 'name': 'Disallow indexing completely via robots.txt', + 'author': 'Ventor, Xpansa Group, Odoo Community Association (OCA)', + 'website': 'https://github.com/OCA/website/tree/8.0', + 'category': 'web', + 'version': '8.0.1.0.0', + 'depends': [ + 'base', + ], + 'installable': True, + 'application': False, + 'license': 'AGPL-3', +} diff --git a/web_no_crawler/controllers/__init__.py b/web_no_crawler/controllers/__init__.py new file mode 100644 index 00000000..cb0d94a6 --- /dev/null +++ b/web_no_crawler/controllers/__init__.py @@ -0,0 +1,5 @@ +# -*- coding: utf-8 -*- +# Copyright (C) 2018 Ventor, Xpansa Group () +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). + +from . import main diff --git a/web_no_crawler/controllers/main.py b/web_no_crawler/controllers/main.py new file mode 100644 index 00000000..8ac0f6c0 --- /dev/null +++ b/web_no_crawler/controllers/main.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright (C) 2018 Ventor, Xpansa Group () +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). 
+ +import openerp +from openerp import http + + +class Main(openerp.addons.web.controllers.main.Home): + + @http.route('/robots.txt', type='http', auth="none") + def robots(self): + return http.request.make_response( + "User-agent: *\nDisallow: /", + [('Content-Type', 'text/plain')], + ) diff --git a/web_no_crawler/readme/CONTRIBUTORS.rst b/web_no_crawler/readme/CONTRIBUTORS.rst new file mode 100644 index 00000000..1fa8ce6e --- /dev/null +++ b/web_no_crawler/readme/CONTRIBUTORS.rst @@ -0,0 +1 @@ +* Nedas Zilinskas (http://xpansa.com) diff --git a/web_no_crawler/readme/CREDITS.rst b/web_no_crawler/readme/CREDITS.rst new file mode 100644 index 00000000..eed428e7 --- /dev/null +++ b/web_no_crawler/readme/CREDITS.rst @@ -0,0 +1,3 @@ +The development of this module has been financially supported by: + +* Ventor, Xpansa Group () diff --git a/web_no_crawler/readme/DESCRIPTION.rst b/web_no_crawler/readme/DESCRIPTION.rst new file mode 100644 index 00000000..8e20bb0a --- /dev/null +++ b/web_no_crawler/readme/DESCRIPTION.rst @@ -0,0 +1,2 @@ +This module was written to implement a robots.txt file to prevent web crawlers (like Google) from indexing pages. +This module does not depend on Website module. diff --git a/web_no_crawler/readme/INSTALL.rst b/web_no_crawler/readme/INSTALL.rst new file mode 100644 index 00000000..428aa030 --- /dev/null +++ b/web_no_crawler/readme/INSTALL.rst @@ -0,0 +1,3 @@ +Installation as usual. No specific installation steps / configuration required. + +**WARNING:** this module is not to be used with `Website` module as it has a separate functionality for robots.txt. diff --git a/web_no_crawler/readme/ROADMAP.rst b/web_no_crawler/readme/ROADMAP.rst new file mode 100644 index 00000000..3bcd4a08 --- /dev/null +++ b/web_no_crawler/readme/ROADMAP.rst @@ -0,0 +1 @@ +* Investigate possibilities for compatibility with `Website` module as it has a separate functionality for robots.txt. 
diff --git a/web_no_crawler/readme/USAGE.rst b/web_no_crawler/readme/USAGE.rst new file mode 100644 index 00000000..db4aee1d --- /dev/null +++ b/web_no_crawler/readme/USAGE.rst @@ -0,0 +1,3 @@ +To use this module, you need to: + +No configuration needed. Once installed, the module adds robots.txt (e.g.: http://example.org/robots.txt). diff --git a/web_no_crawler/static/description/icon.png b/web_no_crawler/static/description/icon.png new file mode 100644 index 00000000..3a0328b5 Binary files /dev/null and b/web_no_crawler/static/description/icon.png differ