Add user-agent blocking to deal with web scrapers
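
Requests carrying a known scraper user agent are now rejected with HTTP 403
instead of being served the site content. As a rough, illustrative sketch
(assuming an nginx map on $http_user_agent; the variable name $bad_bot and the
pattern list are placeholders, and the role's actual template is not part of
this diff), the blocking mechanism looks something like:

    map $http_user_agent $bad_bot {
        default      0;
        ~*AhrefsBot  1;
    }

    server {
        listen 80;
        server_name static.example.com;

        if ($bad_bot) {
            return 403;
        }
    }

The molecule verification below sends a request with AhrefsBot's user-agent
string and asserts that the server answers 403.
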
Change-Id: I9c26791a9f96bb2e87f0f427f7ff4adcb46a2b9e
diff --git a/molecule/default/verify.yml b/molecule/default/verify.yml
index 5e2e15c..fbe12ca 100644
--- a/molecule/default/verify.yml
+++ b/molecule/default/verify.yml
@@ -28,6 +28,14 @@
       register: webpage
       failed_when: "'This file is served from static.example.com' not in webpage.content"
 
+    - name: Test that bad user agents are blocked with 403
+      uri:
+        url: http://127.0.0.1/
+        headers:
+          Host: "static.example.com"
+          User-Agent: "Mozilla/5.0 (compatible; AhrefsBot/7.0; +http://ahrefs.com/robot/)"
+        status_code: 403
+
     - name: Test that the static site is the default site
       uri:
         url: http://127.0.0.1/