Module: check_mk
Branch: master
Commit: 84915168d759d23fb1c162152fd70fa8c0859bb6
URL:
http://git.mathias-kettner.de/git/?p=check_mk.git;a=commit;h=84915168d759d2…
Author: Lars Michelsen <lm(a)mathias-kettner.de>
Date: Fri Mar 24 10:13:58 2017 +0100
GUI crawl: Save web.log in results
Change-Id: I8c52ad6263075edf1fb15fc59a06f0ae2bd89bd1
---
tests/web/test_crawl.py | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/tests/web/test_crawl.py b/tests/web/test_crawl.py
index c83468d..f3ee934 100644
--- a/tests/web/test_crawl.py
+++ b/tests/web/test_crawl.py
@@ -10,6 +10,7 @@ import signal
import threading
import Queue
import traceback
+import shutil
from urlparse import urlsplit, parse_qsl, urlunsplit, urljoin
from urllib import urlencode
from bs4 import BeautifulSoup
@@ -318,6 +319,10 @@ class TestCrawler(object):
return var_dir() + "/crawl.report"
+    def web_log_file(self):
+        return var_dir() + "/craw-web.log"
+
+
def load_stats(self):
try:
self.stats = eval(file(self.stats_file()).read())
@@ -379,6 +384,9 @@ class TestCrawler(object):
os.rename(self.report_file()+".tmp", self.report_file())
+        # Save the web.log for later diagnose
+        shutil.copyfile(self.site.read_file("var/log/web.log"),
+                        self.web_log_file())
+
if self.errors:
pytest.fail("Crawled %d URLs in %d seconds. Failures:\n%s" %
(len(self.visited), time.time() - self.started,
"\n".join(self.errors)))