Skip to content

Commit ef1a46a

Browse files
authored
Merge pull request #90 from scrapinghub/logging-update
remove dead logging code + upgrade scrapy versions in travis CI
2 parents 0a5db72 + fcc4388 commit ef1a46a

File tree

5 files changed

+11
-54
lines changed

5 files changed

+11
-54
lines changed

.travis.yml

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -6,19 +6,17 @@ matrix:
66
- python: 2.7
77
env: TOXENV=py27
88
- python: 2.7
9-
env: TOXENV=py27-scrapy1.0
9+
env: TOXENV=py27-scrapy1.5
1010
- python: 2.7
11-
env: TOXENV=py27-scrapy1.1
12-
- python: 2.7
13-
env: TOXENV=py27-scrapy1.2
11+
env: TOXENV=py27-scrapy1.6
1412
- python: 3.5
1513
env: TOXENV=py35
1614
- python: 3.6
1715
env: TOXENV=py36
1816
- python: 3.6
19-
env: TOXENV=py36-scrapy1.1
17+
env: TOXENV=py36-scrapy1.5
2018
- python: 3.6
21-
env: TOXENV=py36-scrapy1.2
19+
env: TOXENV=py36-scrapy1.6
2220
script: tox
2321

2422
deploy:

requirements-dev.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
bumpversion==0.5.3
22
fabric
3-
requests==2.9.1
3+
requests==2.22.0
44
mock==1.3.0
55
pytest==2.9.1
66
pytest-cov==2.2.1

requirements.txt

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
Scrapy>=1.0.0
22
service-identity>=1.0.0
3-
demjson
4-
six
3+
demjson==2.2.4
4+
six==1.12.0

scrapyrt/core.py

Lines changed: 1 addition & 41 deletions
Original file line numberDiff line numberDiff line change
@@ -4,9 +4,8 @@
44
import datetime
55
import os
66
import six
7-
import types
87

9-
from scrapy import signals, log as scrapy_log
8+
from scrapy import signals
109
from scrapy.crawler import CrawlerRunner, Crawler
1110
from scrapy.exceptions import DontCloseSpider
1211
from scrapy.http import Request
@@ -85,45 +84,6 @@ def cleanup_logging(result):
8584

8685
return dfd.addBoth(cleanup_logging)
8786

88-
def _setup_crawler_logging(self, crawler):
89-
log_observer = scrapy_log.start_from_crawler(crawler)
90-
if log_observer:
91-
monkey_patch_and_connect_log_observer(crawler, log_observer)
92-
if self.log_observer:
93-
monkey_patch_and_connect_log_observer(crawler, self.log_observer)
94-
95-
def _stop_logging(self):
96-
if self.log_observer:
97-
try:
98-
self.log_observer.stop()
99-
except ValueError:
100-
# exception on kill
101-
# exceptions.ValueError: list.remove(x): x not in list
102-
# looks like it's safe to ignore it
103-
pass
104-
105-
106-
def monkey_patch_and_connect_log_observer(crawler, log_observer):
107-
"""Ugly hack to close log file.
108-
109-
Monkey patch log_observer.stop method to close file each time
110-
log observer is closed.
111-
I prefer this to be fixed in Scrapy itself, but as
112-
Scrapy is going to switch to standard Python logging soon
113-
https://github.com/scrapy/scrapy/pull/1060
114-
this change wouldn't be accepted in preference of merging
115-
new logging sooner.
116-
117-
"""
118-
def stop_and_close_log_file(self):
119-
self.__stop()
120-
self.write.__self__.close()
121-
122-
log_observer.__stop = log_observer.stop
123-
log_observer.stop = types.MethodType(
124-
stop_and_close_log_file, log_observer)
125-
crawler.signals.connect(log_observer.stop, signals.engine_stopped)
126-
12787

12888
class CrawlManager(object):
12989
"""

tox.ini

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,9 @@
11
[tox]
2-
envlist = py27, py27-scrapy{1.0,1.1,1.2}, py35, py36, {py35,py36}-scrapy{1.1,1.2}
2+
envlist = py27, py27-scrapy{1.5,1.6}, py35, py36, {py35,py36}-scrapy{1.5,1.6}
33

44
[testenv]
55
deps =
6-
scrapy1.0: Scrapy>=1.0,<1.1
7-
scrapy1.1: Scrapy>=1.1,<1.2
8-
scrapy1.2: Scrapy>=1.2,<1.3
6+
scrapy1.5: Scrapy>=1.5,<1.6
7+
scrapy1.6: Scrapy>=1.6,<1.7
98
-r{toxinidir}/requirements-dev.txt
109
commands = py.test {posargs}

0 commit comments

Comments
 (0)