Dockerfile
FROM python:3.5

# Set container local time to Asia/Shanghai, then install the scraping stack.
# Timezone copy and pip installs share one layer; --no-cache-dir keeps the
# pip wheel cache out of the image (DL3042). Packages sorted for diffability.
# NOTE(review): versions are unpinned, so rebuilds are not reproducible —
# consider pinning (e.g. scrapy==1.x) once the working set is known.
RUN cp -f /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
    && pip install --upgrade pip \
    && pip install --no-cache-dir \
        mysql_connector \
        scrapy \
        scrapy-redis \
        scrapyd \
        scrapyd-client \
        spiderkeeper \
        sqlalchemy

# scrapyd reads /etc/scrapyd/scrapyd.conf; app settings come from /config.txt.
COPY scrapyd.conf /etc/scrapyd/
COPY config.txt /

# /data holds scrapyd eggs/logs/dbs (see scrapyd.conf); /images holds
# downloaded images shared between workers.
VOLUME /data
VOLUME /images

# scrapyd HTTP/JSON API port (documentation only; publish via compose/run).
EXPOSE 6800
config.txt
[mysql]
db_host=root:123456@localhost:3306/scrapy

[redis]
db_host=localhost
db_port=7501

[img]
path = /images/
scrapyd.conf
[scrapyd]
bind_address = 0.0.0.0
eggs_dir = /data/eggs
logs_dir = /data/logs
dbs_dir = /data/dbs
http_port = 6800
docker-compose.yml
version: '2'
services:
  # Two identical scrapyd workers; each gets its own /data (eggs/logs/dbs)
  # but they share the /images download directory.
  worker_1:
    image: scrapy
    hostname: worker_1
    volumes:
      - /Project/docker/worker_1:/data
      - /Project/docker/images:/images
    command: scrapyd
  worker_2:
    image: scrapy
    hostname: worker_2
    volumes:
      - /Project/docker/worker_2:/data
      - /Project/docker/images:/images
    command: scrapyd
  # SpiderKeeper web UI scheduling jobs onto both workers.
  spiderkeeper:
    image: scrapy
    hostname: spiderkeeper
    ports:
      - '5000:5000'
    volumes:
      - /Project/docker/spiderkeeper:/data
    # Fixed: was `external_links`, which is only for containers started
    # outside this compose project; worker_1/worker_2 are defined here and
    # are reachable by service name on the default network, so use
    # depends_on to express the startup relationship.
    depends_on:
      - worker_1
      - worker_2
    # Fixed: `sqlite:data/...` is not a valid SQLAlchemy URL; sqlite needs
    # the `sqlite:///` scheme — four slashes for the absolute path into the
    # mounted /data volume so the DB persists across restarts.
    command: spiderkeeper --database-url=sqlite:////data/SpiderKeeper.db --server=http://worker_1:6800 --server=http://worker_2:6800 --username=admin --password=admin