Running multiple Scrapy spiders sequentially


The script below compares three ways of launching spiders from a single Python file: only the os.system approach actually runs both spiders one after the other; the two cmdline-based approaches stop after the first spider.
# -*- coding:utf-8 -*-

from scrapy import cmdline
from scrapy.cmdline import execute
import sys, time, os

# Approach 1: os.system blocks until each command finishes,
# so both spiders run, one after the other.
os.system('scrapy crawl ccdi')
os.system('scrapy crawl ccxi')
#-----------------------------------------------------

# Approach 2: only the first spider runs -- cmdline.execute never
# returns (it exits the process when the command finishes).
cmdline.execute('scrapy crawl ccdi'.split())
cmdline.execute('scrapy crawl ccxi'.split())
#-----------------------------------------------------

# Approach 3: same problem -- execute() does not return, so only the
# first spider runs; the sleep and the second call are never reached.
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
execute(["scrapy", "crawl", "shanghaione"])
time.sleep(30)

sys.path.append(os.path.dirname(os.path.abspath(__file__)))
execute(["scrapy", "crawl", "shanghaitwo"])
