Skip to content

Commit

Permalink
Added a command line option to manually instruct loading of SC settings.
Browse files Browse the repository at this point in the history
  • Loading branch information
kalessin committed Sep 9, 2024
1 parent 39a9e4a commit cac764d
Showing 1 changed file with 7 additions and 3 deletions.
10 changes: 7 additions & 3 deletions shub_workflow/script.py
Original file line number Diff line number Diff line change
Expand Up @@ -223,7 +223,7 @@ def __init__(self):
self.project_settings = get_project_settings()
self.spider_loader = SpiderLoader(self.project_settings)
super().__init__()
if resolve_shub_jobkey() is None:
if self.args.load_sc_settings and resolve_shub_jobkey() is None:
self.project_settings.setdict(self.get_sc_project_settings(), priority="project")
self.set_flow_id_name(self.args)

Expand Down Expand Up @@ -273,6 +273,11 @@ def add_argparser_options(self):
action="append",
default=self.children_tags or [],
)
self.argparser.add_argument(
"--load-sc-settings",
action="store_true",
help="If provided, and running on a local environment, load scrapy cloud settings.",
)

def parse_project_id(self, args: Namespace) -> int:
    """Extract the target Scrapy Cloud project id from the parsed CLI arguments."""
    project_id = args.project_id
    return project_id
Expand Down Expand Up @@ -554,8 +559,7 @@ def get_canonical_spidername(self, spidername: SpiderName) -> SpiderName:
raise ValueError(f"Spider {spidername} does not exist.")

def get_project_running_spiders(
self, canonical: bool = False, crawlmanagers: Tuple[str, ...] = (),
only_crawlmanagers: bool = False
self, canonical: bool = False, crawlmanagers: Tuple[str, ...] = (), only_crawlmanagers: bool = False
) -> Set[SpiderName]:
"""
Get all running spiders.
Expand Down

0 comments on commit cac764d

Please sign in to comment.