
Commit 4acc40e
Fix Python examples in documentation
1 parent efa665a

8 files changed, +24 −24 lines changed

docs/client/overview.rst

Lines changed: 8 additions & 8 deletions
@@ -145,8 +145,8 @@ Scheduling logic supports different options, like
 
 For example, to run a new job for a given spider with custom params::
 
-    >>> job = spider.jobs.run(units=2, job_settings={'SETTING': 'VALUE'},
-        priority=1, add_tag=['tagA','tagB'], meta={'custom-data': 'val1'})
+    >>> job = spider.jobs.run(units=2, job_settings={'SETTING': 'VALUE'}, priority=1,
+    ...     add_tag=['tagA','tagB'], meta={'custom-data': 'val1'})
 
 Note that if you run a job on project level, spider name is required::
 
@@ -192,7 +192,7 @@ ones::
     >>> job_summary = next(project.jobs.iter())
     >>> job_summary.get('spider', 'missing')
     'foo'
-    >>> jobs_summary = project.jobs.iter(jobmeta=['scheduled_by', ])
+    >>> jobs_summary = project.jobs.iter(jobmeta=['scheduled_by'])
    >>> job_summary = next(jobs_summary)
     >>> job_summary.get('scheduled_by', 'missing')
     'John'
@@ -227,10 +227,10 @@ for filtering by state:
 - finished
 - deleted
 
-Dict entries returned by ``iter`` method contain some additional meta,
+Dictionary entries returned by ``iter`` method contain some additional meta,
 but can be easily converted to :class:`~scrapinghub.client.jobs.Job` instances with::
 
-    >>> [Job(x['key']) for x in jobs]
+    >>> [Job(client, x['key']) for x in jobs]
     [
     <scrapinghub.client.Job at 0x106e2cc18>,
     <scrapinghub.client.Job at 0x106e260b8>,
@@ -422,9 +422,9 @@ To post a new activity event::
 Or post multiple events at once::
 
     >>> events = [
-        {'event': 'job:completed', 'job': '123/2/5', 'user': 'john'},
-        {'event': 'job:cancelled', 'job': '123/2/6', 'user': 'john'},
-    ]
+    ...     {'event': 'job:completed', 'job': '123/2/5', 'user': 'john'},
+    ...     {'event': 'job:cancelled', 'job': '123/2/6', 'user': 'john'},
+    ... ]
     >>> project.activity.add(events)
 
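The recurring fix in this file is the missing "..." continuation prompt: without it, doctest reads the wrapped lines as expected output rather than as source. A minimal sketch, using only the standard library (the events payload is copied from the hunk above), of how the parser folds PS2-prompted lines into a single example:

# Minimal sketch (not part of this commit): doctest joins "..." PS2
# continuation lines into the preceding ">>>" example's source.
import doctest

docstring = """
>>> events = [
...     {'event': 'job:completed', 'job': '123/2/5', 'user': 'john'},
...     {'event': 'job:cancelled', 'job': '123/2/6', 'user': 'john'},
... ]
>>> len(events)
2
"""

examples = doctest.DocTestParser().get_examples(docstring)
print(len(examples))       # 2: the list assignment and the len() call
print(examples[0].source)  # the whole multi-line list literal, as one source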

docs/legacy/hubstorage.rst

Lines changed: 5 additions & 5 deletions
@@ -130,7 +130,7 @@ If it used, then it's up to the user to list all the required fields, so only fe
     >>> metadata = next(project.jobq.list())
     >>> metadata.get('spider', 'missing')
     u'foo'
-    >>> jobs_metadata = project.jobq.list(jobmeta=['scheduled_by', ])
+    >>> jobs_metadata = project.jobq.list(jobmeta=['scheduled_by'])
     >>> metadata = next(jobs_metadata)
     >>> metadata.get('scheduled_by', 'missing')
     u'John'
@@ -150,7 +150,7 @@ List of tags has ``OR`` power, so in the case above jobs with 'new' or 'verified
 
 To get certain number of last finished jobs per some spider::
 
-    >>> jobs_metadata = project.jobq.list(spider='foo', state='finished' count=3)
+    >>> jobs_metadata = project.jobq.list(spider='foo', state='finished', count=3)
 
 There are 4 possible job states, which can be used as values for filtering by state:
 
@@ -167,7 +167,7 @@ To iterate through items::
 
     >>> items = job.items.iter_values()
     >>> for item in items:
-        # do something, item is just a dict
+    ...     # do something, item is just a dict
 
 Logs
 ^^^^
@@ -176,7 +176,7 @@ To iterate through 10 first logs for example::
 
     >>> logs = job.logs.iter_values(count=10)
     >>> for log in logs:
-        # do something, log is a dict with log level, message and time keys
+    ...     # do something, log is a dict with log level, message and time keys
 
 Collections
 ^^^^^^^^^^^
@@ -246,4 +246,4 @@ Module contents
     :undoc-members:
     :show-inheritance:
 
-.. _scrapinghub.ScrapinghubClient: ../client/overview.html
+.. _scrapinghub.ScrapinghubClient: ../client/overview.html
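The state='finished' count=3 hunk above fixes a plain syntax error (a missing comma between keyword arguments) rather than a prompt problem. A small illustrative check, using only the built-in compile() so neither call is ever executed against a live project:

# Illustrative only: compile() flags the missing comma without running anything.
broken = "project.jobq.list(spider='foo', state='finished' count=3)"
fixed = "project.jobq.list(spider='foo', state='finished', count=3)"

for snippet in (broken, fixed):
    try:
        compile(snippet, "<doc-example>", "eval")
        print("parses OK: ", snippet)
    except SyntaxError as exc:
        print("SyntaxError:", exc.msg, "->", snippet)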

docs/quickstart.rst

Lines changed: 1 addition & 1 deletion
@@ -36,7 +36,7 @@ Work with your projects::
 Run new jobs from the client::
 
     >>> project = client.get_project(123)
-    >>> project.jobs.run('spider1', job_args={'arg1':'val1'})
+    >>> project.jobs.run('spider1', job_args={'arg1': 'val1'})
     <scrapinghub.client.Job at 0x106ee12e8>>
 
 Access your jobs data::

scrapinghub/client/activity.py

Lines changed: 5 additions & 5 deletions
@@ -31,16 +31,16 @@ class Activity(_Proxy):
     - post a new event::
 
         >>> event = {'event': 'job:completed',
-                     'job': '123/2/4',
-                     'user': 'jobrunner'}
+        ...          'job': '123/2/4',
+        ...          'user': 'jobrunner'}
         >>> project.activity.add(event)
 
     - post multiple events at once::
 
         >>> events = [
-            {'event': 'job:completed', 'job': '123/2/5', 'user': 'jobrunner'},
-            {'event': 'job:cancelled', 'job': '123/2/6', 'user': 'john'},
-        ]
+        ...     {'event': 'job:completed', 'job': '123/2/5', 'user': 'jobrunner'},
+        ...     {'event': 'job:cancelled', 'job': '123/2/6', 'user': 'john'},
+        ... ]
         >>> project.activity.add(events)
 
     """

scrapinghub/client/collections.py

Lines changed: 2 additions & 2 deletions
@@ -108,7 +108,7 @@ class Collection(object):
     - add a new item to collection::
 
         >>> foo_store.set({'_key': '002d050ee3ff6192dcbecc4e4b4457d7',
-                           'value': '1447221694537'})
+        ...                'value': '1447221694537'})
 
     - count items in collection::
 
@@ -128,7 +128,7 @@ class Collection(object):
     - iterate iterate over _key & value pair::
 
         >>> for elem in foo_store.iter(count=1)):
-        >>> ... print(elem)
+        ...     print(elem)
         [{'_key': '002d050ee3ff6192dcbecc4e4b4457d7', 'value': '1447221694537'}]
 
     - filter by multiple keys, only values for keys that exist will be returned::
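The print(elem) hunk fixes a different recurring mistake: ">>> ..." is not a continuation prompt, so doctest treats "... print(elem)" as a brand-new example that does not even compile. A short sketch of both forms, standard library only (foo_store is never evaluated, only parsed):

import doctest

parser = doctest.DocTestParser()

# Old form: ">>> ... print(elem)" starts a new example whose source is
# "... print(elem)", an Ellipsis followed by a call, i.e. a SyntaxError.
old = ">>> ... print(elem)\n"
print(parser.get_examples(old)[0].source.strip())

# Fixed form: a true PS2 line folds into the preceding for-loop example.
new = ">>> for elem in foo_store.iter(count=1):\n...     print(elem)\n"
print(parser.get_examples(new)[0].source)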

scrapinghub/client/items.py

Lines changed: 1 addition & 1 deletion
@@ -25,7 +25,7 @@ class Items(_ItemsResourceProxy, _DownloadableProxyMixin):
     - iterate through first 100 items and print them::
 
         >>> for log in job.logs.iter(count=100):
-        >>> ... print(log)
+        ...     print(log)
 
     - retrieve items with timestamp greater or equal to given timestamp
       (item here is an arbitrary dictionary depending on your code)::

scrapinghub/client/jobs.py

Lines changed: 1 addition & 1 deletion
@@ -540,7 +540,7 @@ class JobMeta(_MappingProxy):
 
     - update multiple meta fields at once
 
-        >>> job.metadata.update({'my-meta1': 'test1', 'my-meta2': 'test2})
+        >>> job.metadata.update({'my-meta1': 'test1', 'my-meta2': 'test2'})
 
     - delete meta field by name::
 

scrapinghub/client/logs.py

Lines changed: 1 addition & 1 deletion
@@ -28,7 +28,7 @@ class Logs(_ItemsResourceProxy, _DownloadableProxyMixin):
     - iterate through first 100 log entries and print them::
 
         >>> for log in job.logs.iter(count=100):
-        >>> ... print(log)
+        ...     print(log)
 
     - retrieve a single log entry from a job::
 
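Most hunks in this commit touch docstring examples inside the client modules themselves, so a doctest sweep would have flagged the pure syntax errors, such as the unterminated 'test2} string fixed in jobs.py, before release. A hedged sketch, assuming the scrapinghub package is importable; it only compiles each example, so no live API is contacted:

# Hedged sketch (not part of this commit): collect every docstring example
# and compile it, reporting syntax errors without executing anything.
import doctest
import scrapinghub.client.jobs as jobs  # any of the touched modules works here

for test in doctest.DocTestFinder(exclude_empty=True).find(jobs):
    for example in test.examples:
        try:
            compile(example.source, test.name, "exec")
        except SyntaxError as exc:
            print(f"{test.name}: {exc.msg}: {example.source.strip()}")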
