Skip to content

Commit efa665a

Browse files
committed
Minor docstring fixes: links and formatting
1 parent b06ae70 commit efa665a

File tree

7 files changed

+25
-34
lines changed

7 files changed

+25
-34
lines changed

scrapinghub/client/__init__.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -70,9 +70,9 @@ def get_project(self, project_id):
7070
return self.projects.get(parse_project_id(project_id))
7171

7272
def get_job(self, job_key):
73-
"""Get Job with a given job key.
73+
"""Get :class:`~scrapinghub.client.jobs.Job` with a given job key.
7474
75-
:param job_key: job key string in format 'project_id/spider_id/job_id',
75+
:param job_key: job key string in format ``project_id/spider_id/job_id``,
7676
where all the components are integers.
7777
:return: a job instance.
7878
:rtype: :class:`~scrapinghub.client.jobs.Job`

scrapinghub/client/collections.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ class Collections(_Proxy, _DownloadableProxyMixin):
2727
def get(self, type_, name):
2828
"""Base method to get a collection with a given type and name.
2929
30-
:param type_: a collection type string.
30+
:param `type_`: a collection type string.
3131
:param name: a collection name string.
3232
:return: a collection object.
3333
:rtype: :class:`Collection`
@@ -248,7 +248,7 @@ def create_writer(self, start=0, auth=None, size=1000, interval=15,
248248
:param maxitemsize: (optional) max item size in bytes.
249249
:param callback: (optional) some callback function.
250250
:return: a new writer object.
251-
:rtype: :class:`~scrapinghub.hubstorage.batchuploader._BatchWriter`
251+
:rtype: :class:`scrapinghub.hubstorage.batchuploader._BatchWriter`
252252
253253
If provided - callback shouldn't try to inject more items in the queue,
254254
otherwise it can lead to deadlocks.

scrapinghub/client/frontiers.py

Lines changed: 5 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -113,11 +113,7 @@ def list(self):
113113

114114
@property
115115
def newcount(self):
116-
"""Amount of new entries added to all frontiers.
117-
118-
:return: amount of new entries.
119-
:rtype: :class:`int`
120-
"""
116+
"""Integer amount of new entries added to all frontiers."""
121117
return sum(self._origin.newcount.values())
122118

123119
def flush(self):
@@ -199,11 +195,7 @@ def flush(self):
199195

200196
@property
201197
def newcount(self):
202-
"""Amount of new entries added to frontier.
203-
204-
:return: amount of new entries.
205-
:rtype: :class:`int`
206-
"""
198+
"""Integer amount of new entries added to frontier."""
207199
newcount_values = self._frontiers._origin.newcount
208200
return sum(v for (frontier, _), v in newcount_values.items()
209201
if frontier == self.key)
@@ -298,16 +290,13 @@ def flush(self):
298290

299291
@property
300292
def newcount(self):
301-
"""Amount of new entries added to slot.
302-
303-
:return: amount of new entries.
304-
:rtype: :class:`int`
305-
"""
293+
"""Integer amount of new entries added to slot."""
306294
newcount_values = self._frontier._frontiers._origin.newcount
307295
return newcount_values.get((self._frontier.key, self.key), 0)
308296

309297

310298
class FrontierSlotFingerprints(object):
299+
"""Representation of request fingerprints collection stored in slot."""
311300

312301
def __init__(self, slot):
313302
self.key = slot.key
@@ -350,6 +339,7 @@ def list(self, **params):
350339

351340

352341
class FrontierSlotQueue(object):
342+
"""Representation of request batches queue stored in slot."""
353343

354344
def __init__(self, slot):
355345
self.key = slot.key

scrapinghub/client/items.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -6,9 +6,9 @@
66
class Items(_ItemsResourceProxy, _DownloadableProxyMixin):
77
"""Representation of collection of job items.
88
9-
Not a public constructor: use :class:`~scrapinghub.client.jobs.Job` instanc
10-
e to get a :class:`Items` instance.
11-
See :attr:`~scrapinghub.client.jobs.Job.items` attribute.
9+
Not a public constructor: use :class:`~scrapinghub.client.jobs.Job`
10+
instance to get a :class:`Items` instance. See
11+
:attr:`~scrapinghub.client.jobs.Job.items` attribute.
1212
1313
Please note that :meth:`list` method can use a lot of memory and for
1414
a large amount of logs it's recommended to iterate through it via

scrapinghub/client/jobs.py

Lines changed: 11 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ class Jobs(object):
2424
and :attr:`scrapinghub.client.spiders.Spider.jobs` attributes.
2525
2626
:ivar project_id: a string project id.
27-
:ivar spider: :class:`Spider` object if defined.
27+
:ivar spider: :class:`~scrapinghub.client.spiders.Spider` object if defined.
2828
2929
Usage::
3030
@@ -112,16 +112,16 @@ def iter(self, count=None, start=None, spider=None, state=None,
112112
>>> [job['key'] for job in jobs_summary]
113113
['123/1/3', '123/1/2', '123/1/1']
114114
115-
- job summary fieldset is less detailed than job.metadata but contains
116-
few new fields as well. Additional fields can be requested using
117-
``meta`` parameter. If it's used, then it's up to the user to list
118-
all the required fields, so only few default fields would be added
119-
except requested ones::
115+
- job summary fieldset is less detailed than :class:`JobMeta` but
116+
contains a few new fields as well. Additional fields can be requested
117+
using ``meta`` parameter. If it's used, then it's up to the user to
118+
list all the required fields, so only few default fields would be
119+
added except requested ones::
120120
121121
>>> jobs_summary = project.jobs.iter(meta=['scheduled_by', ])
122122
123123
- by default :meth:`Jobs.iter` returns maximum last 1000 results.
124-
Pagination is available using start parameter::
124+
Pagination is available using start parameter::
125125
126126
>>> jobs_summary = spider.jobs.iter(start=1000)
127127
@@ -227,13 +227,14 @@ def run(self, spider=None, units=None, priority=None, meta=None,
227227
return Job(self._client, response['jobid'])
228228

229229
def get(self, job_key):
230-
"""Get a Job with a given job_key.
230+
"""Get a :class:`Job` with a given job_key.
231231
232232
:param job_key: a string job key.
233233
234234
job_key's project component should match the project used to get
235235
:class:`Jobs` instance, and job_key's spider component should match
236-
the spider (if :attr:`Spider.jobs` was used).
236+
the spider (if :class:`~scrapinghub.client.spiders.Spider` was used
237+
to get :class:`Jobs` instance).
237238
238239
:return: a job object.
239240
:rtype: :class:`Job`
@@ -509,7 +510,7 @@ class JobMeta(_MappingProxy):
509510
"""Class representing job metadata.
510511
511512
Not a public constructor: use :class:`Job` instance to get a
512-
:class:`JobMeta` instance. See :attr:`Job.metadata` attribute.
513+
:class:`JobMeta` instance. See :attr:`~Job.metadata` attribute.
513514
514515
Usage:
515516

scrapinghub/client/logs.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,7 @@ def log(self, message, level=logging.INFO, ts=None, **other):
5454
5555
:param message: a string message.
5656
:param level: (optional) logging level, default to INFO.
57-
:param ts: (optional) unix timestamp in milliseconds.
57+
:param ts: (optional) UNIX timestamp in milliseconds.
5858
:param \*\*other: other optional kwargs.
5959
"""
6060
self._origin.log(message, level=level, ts=ts, **other)

scrapinghub/client/requests.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@ def add(self, url, status, method, rs, parent, duration, ts, fp=None):
5050
:param rs: response body length.
5151
:param parent: parent request id or ``None``.
5252
:param duration: request duration in milliseconds.
53-
:param ts: unix timestamp in milliseconds.
53+
:param ts: UNIX timestamp in milliseconds.
5454
:param fp: (optional) string fingerprint for the request.
5555
"""
5656
return self._origin.add(

0 commit comments

Comments
 (0)