code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def get_reports(self):
"""
Retrieve all reports submitted for this Sample.
:return: A list of :class:`.Report`
"""
url = '{}reports/'.format(self.url)
return Report._get_list_from_url(url, append_base_url=False) | Retrieve all reports submitted for this Sample.
:return: A list of :class:`.Report` |
def norm(self, valu):
'''
Normalize the value for a given type.
Args:
valu (obj): The value to normalize.
Returns:
((obj,dict)): The normalized valu, info tuple.
Notes:
The info dictionary uses the following key conventions:
... | Normalize the value for a given type.
Args:
valu (obj): The value to normalize.
Returns:
((obj,dict)): The normalized valu, info tuple.
Notes:
The info dictionary uses the following key conventions:
subs (dict): The normalized sub-fields as ... |
def getFloat(self, name, default=0.0, parent_search=False, multikeys_search=False):
""" récupération d'un élément float """
try:
value = self.get(name, default, parent_search, multikeys_search)
return float(value)
except:
# pas de configuration trouvé ou convertion impossible ?
return default | récupération d'un élément float |
def circleconvert(amount, currentformat, newformat):
"""
Convert a circle measurement.
:type amount: number
:param amount: The number to convert.
:type currentformat: string
:param currentformat: The format of the provided value.
:type newformat: string
:param newformat: The intended ... | Convert a circle measurement.
:type amount: number
:param amount: The number to convert.
:type currentformat: string
:param currentformat: The format of the provided value.
:type newformat: string
:param newformat: The intended format of the value.
>>> circleconvert(45, "radius", "diamet... |
def _ftp_pwd(self):
"""Variant of `self.ftp.pwd()` that supports encoding-fallback.
Returns:
Current working directory as native string.
"""
try:
return self.ftp.pwd()
except UnicodeEncodeError:
if compat.PY2 or self.ftp.encoding != "... | Variant of `self.ftp.pwd()` that supports encoding-fallback.
Returns:
Current working directory as native string. |
def name(self):
"""
:return:
A unicode string of the field name of the chosen alternative
"""
if not self._name:
self._name = self._alternatives[self._choice][0]
return self._name | :return:
A unicode string of the field name of the chosen alternative |
def dir2fn(ofn, ifn, suffix) -> Union[None, Path]:
"""
ofn = filename or output directory, to create filename based on ifn
ifn = input filename (don't overwrite!)
suffix = desired file extension e.g. .h5
"""
if not ofn: # no output file desired
return None
ofn = Path(ofn).expanduse... | ofn = filename or output directory, to create filename based on ifn
ifn = input filename (don't overwrite!)
suffix = desired file extension e.g. .h5 |
def get_static(root=None):
'''
.. versionadded:: 2015.8.5
Return a list of all static services
root
Enable/disable/mask unit files in the specified root directory
CLI Example:
.. code-block:: bash
salt '*' service.get_static
'''
ret = set()
# Get static systemd u... | .. versionadded:: 2015.8.5
Return a list of all static services
root
Enable/disable/mask unit files in the specified root directory
CLI Example:
.. code-block:: bash
salt '*' service.get_static |
def write_vtu(Verts, Cells, pdata=None, pvdata=None, cdata=None, cvdata=None,
fname='output.vtk'):
"""Write a .vtu file in xml format.
Parameters
----------
fname : {string}
file to be written, e.g. 'mymesh.vtu'
Verts : {array}
Ndof x 3 (if 2, then expanded by 0)
... | Write a .vtu file in xml format.
Parameters
----------
fname : {string}
file to be written, e.g. 'mymesh.vtu'
Verts : {array}
Ndof x 3 (if 2, then expanded by 0)
list of (x,y,z) point coordinates
Cells : {dictionary}
Dictionary of with the keys
pdata : {array}
... |
def get_object(self, url, month_format='%b', day_format='%d'):
"""
Parses the date from a url and uses it in the query. For objects which
are unique for date.
"""
params = self.get_params(url)
try:
year = params[self._meta.year_part]
month = param... | Parses the date from a url and uses it in the query. For objects which
are unique for date. |
def server_inspect_exception(self, req_event, rep_event, task_ctx, exc_info):
"""Called when an exception has been raised in the code run by ZeroRPC"""
# Hide the zerorpc internal frames for readability, for a REQ/REP or
# REQ/STREAM server the frames to hide are:
# - core.ServerBase._a... | Called when an exception has been raised in the code run by ZeroRPC |
def close(self):
"""Close this change stream.
Stops any "async for" loops using this change stream.
"""
if self.delegate:
return self._close()
# Never started.
future = self._framework.get_future(self.get_io_loop())
future.set_result(None)
re... | Close this change stream.
Stops any "async for" loops using this change stream. |
def get_location(dom, location):
"""
Get the node at the specified location in the dom.
Location is a sequence of child indices, starting at the children of the
root element. If there is no node at this location, raise a ValueError.
"""
node = dom.documentElement
for i in location:
n... | Get the node at the specified location in the dom.
Location is a sequence of child indices, starting at the children of the
root element. If there is no node at this location, raise a ValueError. |
def allele_reads_from_locus_reads(locus_reads, n_ref):
"""
Given a collection of LocusRead objects, returns a
list of AlleleRead objects
(which are split into prefix/allele/suffix nucleotide strings).
Parameters
----------
locus_reads : sequence of LocusRead records
n_ref : int
... | Given a collection of LocusRead objects, returns a
list of AlleleRead objects
(which are split into prefix/allele/suffix nucleotide strings).
Parameters
----------
locus_reads : sequence of LocusRead records
n_ref : int
Number of reference nucleotides affected by variant.
Generate... |
def _compensate_pressure(self, adc_p):
"""Compensate pressure.
Formula from datasheet Bosch BME280 Environmental sensor.
8.1 Compensation formulas in double precision floating point
Edition BST-BME280-DS001-10 | Revision 1.1 | May 2015.
"""
var_1 = (self._temp_fine / 2.0... | Compensate pressure.
Formula from datasheet Bosch BME280 Environmental sensor.
8.1 Compensation formulas in double precision floating point
Edition BST-BME280-DS001-10 | Revision 1.1 | May 2015. |
def split_sentences(tokens):
"""Split sentences (based on tokenised data), returns sentences as a list of lists of tokens, each sentence is a list of tokens"""
begin = 0
for i, token in enumerate(tokens):
if is_end_of_sentence(tokens, i):
yield tokens[begin:i+1]
begin = i+1
... | Split sentences (based on tokenised data), returns sentences as a list of lists of tokens, each sentence is a list of tokens |
def editors(self):
"""
Returns the editors that are associated with this edit.
:return [<XLineEdit>, ..]
"""
lay = self.layout()
return [lay.itemAt(i).widget() for i in range(lay.count())] | Returns the editors that are associated with this edit.
:return [<XLineEdit>, ..] |
def action_spatial(self, action):
"""Given an Action, return the right spatial action."""
if self.surf.surf_type & SurfType.FEATURE:
return action.action_feature_layer
elif self.surf.surf_type & SurfType.RGB:
return action.action_render
else:
assert self.surf.surf_type & (SurfType.RGB ... | Given an Action, return the right spatial action. |
def connect(self):
"""Connects and subscribes to the WebSocket Feed."""
if not self.connected():
self._ws = create_connection(self.WS_URI)
message = {
'type':self.WS_TYPE,
'product_id':self.WS_PRODUCT_ID
}
self._ws.send(dumps(message))
# There will be only one keep... | Connects and subscribes to the WebSocket Feed. |
def add_value(self, value, index_point):
"""The function is addeing new value to provied index. If index does not exist"""
if index_point not in self.index:
self.values.append(value)
self.index.append(index_point) | The function is addeing new value to provied index. If index does not exist |
def ensure(assertion, message=None):
"""
Checks an assertion argument for truth-ness. Will return ``True`` or
explicitly raise ``AssertionError``. This is to deal with environments
using ``python -O` or ``PYTHONOPTIMIZE=``.
:param assertion: some value to evaluate for truth-ness
:param message:... | Checks an assertion argument for truth-ness. Will return ``True`` or
explicitly raise ``AssertionError``. This is to deal with environments
using ``python -O` or ``PYTHONOPTIMIZE=``.
:param assertion: some value to evaluate for truth-ness
:param message: optional message used for raising AssertionError |
def bulk_overwrite(self, entities_and_kinds):
"""
Update the group to the given entities and sub-entity groups.
After this operation, the only members of this EntityGroup
will be the given entities, and sub-entity groups.
:type entities_and_kinds: List of (Entity, EntityKind) p... | Update the group to the given entities and sub-entity groups.
After this operation, the only members of this EntityGroup
will be the given entities, and sub-entity groups.
:type entities_and_kinds: List of (Entity, EntityKind) pairs.
:param entities_and_kinds: A list of entity, entity-... |
def setCodecPreferences(self, codecs):
"""
Override the default codec preferences.
See :meth:`RTCRtpSender.getCapabilities` and :meth:`RTCRtpReceiver.getCapabilities`
for the supported codecs.
:param: codecs: A list of :class:`RTCRtpCodecCapability`, in decreasing order
... | Override the default codec preferences.
See :meth:`RTCRtpSender.getCapabilities` and :meth:`RTCRtpReceiver.getCapabilities`
for the supported codecs.
:param: codecs: A list of :class:`RTCRtpCodecCapability`, in decreasing order
of preference. If empty, restores the defa... |
def compute_acf(cls, filename, start_index=None, end_index=None,
per_walker=False, walkers=None, parameters=None,
temps=None):
"""Computes the autocorrleation function of the model params in the
given file.
By default, parameter values are averaged over a... | Computes the autocorrleation function of the model params in the
given file.
By default, parameter values are averaged over all walkers at each
iteration. The ACF is then calculated over the averaged chain for each
temperature. An ACF per-walker will be returned instead if
``per... |
def _perturbation(self):
"""
Internal function for parameter initialization
Returns Gaussian perturbation
"""
if self.P>1:
scales = []
for term_i in range(self.n_randEffs):
_scales = sp.randn(self.diag[term_i].shape[0])
if s... | Internal function for parameter initialization
Returns Gaussian perturbation |
def _get_webapi_requests(self):
"""Update headers of webapi for Requests."""
headers = {
'Accept':
'*/*',
'Accept-Language':
'zh-CN,zh;q=0.8,gl;q=0.6,zh-TW;q=0.4',
'Connection':
'keep-alive',
'Content-Type':
... | Update headers of webapi for Requests. |
def sim(
self,
src,
tar,
qval=1,
mode='winkler',
long_strings=False,
boost_threshold=0.7,
scaling_factor=0.1,
):
"""Return the Jaro or Jaro-Winkler similarity of two strings.
Parameters
----------
src : str
... | Return the Jaro or Jaro-Winkler similarity of two strings.
Parameters
----------
src : str
Source string for comparison
tar : str
Target string for comparison
qval : int
The length of each q-gram (defaults to 1: character-wise matching)
... |
def make_filter(self, fieldname, query_func, expct_value):
''' makes a filter that will be appliead to an object's property based
on query_func '''
def actual_filter(item):
value = getattr(item, fieldname)
if query_func in NULL_AFFECTED_FILTERS and value is None:
... | makes a filter that will be appliead to an object's property based
on query_func |
def translate(s, table, deletions=""):
"""translate(s,table [,deletions]) -> string
Return a copy of the string s, where all characters occurring
in the optional argument deletions are removed, and the
remaining characters have been mapped through the given
translation table, which must be a string... | translate(s,table [,deletions]) -> string
Return a copy of the string s, where all characters occurring
in the optional argument deletions are removed, and the
remaining characters have been mapped through the given
translation table, which must be a string of length 256. The
deletions argument is... |
def get(aadb: str):
""" Retrieves a value from config """
if (aadb):
cfg = Config()
value = cfg.get(ConfigKeys.asset_allocation_database_path)
click.echo(value)
if not aadb:
click.echo("Use --help for more information.") | Retrieves a value from config |
def tpu_estimator_model_fn(model_type,
transformer_model,
model_dir,
use_tpu,
mesh_shape,
layout_rules,
batch_size,
sequence_length... | Create a TPUEstimator model function.
Args:
model_type: a string
transformer_model: a transformer.Unitransformer or transformer.Bitransformer
model_dir: a string
use_tpu: a boolean
mesh_shape: a mtf.Shape
layout_rules: a mtf.LayoutRules
batch_size: an integer
sequence_length: an integ... |
def _get_logical_raid_levels(self):
"""Gets the different raid levels configured on a server.
:returns a dictionary of logical_raid_levels set to true.
Example if raid level 1+0 and 6 are configured, it returns
{'logical_raid_level_10': 'true',
'logical_raid_level_6... | Gets the different raid levels configured on a server.
:returns a dictionary of logical_raid_levels set to true.
Example if raid level 1+0 and 6 are configured, it returns
{'logical_raid_level_10': 'true',
'logical_raid_level_6': 'true'} |
def clickmap(parser, token):
"""
Clickmap tracker template tag.
Renders Javascript code to track page visits. You must supply
your clickmap tracker ID (as a string) in the ``CLICKMAP_TRACKER_ID``
setting.
"""
bits = token.split_contents()
if len(bits) > 1:
raise TemplateSyntaxE... | Clickmap tracker template tag.
Renders Javascript code to track page visits. You must supply
your clickmap tracker ID (as a string) in the ``CLICKMAP_TRACKER_ID``
setting. |
def _send(self):
"""
Send data to statsd. Fire and forget. Cross fingers and it'll arrive.
"""
if not statsd:
return
for metric in self.metrics:
# Split the path into a prefix and a name
# to work with the statsd module's view of the world.
... | Send data to statsd. Fire and forget. Cross fingers and it'll arrive. |
def jit_load(self):
"""
Import and instantiate this JIT object
Returns
-------
"""
try:
model = importlib.import_module('.' + self.model, 'andes.models')
device = getattr(model, self.device)
self.system.__dict__[self.name] = device(se... | Import and instantiate this JIT object
Returns
------- |
def get_attrs(self):
"""Get the global attributes from underlying data set."""
return FrozenOrderedDict((a, getattr(self.ds, a)) for a in self.ds.ncattrs()) | Get the global attributes from underlying data set. |
def delete_all(config=None):
"""
Deletes all hosts from ssh config.
"""
storm_ = get_storm_instance(config)
try:
storm_.delete_all_entries()
print(get_formatted_message('all entries deleted.', 'success'))
except Exception as error:
print(get_formatted_message(str(error),... | Deletes all hosts from ssh config. |
def open_db(db, zipped=None, encoding=None, fieldnames_lower=True, case_sensitive=True):
"""Context manager. Allows reading DBF file (maybe even from zip).
:param str|unicode|file db: .dbf file name or a file-like object.
:param str|unicode zipped: .zip file path or a file-like object.
:param str|uni... | Context manager. Allows reading DBF file (maybe even from zip).
:param str|unicode|file db: .dbf file name or a file-like object.
:param str|unicode zipped: .zip file path or a file-like object.
:param str|unicode encoding: Encoding used by DB.
This will be used if there's no encoding information... |
def distributions(self, _args):
"""Lists all distributions currently available (i.e. that have already
been built)."""
ctx = self.ctx
dists = Distribution.get_distributions(ctx)
if dists:
print('{Style.BRIGHT}Distributions currently installed are:'
... | Lists all distributions currently available (i.e. that have already
been built). |
def append_manage_data_op(self, data_name, data_value, source=None):
"""Append a :class:`ManageData <stellar_base.operation.ManageData>`
operation to the list of operations.
:param str data_name: String up to 64 bytes long. If this is a new Name
it will add the given name/value pair... | Append a :class:`ManageData <stellar_base.operation.ManageData>`
operation to the list of operations.
:param str data_name: String up to 64 bytes long. If this is a new Name
it will add the given name/value pair to the account. If this Name
is already present then the associated... |
def dumps(self, script):
"Return a compressed representation of script as a binary string."
string = BytesIO()
self._dump(script, string, self._protocol, self._version)
return string.getvalue() | Return a compressed representation of script as a binary string. |
def set_entries(self, entries: List[Tuple[str, str]], titles, resources):
""" Provide the template the data for the toc entries """
self.entries = []
for flag, pagename in entries:
title = titles[pagename].children[0]
resource = resources.get(pagename, None)
... | Provide the template the data for the toc entries |
def get_support_variables(polynomial):
"""Gets the support of a polynomial.
"""
support = []
if is_number_type(polynomial):
return support
for monomial in polynomial.expand().as_coefficients_dict():
mon, _ = __separate_scalar_factor(monomial)
symbolic_support = flatten(split_... | Gets the support of a polynomial. |
def create_ver_browser(self, layout):
"""Create a version browser and insert it into the given layout
:param layout: the layout to insert the browser into
:type layout: QLayout
:returns: the created browser
:rtype: :class:`jukeboxcore.gui.widgets.browser.ComboBoxBrowser`
... | Create a version browser and insert it into the given layout
:param layout: the layout to insert the browser into
:type layout: QLayout
:returns: the created browser
:rtype: :class:`jukeboxcore.gui.widgets.browser.ComboBoxBrowser`
:raises: None |
def integers(start, count):
'''Generates in sequence the integral numbers within a range.
Note: This method uses deferred execution.
Args:
start: The first integer in the sequence.
count: The number of sequential integers to generate.
Returns:
A Queryable over the sp... | Generates in sequence the integral numbers within a range.
Note: This method uses deferred execution.
Args:
start: The first integer in the sequence.
count: The number of sequential integers to generate.
Returns:
A Queryable over the specified range of integers.
Ra... |
def get_published(self, layer_id, expand=[]):
"""
Get the latest published version of this layer.
:raises NotFound: if there is no published version.
"""
target_url = self.client.get_url('VERSION', 'GET', 'published', {'layer_id': layer_id})
return self._get(target_url, e... | Get the latest published version of this layer.
:raises NotFound: if there is no published version. |
def get_extra_functions(self) -> Dict[str, Callable]:
"""Get a list of additional features
Returns:
Dict[str, Callable]: A dict of methods marked as additional features.
Method can be called with ``get_extra_functions()["methodName"]()``.
"""
if self.channel_type... | Get a list of additional features
Returns:
Dict[str, Callable]: A dict of methods marked as additional features.
Method can be called with ``get_extra_functions()["methodName"]()``. |
def is_de_listed(self):
"""
判断合约是否过期
"""
env = Environment.get_instance()
instrument = env.get_instrument(self._order_book_id)
current_date = env.trading_dt
if instrument.de_listed_date is not None:
if instrument.de_listed_date.date() > env.config.bas... | 判断合约是否过期 |
def _write_cache(self, lines, append=False):
"""Write virtualenv metadata to cache."""
mode = 'at' if append else 'wt'
with open(self.filepath, mode, encoding='utf8') as fh:
fh.writelines(line + '\n' for line in lines) | Write virtualenv metadata to cache. |
def fit(self, train_x, train_y):
""" Fit the regressor with more data.
Args:
train_x: A list of NetworkDescriptor.
train_y: A list of metric values.
"""
if self.first_fitted:
self.incremental_fit(train_x, train_y)
else:
self.first_f... | Fit the regressor with more data.
Args:
train_x: A list of NetworkDescriptor.
train_y: A list of metric values. |
def _process_results(self, raw_results, *args, **kwargs):
"""
Naively translate between the 'aggregations' search result data
structure returned by ElasticSearch 2+ in response to 'aggs' queries
into a structure with 'facets'-like content that Haystack (2.6.1) can
understand and ... | Naively translate between the 'aggregations' search result data
structure returned by ElasticSearch 2+ in response to 'aggs' queries
into a structure with 'facets'-like content that Haystack (2.6.1) can
understand and process, then pass it on to Haystack's default result
processing code.... |
def read_raw(data_path):
"""
Parameters
----------
data_path : str
"""
with open(data_path, 'rb') as f:
data = pickle.load(f)
return data | Parameters
----------
data_path : str |
def get_trees(self, data, showerrors = False): # -> list:
""" returns a list of trees with valid guesses """
if showerrors:
raise NotImplementedError("This parser doesn't implement errors")
self.data = data
self.index = 0
try:
return [self.__aux_parser(sel... | returns a list of trees with valid guesses |
def direct_messages(self, delegate, params={}, extra_args=None):
"""Get direct messages for the authenticating user.
Search results are returned one message at a time a DirectMessage
objects"""
return self.__get('/direct_messages.xml', delegate, params,
txml.Di... | Get direct messages for the authenticating user.
Search results are returned one message at a time a DirectMessage
objects |
def _on_group_stream_changed(self, data):
"""Handle group stream change."""
self._groups.get(data.get('id')).update_stream(data) | Handle group stream change. |
def _run_apt_command(cmd, fatal=False):
"""Run an apt command with optional retries.
:param: cmd: str: The apt command to run.
:param: fatal: bool: Whether the command's output should be checked and
retried.
"""
# Provide DEBIAN_FRONTEND=noninteractive if not present in the environment.
... | Run an apt command with optional retries.
:param: cmd: str: The apt command to run.
:param: fatal: bool: Whether the command's output should be checked and
retried. |
async def _on_trace_notification(self, trace_event):
"""Callback function called when a trace chunk is received.
Args:
trace_chunk (dict): The received trace chunk information
"""
conn_string = trace_event.get('connection_string')
payload = trace_event.get('payload'... | Callback function called when a trace chunk is received.
Args:
trace_chunk (dict): The received trace chunk information |
def on_quote_changed(self, tiny_quote):
"""报价、摆盘实时数据变化时,会触发该回调"""
# TinyQuoteData
data = tiny_quote
str_log = "on_quote_changed symbol=%s open=%s high=%s close=%s low=%s" % (data.symbol, data.openPrice, data.highPrice, data.lastPrice, data.lowPrice)
self.log(str_log) | 报价、摆盘实时数据变化时,会触发该回调 |
def create(self, data):
""" Create object from the given data.
The given data may or may not have been validated prior to calling
this function. This function will try its best in creating the object.
If the resulting object cannot be produced, raises ``ValidationError``.
The s... | Create object from the given data.
The given data may or may not have been validated prior to calling
this function. This function will try its best in creating the object.
If the resulting object cannot be produced, raises ``ValidationError``.
The spec can affect how individual fields... |
def calculated_intervals(self, value):
"""
Set the calculated intervals
This will be written to the stream_status collection if it's in the database channel
:param value: The calculated intervals
:type value: TimeIntervals, TimeInterval, list[TimeInterval]
"""
if... | Set the calculated intervals
This will be written to the stream_status collection if it's in the database channel
:param value: The calculated intervals
:type value: TimeIntervals, TimeInterval, list[TimeInterval] |
def addExpectedFailure(self, test: unittest.case.TestCase, err: tuple) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param err: tuple of the form (Exception class, Exception instance, tracebac... | Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param err: tuple of the form (Exception class, Exception instance, traceback) |
def poll_event(self):
"""
Waits for an event to happen and returns a string related to the event.
If the event is a normal (letter) key press, the letter is returned (case sensitive)
:return: Event type
"""
# Flush all inputs before this one that were done since last po... | Waits for an event to happen and returns a string related to the event.
If the event is a normal (letter) key press, the letter is returned (case sensitive)
:return: Event type |
def eigenvectors_nrev(T, right=True):
r"""Compute eigenvectors of transition matrix.
Parameters
----------
T : (d, d) ndarray
Transition matrix (stochastic matrix)
k : int or tuple of ints, optional
Compute the first k eigenvalues of T
right : bool, optional
If right=Tru... | r"""Compute eigenvectors of transition matrix.
Parameters
----------
T : (d, d) ndarray
Transition matrix (stochastic matrix)
k : int or tuple of ints, optional
Compute the first k eigenvalues of T
right : bool, optional
If right=True compute right eigenvectors, left eigenve... |
def consume_network_packet_messages_from_redis():
"""consume_network_packet_messages_from_redis
Setup a ``celery_connectors.KombuSubscriber`` to consume meessages
from the ``FORWARD_BROKER_URL`` broker in the ``FORWARD_QUEUE``
queue.
"""
# end of recv_message
# Initialize KombuSubscriber
... | consume_network_packet_messages_from_redis
Setup a ``celery_connectors.KombuSubscriber`` to consume meessages
from the ``FORWARD_BROKER_URL`` broker in the ``FORWARD_QUEUE``
queue. |
def get_imports(filename):
"""Get all the imports in a file.
Each import is a tuple of:
(name, alias, is_from, is_star, source_file)
"""
with open(filename, "rb") as f:
src = f.read()
finder = ImportFinder()
finder.visit(ast.parse(src, filename=filename))
imports = []
for ... | Get all the imports in a file.
Each import is a tuple of:
(name, alias, is_from, is_star, source_file) |
def get(self, file_id: str) -> [typing.BinaryIO, str, datetime.datetime]:
"""Return the file identified by a file_id string, its file name and upload date."""
raise NotImplementedError("Downloading files for downloading files in FileStore has not been implemented yet.") | Return the file identified by a file_id string, its file name and upload date. |
def fit(self, X, y=None, groups=None, **fit_params):
"""
Run fit on the estimator with parameters chosen sequentially by SigOpt.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Training vector, where n_samples in the number of samples and
... | Run fit on the estimator with parameters chosen sequentially by SigOpt.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Training vector, where n_samples in the number of samples and
n_features is the number of features.
y : array-like, shape ... |
def entrypoint(cls):
"""Mark the decorated command as the intended entrypoint of the
command module.
"""
if not isinstance(cls, type) or not issubclass(cls, Command):
raise TypeError(f"inappropriate entrypoint instance of type {cls.__class__}")
cls._argcmdr_entrypoint_ = True
return cls | Mark the decorated command as the intended entrypoint of the
command module. |
def url_(client_id: str, redirect_uri: str, *, scope: str = None, state: str = None, secure: bool = True) -> str:
"""Construct a OAuth2 URL instead of an OAuth2 object."""
attrs = {
'client_id': client_id,
'redirect_uri': quote(redirect_uri)
}
if scope is not Non... | Construct a OAuth2 URL instead of an OAuth2 object. |
def crosslisting_feature(catalog, soup):
"""Parses all the crosslistings. These refer to the similar CRNs,
such as a grad & undergrad level course.
"""
listing = {}
for elem in soup.coursedb.findAll('crosslisting'):
seats = int(elem['seats'])
crns = [safeInt(crn.string) for crn in el... | Parses all the crosslistings. These refer to the similar CRNs,
such as a grad & undergrad level course. |
def _get_value(data_structure, key):
"""Return the value of a data_structure given a path.
:param data_structure: Dictionary, list or subscriptable object.
:param key: Array with the defined path ordered.
"""
if len(key) == 0:
raise KeyError()
value = data_structure[key[0]]
if len(k... | Return the value of a data_structure given a path.
:param data_structure: Dictionary, list or subscriptable object.
:param key: Array with the defined path ordered. |
def getSparseTensor(numNonzeros, inputSize, outputSize,
onlyPositive=False,
fixedRange=1.0/24):
"""
Return a random tensor that is initialized like a weight matrix
Size is outputSize X inputSize, where weightSparsity% of each row is non-zero
"""
# Initialize weights in ... | Return a random tensor that is initialized like a weight matrix
Size is outputSize X inputSize, where weightSparsity% of each row is non-zero |
def step( self, local_inv=None, peer_table=None, peer_queue=None, con=None, path=None ):
"""
Execute one round of the peer discovery algorithm:
* Add at most 10 new peers from the pending peer queue
(but ping them first, and drop hosts if the pending queue
gets to be too long).
... | Execute one round of the peer discovery algorithm:
* Add at most 10 new peers from the pending peer queue
(but ping them first, and drop hosts if the pending queue
gets to be too long).
* Execute one step of the MHRWDA algorithm. Add any new
peers from the neighbor sets discover... |
def namespace(self):
"""
Return the Namespace URI (if any) as a String for the current tag
"""
if self.m_name == -1 or (self.m_event != const.START_TAG and self.m_event != const.END_TAG):
return u''
# No Namespace
if self.m_namespaceUri == 0xFFFFFFFF:
... | Return the Namespace URI (if any) as a String for the current tag |
def get_repo(self, auth, username, repo_name):
"""
Returns a the repository with name ``repo_name`` owned by
the user with username ``username``.
:param auth.Authentication auth: authentication object
:param str username: username of owner of repository
:param str repo_n... | Returns a the repository with name ``repo_name`` owned by
the user with username ``username``.
:param auth.Authentication auth: authentication object
:param str username: username of owner of repository
:param str repo_name: name of repository
:return: a representation of the re... |
def eval(self, construct):
"""Evaluate an expression returning its value.
The Python equivalent of the CLIPS eval command.
"""
data = clips.data.DataObject(self._env)
if lib.EnvEval(self._env, construct.encode(), data.byref) != 1:
raise CLIPSError(self._env)
... | Evaluate an expression returning its value.
The Python equivalent of the CLIPS eval command. |
def config_merge_text(source='running',
merge_config=None,
merge_path=None,
saltenv='base'):
'''
.. versionadded:: 2019.2.0
Return the merge result of the configuration from ``source`` with the
merge configuration, as plain text (without... | .. versionadded:: 2019.2.0
Return the merge result of the configuration from ``source`` with the
merge configuration, as plain text (without loading the config on the
device).
source: ``running``
The configuration type to retrieve from the network device. Default:
``running``. Availabl... |
def delete_file(self, path, prefixed_path, source_storage):
"""
We don't need all the file_exists stuff because we have to override all files anyways.
"""
if self.faster:
return True
else:
return super(Command, self).delete_file(path, prefixed_path, source... | We don't need all the file_exists stuff because we have to override all files anyways. |
def map(source = 'density', z = 0, x = 0, y = 0, format = '@1x.png',
srs='EPSG:4326', bin=None, hexPerTile=None, style='classic.point',
taxonKey=None, country=None, publishingCountry=None, publisher=None,
datasetKey=None, year=None, basisOfRecord=None, **kwargs):
'''
GBIF maps API
:param sou... | GBIF maps API
:param source: [str] Either ``density`` for fast, precalculated tiles,
or ``adhoc`` for any search
:param z: [str] zoom level
:param x: [str] longitude
:param y: [str] latitude
:param format: [str] format of returned data. One of:
- ``.mvt`` - vector tile
- ``@Hx.... |
def cli(*args, **kwargs):
    """
    General-purpose automation tool.

    See `GitHub <https://github.com/littlemo/mohand>`_ for details.
    """
    message = 'cli: {} {}'.format(args, kwargs)
    log.debug(message)
    # Options passed on the command line take precedence: fold them into
    # the shared ``env`` configuration object.
    env.update(kwargs)
详情参考 `GitHub <https://github.com/littlemo/mohand>`_ |
def database(self, database_id, ddl_statements=(), pool=None):
"""Factory to create a database within this instance.
:type database_id: str
:param database_id: The ID of the instance.
:type ddl_statements: list of string
:param ddl_statements: (Optional) DDL statements, excludi... | Factory to create a database within this instance.
:type database_id: str
:param database_id: The ID of the instance.
:type ddl_statements: list of string
:param ddl_statements: (Optional) DDL statements, excluding the
'CREATE DATABSE' statement.
... |
def numeric_function_clean_dataframe(self, axis):
"""Preprocesses numeric functions to clean dataframe and pick numeric indices.
Args:
axis: '0' if columns and '1' if rows.
Returns:
Tuple with return value(if any), indices to apply func to & cleaned Manager.
"""... | Preprocesses numeric functions to clean dataframe and pick numeric indices.
Args:
axis: '0' if columns and '1' if rows.
Returns:
Tuple with return value(if any), indices to apply func to & cleaned Manager. |
def javascript_escape(s, quote_double_quotes=True):
"""
Escape characters for javascript strings.
"""
ustring_re = re.compile(u"([\u0080-\uffff])")
def fix(match):
return r"\u%04x" % ord(match.group(1))
if type(s) == str:
s = s.decode('utf-8')
elif type(s) != six.text_type:... | Escape characters for javascript strings. |
def is_eligible(self, timestamp, status, notif_number, in_notif_time, interval, escal_period):
# pylint: disable=too-many-return-statements
"""Check if the escalation is eligible (notification is escalated or not)
Escalation is NOT eligible in ONE of the following condition is fulfilled::
... | Check if the escalation is eligible (notification is escalated or not)
Escalation is NOT eligible in ONE of the following condition is fulfilled::
* escalation is not time based and notification number not in range
[first_notification;last_notification] (if last_notif == 0, it's infinity)
... |
def virtualenv_no_global():
"""
Return True if in a venv and no system site packages.
"""
#this mirrors the logic in virtualenv.py for locating the no-global-site-packages.txt file
site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
no_global_file = os.path.join(site_mod_dir, 'no-glob... | Return True if in a venv and no system site packages. |
def set_passport_data_errors(self, user_id, errors):
"""
Informs a user that some of the Telegram Passport elements they provided contains errors. The user will not be able to re-submit their Passport to you until the errors are fixed (the contents of the field for which you returned the error must chan... | Informs a user that some of the Telegram Passport elements they provided contains errors. The user will not be able to re-submit their Passport to you until the errors are fixed (the contents of the field for which you returned the error must change). Returns True on success.
Use this if the data submitted by t... |
def add_neighbor(self, edge: "Edge") -> None:
"""
Adds a new neighbor to the node.
Arguments:
edge (Edge): The edge that would connect this node with its neighbor.
"""
if edge is None or (edge.source != self and edge.target != self):
return
... | Adds a new neighbor to the node.
Arguments:
edge (Edge): The edge that would connect this node with its neighbor. |
def header(self, array):
    """Specify the header of the table.

    :param array: iterable of cell values for the header row.
    :return: this table instance, to allow call chaining.
    """
    self._check_row_size(array)
    self._header = [obj2unicode(cell) for cell in array]
    return self
def relations_used(self):
    """
    Return list of all relations used to connect edges
    """
    graph = self.get_graph()
    # Collect the distinct 'pred' attribute of every edge in one pass.
    predicates = {attrs['pred'] for _, _, attrs in graph.edges(data=True)}
    return list(predicates)
def _build_environ(self) -> Dict[str, Optional[str]]:
"""
Build environment variables suitable for passing to the Model.
"""
d: Dict[str, Optional[str]] = {}
if self.__config__.case_insensitive:
env_vars = {k.lower(): v for k, v in os.environ.items()}
else:
... | Build environment variables suitable for passing to the Model. |
def transfer_size(self):
    """Size of transfer in bytes (e.g.: 8, 4k, 2m, 1g)"""
    raw = self.attributes['transfer_size']
    # A single whitespace-separated string is split into individual sizes;
    # any other iterable is used as-is.
    if isinstance(raw, six.string_types):
        raw = shlex.split(raw)
    return [str(element) for element in raw]
def ask(question):
'''
Infinite loop to get yes or no answer or quit the script.
'''
while True:
ans = input(question)
al = ans.lower()
if match('^y(es)?$', al):
return True
elif match('^n(o)?$', al):
return False
elif match('^q(uit)?$', al... | Infinite loop to get yes or no answer or quit the script. |
def find_all_files(glob):
"""
Finds all files in the django finders for a given glob,
returns the file path, if available, and the django storage object.
storage objects must implement the File storage API:
https://docs.djangoproject.com/en/dev/ref/files/storage/
"""
for finder in finders.ge... | Finds all files in the django finders for a given glob,
returns the file path, if available, and the django storage object.
storage objects must implement the File storage API:
https://docs.djangoproject.com/en/dev/ref/files/storage/ |
def get(self, path, content=True, type=None, format=None, load_alternative_format=True):
""" Takes a path for an entity and returns its model"""
path = path.strip('/')
ext = os.path.splitext(path)[1]
# Not a notebook?
if not self.exists(path) or (type != 'notebook' if type else ... | Takes a path for an entity and returns its model |
def write_file(content, *path):
    """
    Simply write some content to a file, overriding the file if necessary.

    :param content: text to write.
    :param path: path segments, joined with ``os.path.join``.
    :return: the number of characters written.
    """
    destination = os.path.join(*path)
    with open(destination, "w") as handle:
        return handle.write(content)
def clean_whitespace(statement):
"""
Remove any consecutive whitespace characters from the statement text.
"""
import re
# Replace linebreaks and tabs with spaces
statement.text = statement.text.replace('\n', ' ').replace('\r', ' ').replace('\t', ' ')
# Remove any leading or trailing white... | Remove any consecutive whitespace characters from the statement text.
def post(self, res_path, data=None, files=None, timeout=10.):
"""
Post operation.
:param str res_path:
Resource path.
:param list data:
Request parameters for data.
:param list files:
Request parameters for files.
:param float timeout:
Timeout in seconds.
:rtype:
tuple
:ret... | Post operation.
:param str res_path:
Resource path.
:param list data:
Request parameters for data.
:param list files:
Request parameters for files.
:param float timeout:
Timeout in seconds.
:rtype:
tuple
:return:
Tuple with status code and response body. |
def handle_get_token(self, req):
"""Handles the various `request for token and service end point(s)` calls.
There are various formats to support the various auth servers in the
past. Examples::
GET <auth-prefix>/v1/<act>/auth
X-Auth-User: <act>:<usr> or X-Storage-U... | Handles the various `request for token and service end point(s)` calls.
There are various formats to support the various auth servers in the
past. Examples::
GET <auth-prefix>/v1/<act>/auth
X-Auth-User: <act>:<usr> or X-Storage-User: <usr>
X-Auth-Key: <key>... |
def browse(self, ml_item=None, start=0, max_items=100,
full_album_art_uri=False, search_term=None, subcategories=None):
"""Browse (get sub-elements from) a music library item.
Args:
ml_item (`DidlItem`): the item to browse, if left out or
`None`, items at the ... | Browse (get sub-elements from) a music library item.
Args:
ml_item (`DidlItem`): the item to browse, if left out or
`None`, items at the root level will be searched.
start (int): the starting index of the results.
max_items (int): the maximum number of items ... |
def _run_paired(paired):
"""Run somatic variant calling pipeline.
"""
from bcbio.structural import titancna
work_dir = _sv_workdir(paired.tumor_data)
seg_files = model_segments(tz.get_in(["depth", "bins", "normalized"], paired.tumor_data),
work_dir, paired)
call_fi... | Run somatic variant calling pipeline. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.