code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def del_pipes(self, pipes, *args, **kwargs):
"""
Deletes a sequence of pipes from the ``Dagger`` in the specified order.
Takes optional arguments for ``Dagger.del_pipe``.
Arguments:
- pipes(sequence of valid ``del_pipe`` arguments) Sequence of pipes or
... | Deletes a sequence of pipes from the ``Dagger`` in the specified order.
Takes optional arguments for ``Dagger.del_pipe``.
Arguments:
- pipes(sequence of valid ``del_pipe`` arguments) Sequence of pipes or
other valid ``Dagger.del_pipe`` arguments to be removed fr... |
def _parse_topic(client, command, actor, args):
"""Parse a TOPIC and update channel state, then dispatch a TOPIC event."""
channel, _, topic = args.partition(" :")
channel = client.server.get_channel(channel)
channel.topic = topic or None
if actor:
actor = User(actor)
client.dispatch_eve... | Parse a TOPIC and update channel state, then dispatch a TOPIC event. |
def get_opcodes(self):
"""Returns a list of opcodes. Opcodes are the same as defined by
:py:mod:`difflib`."""
if not self.opcodes:
d, m, opcodes = edit_distance_backpointer(self.seq1, self.seq2,
action_function=self.action_functi... | Returns a list of opcodes. Opcodes are the same as defined by
:py:mod:`difflib`. |
def get(self, key, get_cas=False):
"""
Get a key from server.
:param key: Key's name
:type key: six.string_types
:param get_cas: If true, return (value, cas), where cas is the new CAS value.
:type get_cas: boolean
:return: Returns a key data from server.
... | Get a key from server.
:param key: Key's name
:type key: six.string_types
:param get_cas: If true, return (value, cas), where cas is the new CAS value.
:type get_cas: boolean
:return: Returns a key data from server.
:rtype: object |
def create_ogr_field_from_definition(field_definition):
"""Helper to create a field from definition.
:param field_definition: The definition of the field (see:
safe.definitions.fields).
:type field_definition: dict
:return: The new ogr field definition.
:rtype: ogr.FieldDefn
"""
if... | Helper to create a field from definition.
:param field_definition: The definition of the field (see:
safe.definitions.fields).
:type field_definition: dict
:return: The new ogr field definition.
:rtype: ogr.FieldDefn |
def create_object(self, data, view_kwargs):
"""Create an object through sqlalchemy
:param dict data: the data validated by marshmallow
:param dict view_kwargs: kwargs from the resource view
:return DeclarativeMeta: an object from sqlalchemy
"""
self.before_create_object(... | Create an object through sqlalchemy
:param dict data: the data validated by marshmallow
:param dict view_kwargs: kwargs from the resource view
:return DeclarativeMeta: an object from sqlalchemy |
def select(self, selection_specs=None, **kwargs):
"""Applies selection by dimension name
Applies a selection along the dimensions of the object using
keyword arguments. The selection may be narrowed to certain
objects using selection_specs. For container objects the
selection wi... | Applies selection by dimension name
Applies a selection along the dimensions of the object using
keyword arguments. The selection may be narrowed to certain
objects using selection_specs. For container objects the
selection will be applied to all children as well.
Selections ma... |
def outer_product(vec0: QubitVector, vec1: QubitVector) -> QubitVector:
"""Direct product of qubit vectors
The tensor ranks must match and qubits must be disjoint.
"""
R = vec0.rank
R1 = vec1.rank
N0 = vec0.qubit_nb
N1 = vec1.qubit_nb
if R != R1:
raise ValueError('Incompatibly... | Direct product of qubit vectors
The tensor ranks must match and qubits must be disjoint. |
def send_extended(self, address, timestamp, value):
"""Queue an extended datapoint (ie. a string), return True/False for success.
Arguments:
address -- uint64_t representing a unique metric.
timestamp -- uint64_t representing number of nanoseconds (10^-9) since epoch.
value -- s... | Queue an extended datapoint (ie. a string), return True/False for success.
Arguments:
address -- uint64_t representing a unique metric.
timestamp -- uint64_t representing number of nanoseconds (10^-9) since epoch.
value -- string value being stored. |
def create_fd (self):
"""Create open file descriptor."""
if self.filename is None:
return i18n.get_encoded_writer(encoding=self.output_encoding,
errors=self.codec_errors)
return codecs.open(self.filename, "wb", self.output_encoding,
... | Create open file descriptor. |
def plugins_show(what=None, name=None, version=None, details=False):
"""
Show details of available plugins
Parameters
----------
what: Class of plugins e.g., backend
name: Name of the plugin e.g., s3
version: Version of the plugin
details: Show details be shown?
"""
global plug... | Show details of available plugins
Parameters
----------
what: Class of plugins e.g., backend
name: Name of the plugin e.g., s3
version: Version of the plugin
details: Show details be shown? |
def _wmorlet(f0, sd, sampling_rate, ns=5):
"""
adapted from nitime
returns a complex morlet wavelet in the time domain
Parameters
----------
f0 : center frequency
sd : standard deviation of frequency
sampling_rate : samplingrate
ns : window length in number of stand... | adapted from nitime
returns a complex morlet wavelet in the time domain
Parameters
----------
f0 : center frequency
sd : standard deviation of frequency
sampling_rate : samplingrate
ns : window length in number of standard deviations |
def pdf(self, mu):
"""
PDF for Poisson prior
Parameters
----------
mu : float
Latent variable for which the prior is being formed over
Returns
----------
- p(mu)
"""
if self.transform is not None:
mu = self.transfo... | PDF for Poisson prior
Parameters
----------
mu : float
Latent variable for which the prior is being formed over
Returns
----------
- p(mu) |
def add_group(self, name, desc, status):
"""
Add a new group to a network.
"""
existing_group = get_session().query(ResourceGroup).filter(ResourceGroup.name==name, ResourceGroup.network_id==self.id).first()
if existing_group is not None:
raise HydraError("A resou... | Add a new group to a network. |
def component_acting_parent_tag(parent_tag, tag):
"""
Only intended for use in getting components, look for tag name of fig-group
and if so, find the first fig tag inside it as the acting parent tag
"""
if parent_tag.name == "fig-group":
if (len(tag.find_previous_siblings("fig")) > 0):
... | Only intended for use in getting components, look for tag name of fig-group
and if so, find the first fig tag inside it as the acting parent tag |
def face_adjacency(faces=None,
mesh=None,
return_edges=False):
"""
Returns an (n,2) list of face indices.
Each pair of faces in the list shares an edge, making them adjacent.
Parameters
----------
faces : (n, 3) int, or None
List of vertex indices ... | Returns an (n,2) list of face indices.
Each pair of faces in the list shares an edge, making them adjacent.
Parameters
----------
faces : (n, 3) int, or None
List of vertex indices representing triangles
mesh : Trimesh object
If passed will used cached edges instead of faces
re... |
def mmGetPlotConnectionsPerColumn(self, title="Connections per Columns"):
"""
Returns plot of # connections per column.
@return (Plot) plot
"""
plot = Plot(self, title)
connectedCounts = numpy.ndarray(self.getNumColumns(), dtype=uintType)
self.getConnectedCounts(connectedCounts)
plot.a... | Returns plot of # connections per column.
@return (Plot) plot |
def simplify_types(types):
# type: (Iterable[AbstractType]) -> List[AbstractType]
"""Given some types, give simplified types representing the union of types."""
flattened = flatten_types(types)
items = filter_ignored_items(flattened)
items = [simplify_recursive(item) for item in items]
items = m... | Given some types, give simplified types representing the union of types. |
def create_build_system(working_dir, buildsys_type=None, package=None, opts=None,
write_build_scripts=False, verbose=False,
build_args=[], child_build_args=[]):
"""Return a new build system that can build the source in working_dir."""
from rez.plugin_managers impo... | Return a new build system that can build the source in working_dir. |
def find(self, **filter_args):
"""
Find exactly one resource in scope of this manager, by matching
resource properties against the specified filter arguments, and return
its Python resource object (e.g. for a CPC, a :class:`~zhmcclient.Cpc`
object is returned).
Any resou... | Find exactly one resource in scope of this manager, by matching
resource properties against the specified filter arguments, and return
its Python resource object (e.g. for a CPC, a :class:`~zhmcclient.Cpc`
object is returned).
Any resource property may be specified in a filter argument.... |
def _host_libc(self):
"""Use the --libc-dir option if provided, otherwise invoke a host compiler to find libc dev."""
libc_dir_option = self.get_options().libc_dir
if libc_dir_option:
maybe_libc_crti = os.path.join(libc_dir_option, self._LIBC_INIT_OBJECT_FILE)
if os.path.isfile(maybe_libc_crti):... | Use the --libc-dir option if provided, otherwise invoke a host compiler to find libc dev. |
def _addSpecfile(self, specfile, path):
"""Adds a new specfile entry to SiiContainer.info. See also
:class:`SiiContainer.addSpecfile()`.
:param specfile: the name of an ms-run file
:param path: filedirectory for loading and saving the ``siic`` files
"""
self.info[specfil... | Adds a new specfile entry to SiiContainer.info. See also
:class:`SiiContainer.addSpecfile()`.
:param specfile: the name of an ms-run file
:param path: filedirectory for loading and saving the ``siic`` files |
def set_preference(data, chunk_size):
"""Return the median of the distribution of pairwise L2 Euclidean distances
between samples (the rows of 'data') as the default preference parameter
for Affinity Propagation clustering.
Parameters
----------
data : array of shape (N_samples, N_feat... | Return the median of the distribution of pairwise L2 Euclidean distances
between samples (the rows of 'data') as the default preference parameter
for Affinity Propagation clustering.
Parameters
----------
data : array of shape (N_samples, N_features)
The data-set submitted for Affi... |
def create(self, **kwargs):
"""
Creates a new resource.
:param kwargs: The properties of the resource
:return: The created item returned by the API
wrapped as a `Model` object
"""
response = self.ghost.execute_post('%s/' % self._type_name, json={
... | Creates a new resource.
:param kwargs: The properties of the resource
:return: The created item returned by the API
wrapped as a `Model` object |
def institute(self, institute_id):
"""Featch a single institute from the backend
Args:
institute_id(str)
Returns:
Institute object
"""
LOG.debug("Fetch institute {}".format(institute_id))
institute_obj = self.institute_collection.... | Featch a single institute from the backend
Args:
institute_id(str)
Returns:
Institute object |
def cholesky(L, b, P=None):
'''
P A P' = L L'
'''
logger.debug('Solving system of dim {} with cholesky factors'.format(len(b)))
## convert L and U to csr format
is_csr = scipy.sparse.isspmatrix_csr(L)
is_csc = scipy.sparse.isspmatrix_csc(L)
if not is_csr and not is_csc:
warnin... | P A P' = L L' |
def wasb_log_exists(self, remote_log_location):
"""
Check if remote_log_location exists in remote storage
:param remote_log_location: log's location in remote storage
:return: True if location exists else False
"""
try:
return self.hook.check_for_blob(self.was... | Check if remote_log_location exists in remote storage
:param remote_log_location: log's location in remote storage
:return: True if location exists else False |
def forward(self, x, w):
"""Forward function.
:param x: Feature indices.
:type x: torch.Tensor of shape (batch_size * length)
:param w: Feature weights.
:type w: torch.Tensor of shape (batch_size * length)
:return: Output of linear layer.
:rtype: torch.Tensor of ... | Forward function.
:param x: Feature indices.
:type x: torch.Tensor of shape (batch_size * length)
:param w: Feature weights.
:type w: torch.Tensor of shape (batch_size * length)
:return: Output of linear layer.
:rtype: torch.Tensor of shape (batch_size, num_classes) |
def bin_number(datapoint, intervals):
"""
Given a datapoint and intervals representing bins, returns the number
represented in binned form, where the bin including the value is
set to 1 and all others are 0.
"""
index = numpy.searchsorted(intervals, datapoint)
return [0 if index != i else 1 for i in rang... | Given a datapoint and intervals representing bins, returns the number
represented in binned form, where the bin including the value is
set to 1 and all others are 0. |
def validate(self):
"""Validate the resource using its voluptuous schema"""
try:
# update _resource to have default values from the schema
self._resource = self.schema(self._resource)
except MultipleInvalid as e:
errors = [format_error(err, self.resource_type)... | Validate the resource using its voluptuous schema |
async def _get_response(self, message):
"""
Get response running the view with await syntax if it is a
coroutine function, otherwise just run it the normal way.
"""
view = self.discovery_view(message)
if not view:
return
if inspect.iscoroutinefunctio... | Get response running the view with await syntax if it is a
coroutine function, otherwise just run it the normal way. |
def _connect_signals(self, model):
"""
Connect signals for the model.
"""
for signal in self._signals:
receiver = self._signals[signal]
signal.connect(receiver, sender=model, dispatch_uid=self._dispatch_uid(signal, model)) | Connect signals for the model. |
def count_divisors(n):
""" Count the number of divisors of an integer n
Args:
n (int): strictly positive integer
Returns:
The number of distinct divisors of n
Raises:
TypeError: if n is not an integer
ValueError: if n is negative
"""
if not isinstance(n, int... | Count the number of divisors of an integer n
Args:
n (int): strictly positive integer
Returns:
The number of distinct divisors of n
Raises:
TypeError: if n is not an integer
ValueError: if n is negative |
def xpathNextAncestor(self, cur):
"""Traversal function for the "ancestor" direction the
ancestor axis contains the ancestors of the context node;
the ancestors of the context node consist of the parent of
context node and the parent's parent and so on; the nodes
are orde... | Traversal function for the "ancestor" direction the
ancestor axis contains the ancestors of the context node;
the ancestors of the context node consist of the parent of
context node and the parent's parent and so on; the nodes
are ordered in reverse document order; thus the paren... |
def wnexpd(left, right, window):
"""
Expand each of the intervals of a double precision window.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wnexpd_c.html
:param left: Amount subtracted from each left endpoint.
:type left: float
:param right: Amount added to each right endpoint.
... | Expand each of the intervals of a double precision window.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wnexpd_c.html
:param left: Amount subtracted from each left endpoint.
:type left: float
:param right: Amount added to each right endpoint.
:type right: float
:param window: Wind... |
def _partition_estimators(n_estimators, n_jobs):
"""Private function used to partition estimators between jobs."""
# Compute the number of jobs
if n_jobs == -1:
n_jobs = min(cpu_count(), n_estimators)
else:
n_jobs = min(n_jobs, n_estimators)
# Partition estimators between jobs
... | Private function used to partition estimators between jobs. |
def append_main_thread(self):
"""create & start main thread
:return: None
"""
thread = MainThread(main_queue=self.main_queue,
main_spider=self.main_spider,
branch_spider=self.branch_spider)
thread.daemon = True
thre... | create & start main thread
:return: None |
def idf(posting, document_count):
"""A function to calculate the inverse document frequency for a posting.
This is shared between the builder and the index.
"""
documents_with_term = 0
for field_name in posting:
if field_name == "_index":
continue
documents_with_term += l... | A function to calculate the inverse document frequency for a posting.
This is shared between the builder and the index. |
def wiki_pages(self, extra_params=None):
"""
All Wiki Pages with access to this Space
"""
return self.api._get_json(
WikiPage,
space=self,
rel_path=self._build_rel_path('wiki_pages'),
extra_params=extra_params,
) | All Wiki Pages with access to this Space |
def transition(prior_state, next_state):
"""
Transitions to a non-standard state
Raises InvalidStateTransition if next_state is not allowed.
:param prior_state: <str>
:param next_state: <str>
:return: <str>
"""
if next_state not in STATES[prior_state][TRANSITION]:
acceptable = ... | Transitions to a non-standard state
Raises InvalidStateTransition if next_state is not allowed.
:param prior_state: <str>
:param next_state: <str>
:return: <str> |
def to_(self, attrvals):
""" Create a list of Attribute instances.
:param attrvals: A dictionary of attributes and values
:return: A list of Attribute instances
"""
attributes = []
for key, value in attrvals.items():
key = key.lower()
attributes.a... | Create a list of Attribute instances.
:param attrvals: A dictionary of attributes and values
:return: A list of Attribute instances |
def get_reminders_per_page(self, per_page=1000, page=1, params=None):
"""
Get reminders per page
:param per_page: How many objects per page. Default: 1000
:param page: Which page. Default: 1
:param params: Search parameters. Default: {}
:return: list
"""
... | Get reminders per page
:param per_page: How many objects per page. Default: 1000
:param page: Which page. Default: 1
:param params: Search parameters. Default: {}
:return: list |
def protege_data(datas_str, sens):
"""
Used to crypt/decrypt data before saving locally.
Override if securit is needed.
bytes -> str when decrypting
str -> bytes when crypting
:param datas_str: When crypting, str. when decrypting bytes
:param sens: True to crypt, False to decrypt
"""
... | Used to crypt/decrypt data before saving locally.
Override if securit is needed.
bytes -> str when decrypting
str -> bytes when crypting
:param datas_str: When crypting, str. when decrypting bytes
:param sens: True to crypt, False to decrypt |
async def kick(self, channel, target, reason=None):
""" Kick user from channel. """
if not self.in_channel(channel):
raise NotInChannel(channel)
if reason:
await self.rawmsg('KICK', channel, target, reason)
else:
await self.rawmsg('KICK', channel, tar... | Kick user from channel. |
def createStatus(self,
change_id,
revision_id,
name,
value,
abstain=None,
rerun=None,
comment=None,
url=None,
reporter=None,
... | Abstract the POST REST api documented here:
https://gerrit.googlesource.com/plugins/verify-status/+/master/src/main/resources/Documentation/rest-api-changes.md
:param change_id: The change_id for the change tested (can be in the long form e.g:
myProject~master~I8473b95934b5732ac55d26311a706... |
def complain(self, id, is_spam):
""" http://api.yandex.ru/cleanweb/doc/dg/concepts/complain.xml"""
r = self.request('post', 'http://cleanweb-api.yandex.ru/1.0/complain',
data={'id': id, 'spamtype': 'spam' if is_spam else 'ham'})
return True | http://api.yandex.ru/cleanweb/doc/dg/concepts/complain.xml |
def create(path, value='', acls=None, ephemeral=False, sequence=False, makepath=False, profile=None,
hosts=None, scheme=None, username=None, password=None, default_acl=None):
'''
Create Znode
path
path of znode to create
value
value to assign to znode (Default: '')
acls... | Create Znode
path
path of znode to create
value
value to assign to znode (Default: '')
acls
list of acl dictionaries to be assigned (Default: None)
ephemeral
indicate node is ephemeral (Default: False)
sequence
indicate node is suffixed with a unique inde... |
def _create_polynomial_model(
name: str,
symbol: str,
degree: int,
ds: DataSet,
dss: dict):
"""
Create a polynomial model to describe the specified property based on the
specified data set, and save it to a .json file.
:param name: material name.
:param symbol: property symbol.
... | Create a polynomial model to describe the specified property based on the
specified data set, and save it to a .json file.
:param name: material name.
:param symbol: property symbol.
:param degree: polynomial degree.
:param ds: the source data set.
:param dss: dictionary of all datasets. |
def excluded(filename):
"""
Check if options.exclude contains a pattern that matches filename.
"""
basename = os.path.basename(filename)
for pattern in options.exclude:
if fnmatch(basename, pattern):
# print basename, 'excluded because it matches', pattern
return True | Check if options.exclude contains a pattern that matches filename. |
def save(self, *args, **kwargs):
"""
Extends model ``save()`` to allow dynamic geocoding
"""
self.geocode()
return super(GeoMixin, self).save(*args, **kwargs) | Extends model ``save()`` to allow dynamic geocoding |
def __getHyperSearchJobIDFilePath(cls, permWorkDir, outputLabel):
"""Returns filepath where to store HyperSearch JobID
Parameters:
----------------------------------------------------------------------
permWorkDir: Directory path for saved jobID file
outputLabel: Label string for incorporating into... | Returns filepath where to store HyperSearch JobID
Parameters:
----------------------------------------------------------------------
permWorkDir: Directory path for saved jobID file
outputLabel: Label string for incorporating into file name for saved jobID
retval: Filepath where to store Hyper... |
def delete_exchange_for_vhost(self, exchange, vhost, if_unused=False):
"""
Delete an individual exchange. You can add the parameter
``if_unused=True``. This prevents the delete from succeeding if the
exchange is bound to a queue or as a source to another exchange.
:param exchang... | Delete an individual exchange. You can add the parameter
``if_unused=True``. This prevents the delete from succeeding if the
exchange is bound to a queue or as a source to another exchange.
:param exchange: The exchange name
:type exchange: str
:param vhost: The vhost name
... |
def open(self):
"""Implementation of NAPALM method open."""
try:
connection = self.transport_class(
host=self.hostname,
username=self.username,
password=self.password,
timeout=self.timeout,
**self.eapi_kwargs
... | Implementation of NAPALM method open. |
def set_velocities(self, velocities):
"""
:param velocities (au): list of list of atom velocities
:return:
"""
assert len(velocities) == len(self.mol)
self.params["velocity"] = velocities | :param velocities (au): list of list of atom velocities
:return: |
def _apply_to_data(data, func, unpack_dict=False):
"""Apply a function to data, trying to unpack different data
types.
"""
apply_ = partial(_apply_to_data, func=func, unpack_dict=unpack_dict)
if isinstance(data, dict):
if unpack_dict:
return [apply_(v) for v in data.values()]
... | Apply a function to data, trying to unpack different data
types. |
def weights_prepend_inputs_to_targets(labels):
"""Assign weight 1.0 to only the "targets" portion of the labels.
Weight 1.0 is assigned to all nonzero labels past the first zero.
See prepend_mode in common_hparams.py
Args:
labels: A Tensor of int32s.
Returns:
A Tensor of floats.
"""
past_first_... | Assign weight 1.0 to only the "targets" portion of the labels.
Weight 1.0 is assigned to all nonzero labels past the first zero.
See prepend_mode in common_hparams.py
Args:
labels: A Tensor of int32s.
Returns:
A Tensor of floats. |
def _add_person_to_group(person, group):
""" Call datastores after adding a person to a group. """
from karaage.datastores import add_accounts_to_group
from karaage.datastores import add_accounts_to_project
from karaage.datastores import add_accounts_to_institute
a_list = person.account_set
add... | Call datastores after adding a person to a group. |
def add_latlonalt(self, lat, lon, altitude, terrain_alt=False):
'''add a point via latitude/longitude/altitude'''
if terrain_alt:
frame = mavutil.mavlink.MAV_FRAME_GLOBAL_TERRAIN_ALT
else:
frame = mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT
p = mavutil.mavlink.M... | add a point via latitude/longitude/altitude |
def get_route(ip):
'''
Return routing information for given destination ip
.. versionadded:: 2016.11.5
CLI Example::
salt '*' network.get_route 10.10.10.10
'''
cmd = 'Find-NetRoute -RemoteIPAddress {0}'.format(ip)
out = __salt__['cmd.run'](cmd, shell='powershell', python_shell=Tru... | Return routing information for given destination ip
.. versionadded:: 2016.11.5
CLI Example::
salt '*' network.get_route 10.10.10.10 |
def _explain(self, tree):
""" Set up the engine to do a dry run of a query """
self._explaining = True
self._call_list = []
old_call = self.connection.call
def fake_call(command, **kwargs):
""" Replacement for connection.call that logs args """
if command... | Set up the engine to do a dry run of a query |
def _run_checks(self):
'''basic sanity checks for the file name (and others if needed) before
attempting parsing.
'''
if self.recipe is not None:
# Does the recipe provided exist?
if not os.path.exists(self.recipe):
bot.error("Cannot find %s, i... | basic sanity checks for the file name (and others if needed) before
attempting parsing. |
def _filter_by_pattern(self, pattern):
"""Filter the Filter the Data Collection based on a list of booleans."""
try:
_len = len(pattern)
except TypeError:
raise TypeError("pattern is not a list of Booleans. Got {}".format(
type(pattern)))
_filt_val... | Filter the Filter the Data Collection based on a list of booleans. |
def do_handshake(self):
"""Perform a handshake with the peer
This method forces an explicit handshake to be performed with either
the client or server peer.
"""
_logger.debug("Initiating handshake...")
try:
self._wrap_socket_library_call(
lam... | Perform a handshake with the peer
This method forces an explicit handshake to be performed with either
the client or server peer. |
def merge_partition(self, partition, path, value):
"""
Merge a value into a partition for a key path.
"""
dct = self.partitions[partition]
*heads, tail = path
for part in heads:
dct = dct.setdefault(part, dict())
dct[tail] = value | Merge a value into a partition for a key path. |
def run(
self,
inputs: Dict[str, Union[float, Iterable]],
covers: Dict[str, Union[float, Iterable]],
torch_size: Optional[int] = None,
) -> Union[float, Iterable]:
"""Executes the FIB over a particular set of inputs and returns the
result.
Args:
in... | Executes the FIB over a particular set of inputs and returns the
result.
Args:
inputs: Input set where keys are the names of input nodes in the
GrFN and each key points to a set of input values (or just one).
Returns:
A set of outputs from executing the GrFN... |
def configuration(t0: date, t1: Optional[date] = None,
steps_per_day: int = None) -> Tuple[np.ndarray, np.ndarray]:
"""
Get the positions and velocities of the sun and eight planets
Returned as a tuple q, v
q: Nx3 array of positions (x, y, z) in the J2000.0 coordinate frame.
"""
... | Get the positions and velocities of the sun and eight planets
Returned as a tuple q, v
q: Nx3 array of positions (x, y, z) in the J2000.0 coordinate frame. |
def readline(self, limit=-1, delim=b'\n'):
"""Read a single line.
If EOF is reached before a full line can be read, a partial line is
returned. If *limit* is specified, at most this many bytes will be read.
"""
self._check_readable()
chunks = []
while True:
... | Read a single line.
If EOF is reached before a full line can be read, a partial line is
returned. If *limit* is specified, at most this many bytes will be read. |
def iter_all_repos(self, number=-1, since=None, etag=None, per_page=None):
"""Iterate over every repository in the order they were created.
:param int number: (optional), number of repositories to return.
Default: -1, returns all of them
:param int since: (optional), last repository... | Iterate over every repository in the order they were created.
:param int number: (optional), number of repositories to return.
Default: -1, returns all of them
:param int since: (optional), last repository id seen (allows
restarting this iteration)
:param str etag: (opti... |
def cmd(send, msg, args):
"""
Search the Twitter API.
Syntax: {command} <query> <--user username> <--count 1>
"""
if not msg:
send('What do you think I am, a bird?')
return
parser = arguments.ArgParser(args['config'])
parser.add_argument('query', nargs='*')
group = parse... | Search the Twitter API.
Syntax: {command} <query> <--user username> <--count 1> |
def p_unary_6(self, program):
"""
unary : id '(' expression ')'
"""
# note this is a semantic check, not syntactic
if program[1].name not in self.external_functions:
raise QasmError("Illegal external function call: ",
str(program[1].name... | unary : id '(' expression ')' |
def calc_cost(y, yhat, cost_matrix):
"""Calculate the cost with given cost matrix
y : ground truth
yhat : estimation
cost_matrix : array-like, shape=(n_classes, n_classes)
The ith row, jth column represents the cost of the ground truth being
ith class and prediction as jth class.
... | Calculate the cost with given cost matrix
y : ground truth
yhat : estimation
cost_matrix : array-like, shape=(n_classes, n_classes)
The ith row, jth column represents the cost of the ground truth being
ith class and prediction as jth class. |
def finish_registration(self, heart):
"""Second half of engine registration, called after our HeartMonitor
has received a beat from the Engine's Heart."""
try:
(eid,queue,reg,purge) = self.incoming_registrations.pop(heart)
except KeyError:
self.log.error("registra... | Second half of engine registration, called after our HeartMonitor
has received a beat from the Engine's Heart. |
def kibana_config(self):
"""
config kibana
:return:
"""
uncomment("/etc/kibana/kibana.yml", "#server.host:", use_sudo=True)
sed('/etc/kibana/kibana.yml', 'server.host:.*',
'server.host: "{0}"'.format(env.host_string), use_sudo=True)
sudo('systemctl st... | config kibana
:return: |
def _validate_auths(self, path, obj, app):
""" make sure that apiKey and basicAuth are empty list
in Operation object.
"""
errs = []
for k, v in six.iteritems(obj.authorizations or {}):
if k not in app.raw.authorizations:
errs.append('auth {0} not fou... | make sure that apiKey and basicAuth are empty list
in Operation object. |
def _get_line_array_construct(self):
""" Returns a construct for an array of line data.
"""
from_bus = integer.setResultsName("fbus")
to_bus = integer.setResultsName("tbus")
s_rating = real.setResultsName("s_rating") # MVA
v_rating = real.setResultsName("v_rating") # kV
... | Returns a construct for an array of line data. |
def _compute_metric_names(self):
"""Computes the list of metric names from all the scalar (run, tag) pairs.
The return value is a list of (tag, group) pairs representing the metric
names. The list is sorted in Python tuple-order (lexicographical).
For example, if the scalar (run, tag) pairs are:
(... | Computes the list of metric names from all the scalar (run, tag) pairs.
The return value is a list of (tag, group) pairs representing the metric
names. The list is sorted in Python tuple-order (lexicographical).
For example, if the scalar (run, tag) pairs are:
("exp/session1", "loss")
("exp/sessio... |
def yearly_plots(
df,
variable,
renormalize = True,
horizontal_axis_labels_days = False,
horizontal_axis_labels_months = True,
plot = True,
scatter = False,
linestyle = "-",
linewidth ... | Create yearly plots of a variable in a DataFrame, optionally renormalized.
It is assumed that the DataFrame index is datetime. |
def _validate_names(self, name=None, names=None, deep=False):
"""
Handles the quirks of having a singular 'name' parameter for general
Index and plural 'names' parameter for MultiIndex.
"""
from copy import deepcopy
if names is not None and name is not None:
r... | Handles the quirks of having a singular 'name' parameter for general
Index and plural 'names' parameter for MultiIndex. |
def date(ctx, year, month, day):
    """
    Build a date value from year, month and day components.

    Each component is coerced to an integer via ``conversions.to_integer``
    before the date is constructed.
    """
    y = conversions.to_integer(year, ctx)
    m = conversions.to_integer(month, ctx)
    d = conversions.to_integer(day, ctx)
    return _date(y, m, d)
def path(self):
    '''Path (list of nodes and actions) from root to this node.'''
    steps = []
    current = self
    # Walk up the parent chain, collecting (action, state) pairs.
    while current:
        steps.append((current.action, current.state))
        current = current.parent
    # Collected leaf-to-root; flip so the root comes first.
    steps.reverse()
    return steps
def mutate(self, info_in):
"""Replicate an info + mutation.
To mutate an info, that info must have a method called
``_mutated_contents``.
"""
# check self is not failed
if self.failed:
raise ValueError("{} cannot mutate as it has failed.".format(self))
... | Replicate an info + mutation.
To mutate an info, that info must have a method called
``_mutated_contents``. |
def ball(rmax=3, rmin=0, shape=128, limits=[-4, 4], draw=True, show=True, **kwargs):
"""Show a ball."""
import ipyvolume.pylab as p3
__, __, __, r, _theta, _phi = xyz(shape=shape, limits=limits, spherical=True)
data = r * 0
data[(r < rmax) & (r >= rmin)] = 0.5
if "data_min" not in kwargs:
... | Show a ball. |
def newText(content):
    """Creation of a new text node. """
    node = libxml2mod.xmlNewText(content)
    # The C binding signals failure by returning None.
    if node is None:
        raise treeError('xmlNewText() failed')
    return xmlNode(_obj=node)
def assign(self, V, py):
"""Store python value in Value
"""
if isinstance(py, (bytes, unicode)):
for i,C in enumerate(V['value.choices'] or self._choices):
if py==C:
V['value.index'] = i
return
# attempt to parse as int... | Store python value in Value |
def set_title(self, title):
"""Sets the title.
arg: title (string): the new title
raise: InvalidArgument - ``title`` is invalid
raise: NoAccess - ``Metadata.isReadOnly()`` is ``true``
raise: NullArgument - ``title`` is ``null``
*compliance: mandatory -- This method... | Sets the title.
arg: title (string): the new title
raise: InvalidArgument - ``title`` is invalid
raise: NoAccess - ``Metadata.isReadOnly()`` is ``true``
raise: NullArgument - ``title`` is ``null``
*compliance: mandatory -- This method must be implemented.* |
def explore_path_encompass(self, task_num, dirpath):
"""
Explore path to discover unsearched directories and save filepaths
:param task_num: Processor ID
:param dirpath: Tuple (base directory, path), path information pulled from unsearched Queue
:return: Directories to add to uns... | Explore path to discover unsearched directories and save filepaths
:param task_num: Processor ID
:param dirpath: Tuple (base directory, path), path information pulled from unsearched Queue
:return: Directories to add to unsearched Queue |
def _detect_content_type(self, filename):
'''Determine the mimetype for a file.
:param filename: Filename of file to detect.
'''
name, ext = os.path.splitext(filename)
if not ext:
raise MessageError('File requires an extension.')
ext = ext.lower()
if ... | Determine the mimetype for a file.
:param filename: Filename of file to detect. |
def get_html_column(self):
""" Get a HTML column for this panel. """
panel_id = "panel_{}".format(self.name)
return ["<h2>{}</h2>".format(self.title) + '<a href="{}">Download data</a>'.format(self.tar_fn())] + [
# list of links
(" <br />" + os.linesep).join(
... | Get a HTML column for this panel. |
def channel_angle(im, chanapproxangle=None, *, isshiftdftedge=False,
truesize=None):
"""Extract the channel angle from the rfft
Parameters:
-----------
im: 2d array
The channel image
chanapproxangle: number, optional
If not None, an approximation of the result
... | Extract the channel angle from the rfft
Parameters:
-----------
im: 2d array
The channel image
chanapproxangle: number, optional
If not None, an approximation of the result
isshiftdftedge: boolean, default False
If The image has already been treated:
(edge, dft, ffts... |
def _process_feature_dbxref(self, limit):
"""
This is the mapping between the flybase features and external
repositories. Generally we want to leave the flybase feature id
as the primary identifier. But we need to make the equivalences/sameAs.
:param limit:
:return:
... | This is the mapping between the flybase features and external
repositories. Generally we want to leave the flybase feature id
as the primary identifier. But we need to make the equivalences/sameAs.
:param limit:
:return: |
def is_searchable(self):
"""A bool value that indicates whether the name is a valid name to
search by."""
first = alpha_chars(self.first or u'')
last = alpha_chars(self.last or u'')
raw = alpha_chars(self.raw or u'')
return (len(first) >= 2 and len(last) >= 2) or l... | A bool value that indicates whether the name is a valid name to
search by. |
def is_affirmative(self, section, option):
    """
    Return true if the section option combo exists and it is set
    to a truthy value.
    """
    # Short-circuits: the value is only fetched when the option exists.
    exists = self.has_option(section, option)
    return exists and lib.is_affirmative(self.get(section, option))
to a truthy value. |
def get_updates(
self,
display_all_distributions=False,
verbose=False
): # pragma: no cover
"""
When called, get the environment updates and write updates to a CSV
file and if a new config has been provided, write a new configuration
file.
Args:
... | When called, get the environment updates and write updates to a CSV
file and if a new config has been provided, write a new configuration
file.
Args:
display_all_distributions (bool): Return distribution even if it is
up-to-date.
verbose (bool): If ``True... |
def __isListOfTexts(self, docs):
    """ Checks whether the input is a list of strings or Text-s;
    """
    if not isinstance(docs, list):
        return False
    # Every element must be a plain string or a Text instance.
    return all(isinstance(item, (basestring, Text)) for item in docs)
def simxGetCollisionHandle(clientID, collisionObjectName, operationMode):
'''
Please have a look at the function description/documentation in the V-REP user manual
'''
handle = ct.c_int()
if (sys.version_info[0] == 3) and (type(collisionObjectName) is str):
collisionObjectName=collisionObje... | Please have a look at the function description/documentation in the V-REP user manual |
def draw(self, time, frametime, target):
"""
Fetch track value for every runnable effect.
If the value is > 0.5 we draw it.
"""
for effect in self.effects:
value = effect.rocket_timeline_track.time_value(time)
if value > 0.5:
effect... | Fetch track value for every runnable effect.
If the value is > 0.5 we draw it. |
def call_fn(self, what, *args, **kwargs):
""" Lazy call init_adapter then call the function """
logger.debug('f_{0}:{1}{2}({3})'.format(
self.call_stack_level,
' ' * 4 * self.call_stack_level,
what,
arguments_as_string(args, kwargs)))
port, fn_name... | Lazy call init_adapter then call the function |
def create_tag(self, tag_name=None, **properties):
"""Creates a tag and adds it to the tag table of the TextBuffer.
:param str tag_name:
Name of the new tag, or None
:param **properties:
Keyword list of properties and their values
:returns:
A new tag.... | Creates a tag and adds it to the tag table of the TextBuffer.
:param str tag_name:
Name of the new tag, or None
:param **properties:
Keyword list of properties and their values
:returns:
A new tag.
This is equivalent to creating a Gtk.TextTag and the... |
def get_dataset(self, key, info):
"""Get the data from the files."""
logger.debug("Getting raw data")
res = super(HRITGOESFileHandler, self).get_dataset(key, info)
self.mda['calibration_parameters'] = self._get_calibration_params()
res = self.calibrate(res, key.calibration)
... | Get the data from the files. |
def set_pending_boot_mode(self, boot_mode):
"""Sets the boot mode of the system for next boot.
:param boot_mode: either 'uefi' or 'legacy'.
:raises: IloInvalidInputError, on an invalid input.
:raises: IloError, on an error from iLO.
"""
sushy_system = self._get_sushy_sys... | Sets the boot mode of the system for next boot.
:param boot_mode: either 'uefi' or 'legacy'.
:raises: IloInvalidInputError, on an invalid input.
:raises: IloError, on an error from iLO. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.