code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def calc_custom(custom, genome, scaffold, sequence, scaffold_coverage, total_bases):
"""
custom = {(reads mapped to scaffold)/(total reads for sample)}/(length of scaffold)
"""
index = 0
if scaffold in scaffold_coverage: # what if the scaffold does not have bases mapped back to it? (this *should* not happen)
if ... | custom = {(reads mapped to scaffold)/(total reads for sample)}/(length of scaffold) |
def get_reports_by_type(self, account_id, report_type):
"""
Shows all reports of the passed report_type that have been run
for the canvas account id.
https://canvas.instructure.com/doc/api/account_reports.html#method.account_reports.index
"""
url = ACCOUNTS_API.format(ac... | Shows all reports of the passed report_type that have been run
for the canvas account id.
https://canvas.instructure.com/doc/api/account_reports.html#method.account_reports.index |
def start(self):
"""
Starts the timer from zero
"""
self.startTime = time.time()
self.configure(text='{0:<d} s'.format(0))
self.update() | Starts the timer from zero |
def finalized_canonical_averages_dtype(spanning_cluster=True):
"""
The NumPy Structured Array type for finalized canonical averages over
several runs
Helper function
Parameters
----------
spanning_cluster : bool, optional
Whether to detect a spanning cluster or not.
Default... | The NumPy Structured Array type for finalized canonical averages over
several runs
Helper function
Parameters
----------
spanning_cluster : bool, optional
Whether to detect a spanning cluster or not.
Defaults to ``True``.
Returns
-------
ret : list of pairs of str
... |
async def proxy_new(connection, flags, info, name, object_path, interface_name):
"""Asynchronously call the specified method on a DBus proxy object."""
future = Future()
cancellable = None
Gio.DBusProxy.new(
connection,
flags,
info,
name,
object_path,
inte... | Asynchronously call the specified method on a DBus proxy object. |
def sgd(grad, x, callback=None, num_iters=200, step_size=0.1, mass=0.9):
"""Stochastic gradient descent with momentum.
grad() must have signature grad(x, i), where i is the iteration number."""
velocity = np.zeros(len(x))
for i in range(num_iters):
g = grad(x, i)
if callback: callback(x,... | Stochastic gradient descent with momentum.
grad() must have signature grad(x, i), where i is the iteration number. |
def positions_func(inputs, pad=0):
"""
A layer filling i-th column of a 2D tensor with
1+ln(1+i) when it contains a meaningful symbol
and with 0 when it contains PAD
"""
position_inputs = kb.cumsum(kb.ones_like(inputs, dtype="float32"), axis=1)
position_inputs *= kb.cast(kb.not_equal(inputs,... | A layer filling i-th column of a 2D tensor with
1+ln(1+i) when it contains a meaningful symbol
and with 0 when it contains PAD |
def itertypes(iterable):
"""Iterates over an iterable containing either type objects or tuples of
type objects and yields once for every type object found."""
seen = set()
for entry in iterable:
if isinstance(entry, tuple):
for type_ in entry:
if type_ not in seen:
... | Iterates over an iterable containing either type objects or tuples of
type objects and yields once for every type object found. |
def atlas_peer_update_health( peer_hostport, received_response, peer_table=None ):
"""
Mark the given peer as alive at this time.
Update times at which we contacted it,
and update its health score.
Use the global health table by default,
or use the given health info if set.
"""
with A... | Mark the given peer as alive at this time.
Update times at which we contacted it,
and update its health score.
Use the global health table by default,
or use the given health info if set. |
def analyze(self):
"""Run analysis."""
precision = 'DP' if self.kernel.datatype == 'double' else 'SP'
self.calculate_cache_access()
self.results['max_perf'] = self.conv_perf(self.machine['clock'] * self.cores * \
self.machine['FLOPs per cycle'][precision]['total']) | Run analysis. |
def close(self):
"""Closes the SSH connection if the connection is UP."""
if not self.connected:
return None
if self.config is not None:
if self.config.changed() and not self.config.committed():
try:
self.config.discard() # if configur... | Closes the SSH connection if the connection is UP. |
def md5sum(self):
"""
Check to see if the file exists on the device
:return:
"""
cmd = 'show file {dir}:{bin} md5sum'.format(
dir=self.DESTDIR, bin=self.image)
run = self.device.api.exec_opcmd
try:
got = run(cmd)
return got.get... | Check to see if the file exists on the device
:return: |
def coordinator(self):
"""Get the current coordinator
Returns: the current coordinator id or None if it is unknown
"""
if self.coordinator_id is None:
return None
elif self._client.is_disconnected(self.coordinator_id):
self.coordinator_dead('Node Disconne... | Get the current coordinator
Returns: the current coordinator id or None if it is unknown |
def _setup_transport(self):
"""
Wrap the socket in an SSL object, either the
new Python 2.6 version, or the older Python 2.5 and
lower version.
"""
if HAVE_PY26_SSL:
if hasattr(self, 'sslopts'):
self.sslobj = ssl.wrap_socket(self.sock, **self.... | Wrap the socket in an SSL object, either the
new Python 2.6 version, or the older Python 2.5 and
lower version. |
def hash(self):
'''
:rtype: int
:return: hash of the field
'''
hashed = super(RandomBits, self).hash()
return khash(hashed, self._min_length, self._max_length, self._num_mutations, self._step, self._seed) | :rtype: int
:return: hash of the field |
def get_jobs_events_from_sequence(user, sequence):
"""Get all the jobs events from a given sequence number."""
args = schemas.args(flask.request.args.to_dict())
if user.is_not_super_admin():
raise dci_exc.Unauthorized()
query = sql.select([models.JOBS_EVENTS]). \
select_from(models.JO... | Get all the jobs events from a given sequence number. |
def id_pools_ipv4_subnets(self):
"""
Gets the IdPoolsIpv4Subnets API client.
Returns:
IdPoolsIpv4Subnets:
"""
if not self.__id_pools_ipv4_subnets:
self.__id_pools_ipv4_subnets = IdPoolsIpv4Subnets(self.__connection)
return self.__id_pools_ipv4_sub... | Gets the IdPoolsIpv4Subnets API client.
Returns:
IdPoolsIpv4Subnets: |
def create_user(user, name, create=None): # noqa: E501
"""Create a new script
Create a new script # noqa: E501
:param user: Get user with this name
:type user: str
:param name: Get status of a driver with this name
:type name: str
:param create: The data needed to create this user
:ty... | Create a new script
Create a new script # noqa: E501
:param user: Get user with this name
:type user: str
:param name: Get status of a driver with this name
:type name: str
:param create: The data needed to create this user
:type create: dict | bytes
:rtype: Response |
def _grab_concretization_results(cls, state):
"""
Grabs the concretized result so we can add the constraint ourselves.
"""
# only grab ones that match the constrained addrs
if cls._should_add_constraints(state):
addr = state.inspect.address_concretization_expr
... | Grabs the concretized result so we can add the constraint ourselves. |
def f_i18n_citation_type(string, lang="eng"):
""" Take a string of form %citation_type|passage% and format it for human
:param string: String of formation %citation_type|passage%
:param lang: Language to translate to
:return: Human Readable string
.. note :: To Do : Use i18n tools and provide real... | Take a string of form %citation_type|passage% and format it for human
:param string: String of formation %citation_type|passage%
:param lang: Language to translate to
:return: Human Readable string
.. note :: To Do : Use i18n tools and provide real i18n |
async def get_friendly_name(self) -> Text:
"""
The friendly name is mapped to Facebook's first name. If the first
name is missing, use the last name.
"""
u = await self._get_user()
f = u.get('first_name', '').strip()
l = u.get('last_name', '').strip()
ret... | The friendly name is mapped to Facebook's first name. If the first
name is missing, use the last name. |
def _adjust_prt_flds(self, kws_xlsx, desc2nts, shade_hdrgos):
"""Print user-requested fields or provided fields minus info fields."""
# Use xlsx prt_flds from the user, if provided
if "prt_flds" in kws_xlsx:
return kws_xlsx["prt_flds"]
# If the user did not provide specific f... | Print user-requested fields or provided fields minus info fields. |
def midi(self):
"""
Return the (nearest) MIDI note to the tone's frequency. This will be an
integer number in the range 0 to 127. If the frequency is outside the
range represented by MIDI notes (which is approximately 8Hz to 12.5KHz)
:exc:`ValueError` exception will be raised.
... | Return the (nearest) MIDI note to the tone's frequency. This will be an
integer number in the range 0 to 127. If the frequency is outside the
range represented by MIDI notes (which is approximately 8Hz to 12.5KHz)
:exc:`ValueError` exception will be raised. |
def _send(self, javascript):
"""
Establishes a socket connection to the zombie.js server and sends
Javascript instructions.
:param js: the Javascript string to execute
"""
# Prepend JS to switch to the proper client context.
message = """
var _ctx = ... | Establishes a socket connection to the zombie.js server and sends
Javascript instructions.
:param js: the Javascript string to execute |
def refresh(self):
"""Refresh the dev_info data used by get_value.
Only needed if you're not using subscriptions.
"""
j = self.vera_request(id='sdata', output_format='json').json()
devices = j.get('devices')
for device_data in devices:
if device_data.get('id'... | Refresh the dev_info data used by get_value.
Only needed if you're not using subscriptions. |
def similarity_by_path(sense1: "wn.Synset", sense2: "wn.Synset", option: str = "path") -> float:
"""
Returns maximum path similarity between two senses.
:param sense1: A synset.
:param sense2: A synset.
:param option: String, one of ('path', 'wup', 'lch').
:return: A float, similarity measureme... | Returns maximum path similarity between two senses.
:param sense1: A synset.
:param sense2: A synset.
:param option: String, one of ('path', 'wup', 'lch').
:return: A float, similarity measurement. |
def markdown_changelog(version: str, changelog: dict, header: bool = False) -> str:
"""
Generates a markdown version of the changelog. Takes a parsed changelog dict from
generate_changelog.
:param version: A string with the version number.
:param changelog: A dict from generate_changelog.
:para... | Generates a markdown version of the changelog. Takes a parsed changelog dict from
generate_changelog.
:param version: A string with the version number.
:param changelog: A dict from generate_changelog.
:param header: A boolean that decides whether a header should be included or not.
:return: The ma... |
def lastmod(self, tag):
"""Return the last modification of the entry."""
lastitems = EntryModel.objects.published().order_by('-modification_date').filter(tags=tag).only('modification_date')
return lastitems[0].modification_date | Return the last modification of the entry. |
def MeshArrows(*inputobj, **options):
"""
Build arrows representing displacements.
:param float s: cross-section size of the arrow
:param float rescale: apply a rescaling factor to the length
"""
s = options.pop("s", None)
scale = options.pop("scale", 1)
c = options.pop("c", "gray")... | Build arrows representing displacements.
:param float s: cross-section size of the arrow
:param float rescale: apply a rescaling factor to the length |
def start_capture(self, adapter_number, output_file):
"""
Starts a packet capture.
:param adapter_number: adapter number
:param output_file: PCAP destination file for the capture
"""
try:
adapter = self._ethernet_adapters[adapter_number]
except Index... | Starts a packet capture.
:param adapter_number: adapter number
:param output_file: PCAP destination file for the capture |
def snapshotToMovie(snap,filename,*args,**kwargs):
"""
NAME:
snapshotToMovie
PURPOSE:
turn a list of snapshots into a movie
INPUT:
snap - the snapshots (list)
filename - name of the file to save the movie to
framerate= in fps
bitrate= ?
th... | NAME:
snapshotToMovie
PURPOSE:
turn a list of snapshots into a movie
INPUT:
snap - the snapshots (list)
filename - name of the file to save the movie to
framerate= in fps
bitrate= ?
thumbnail=False : create thumbnail image (filename-extension+.jpg... |
def cp(source, bucket, checksum, key_prefix):
"""Create new bucket from all files in directory."""
from .models import Bucket
from .helpers import populate_from_path
for object_version in populate_from_path(
Bucket.get(bucket), source, checksum=checksum,
key_prefix=key_prefix):
... | Create new bucket from all files in directory. |
def parse_release_id(release_id):
"""
Parse release_id to parts:
{short, version, type}
or
{short, version, type, bp_short, bp_version, bp_type}
:param release_id: Release ID string
:type release_id: str
:rtype: dict
"""
if "@" in release_id:
release, base_product = rele... | Parse release_id to parts:
{short, version, type}
or
{short, version, type, bp_short, bp_version, bp_type}
:param release_id: Release ID string
:type release_id: str
:rtype: dict |
def member(Imported, **Config):
r"""Helps with adding imported members to Scripts.
Note:
Config depends upon the Imported. It could be that of a **task** or a **group**.
"""
__ec_member__ = Imported.__ec_member__
__ec_member__.Config.update(**Config)
state.ActiveModuleMemberQ.insert(0, __ec_member__) | r"""Helps with adding imported members to Scripts.
Note:
Config depends upon the Imported. It could be that of a **task** or a **group**. |
def dumps(data, escape=False, **kwargs):
"""A wrapper around `json.dumps` that can handle objects that json
module is not aware.
This function is aware of a list of custom serializers that can be
registered by the API user, making it possible to convert any kind
of object to types that the json lib... | A wrapper around `json.dumps` that can handle objects that json
module is not aware.
This function is aware of a list of custom serializers that can be
registered by the API user, making it possible to convert any kind
of object to types that the json library can handle. |
def keys(self, name_start, name_end, limit=10):
"""
Return a list of the top ``limit`` keys between ``name_start`` and
``name_end``
Similiar with **Redis.KEYS**
.. note:: The range is (``name_start``, ``name_end``]. ``name_start``
isn't in the range, but ``na... | Return a list of the top ``limit`` keys between ``name_start`` and
``name_end``
Similiar with **Redis.KEYS**
.. note:: The range is (``name_start``, ``name_end``]. ``name_start``
isn't in the range, but ``name_end`` is.
:param string name_start: The lower bound(not ... |
def decrypt(self, key, data, mode, padding):
# pylint: disable=unused-argument,no-self-use
"""Decrypt data using the supplied values.
:param bytes key: Loaded decryption key
:param bytes data: IV prepended to encrypted data
:param JavaMode mode: Decryption mode to use (not used ... | Decrypt data using the supplied values.
:param bytes key: Loaded decryption key
:param bytes data: IV prepended to encrypted data
:param JavaMode mode: Decryption mode to use (not used by :class:`JavaAsymmetricEncryptionAlgorithm`)
:param JavaPadding padding: Padding mode to use
... |
def _choose_random_direction(current_state_parts, batch_rank, seed=None):
"""Chooses a random direction in the event space."""
seed_gen = distributions.SeedStream(seed, salt='_choose_random_direction')
# Chooses the random directions across each of the input components.
rnd_direction_parts = [
tf.random.n... | Chooses a random direction in the event space. |
def data(self):
"""The data dictionary for this entity.
"""
return self.model.state.entity_data(
self.entity_type, self.entity_id, self._history_index) | The data dictionary for this entity. |
def add_metric(self, labels, value, created=None, timestamp=None):
"""Add a metric to the metric family.
Args:
labels: A list of label values
value: The value of the metric
created: Optional unix timestamp the child was created at.
"""
self.samples.append(S... | Add a metric to the metric family.
Args:
labels: A list of label values
value: The value of the metric
created: Optional unix timestamp the child was created at. |
def silence(cls, *modules, **kwargs):
"""
Args:
*modules: Modules, or names of modules to silence (by setting their log level to WARNING or above)
**kwargs: Pass as kwargs due to python 2.7, would be level=logging.WARNING otherwise
"""
level = kwargs.pop("level", ... | Args:
*modules: Modules, or names of modules to silence (by setting their log level to WARNING or above)
**kwargs: Pass as kwargs due to python 2.7, would be level=logging.WARNING otherwise |
def get_state_in_ec_string(self, ec_index, add_colour=True):
'''Get the state of the component in an execution context as a string.
@param ec_index The index of the execution context to check the state
in. This index is into the total array of contexts,
t... | Get the state of the component in an execution context as a string.
@param ec_index The index of the execution context to check the state
in. This index is into the total array of contexts,
that is both owned and participating contexts. If the
... |
def purge_docs(cls, app, env, docname): # pragma: no cover
"""Handler for Sphinx's env-purge-doc event.
This event is emitted when all traces of a source file should be cleaned
from the environment (that is, if the source file is removed, or before
it is freshly read). This is for exte... | Handler for Sphinx's env-purge-doc event.
This event is emitted when all traces of a source file should be cleaned
from the environment (that is, if the source file is removed, or before
it is freshly read). This is for extensions that keep their own caches
in attributes of the environm... |
def remove_tags(self, server, tags):
"""
Remove tags from a server.
- server: Server object or UUID string
- tags: list of Tag objects or strings
"""
uuid = str(server)
tags = [str(tag) for tag in tags]
url = '/server/{0}/untag/{1}'.format(uuid, ','.join... | Remove tags from a server.
- server: Server object or UUID string
- tags: list of Tag objects or strings |
def add_template_filter(self, func: Callable, name: Optional[str]=None) -> None:
"""Add a template filter.
This is designed to be used on the application directly. An
example usage,
.. code-block:: python
def to_upper(value):
return value.upper()
... | Add a template filter.
This is designed to be used on the application directly. An
example usage,
.. code-block:: python
def to_upper(value):
return value.upper()
app.add_template_filter(to_upper)
Arguments:
func: The function that... |
def update(self, date, data=None, inow=None):
"""
Update strategy. Updates prices, values, weight, etc.
"""
# resolve stale state
self.root.stale = False
# update helpers on date change
# also set newpt flag
newpt = False
if self.now == 0:
... | Update strategy. Updates prices, values, weight, etc. |
def _load_plt(self, filename):
"""Initialize Grid from gOpenMol plt file."""
g = gOpenMol.Plt()
g.read(filename)
grid, edges = g.histogramdd()
self.__init__(grid=grid, edges=edges, metadata=self.metadata) | Initialize Grid from gOpenMol plt file. |
def push(self, remote, branch=None):
'''Push a repository
:param remote: git-remote instance
:param branch: name of the branch to push
:return: PushInfo, git push output lines
'''
pb = ProgressBar()
pb.setup(self.name, ProgressBar.Action.PUSH)
if branch:
... | Push a repository
:param remote: git-remote instance
:param branch: name of the branch to push
:return: PushInfo, git push output lines |
def scroll_one_line_up(event):
"""
scroll_offset -= 1
"""
w = find_window_for_buffer_name(event.cli, event.cli.current_buffer_name)
b = event.cli.current_buffer
if w:
# When the cursor is at the bottom, move to the previous line. (Otherwise, only scroll.)
if w.render_info:
... | scroll_offset -= 1 |
def _iter_interleaved_items(self, elements):
"""Generate element or subtotal items in interleaved order.
This ordering corresponds to how value "rows" (or columns) are to
appear after subtotals have been inserted at their anchor locations.
Where more than one subtotal is anchored to the... | Generate element or subtotal items in interleaved order.
This ordering corresponds to how value "rows" (or columns) are to
appear after subtotals have been inserted at their anchor locations.
Where more than one subtotal is anchored to the same location, they
appear in their document or... |
def parse_component_reference(self, node):
"""
Parses <ComponentReference>
@param node: Node containing the <ComponentTypeRef> element
@type node: xml.etree.Element
"""
if 'name' in node.lattrib:
name = node.lattrib['name']
else:
self.rai... | Parses <ComponentReference>
@param node: Node containing the <ComponentTypeRef> element
@type node: xml.etree.Element |
def qos_map_cos_mutation_cos5(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
qos = ET.SubElement(config, "qos", xmlns="urn:brocade.com:mgmt:brocade-qos")
map = ET.SubElement(qos, "map")
cos_mutation = ET.SubElement(map, "cos-mutation")
n... | Auto Generated Code |
def findunique(lst, key):
"""
Find all unique key values for items in lst.
Parameters
----------
lst: list
A list of composite dictionaries e.g. ``layers``, ``classes``
key: string
The key name to search each dictionary in the list
Returns
-------
list
A ... | Find all unique key values for items in lst.
Parameters
----------
lst: list
A list of composite dictionaries e.g. ``layers``, ``classes``
key: string
The key name to search each dictionary in the list
Returns
-------
list
A sorted Python list of unique keys in t... |
def force_clean(self, remove_rw=False, allow_lazy=False, retries=5, sleep_interval=0.5):
"""Attempts to call the clean method, but will retry automatically if an error is raised. When the attempts
run out, it will raise the last error.
Note that the method will only catch :class:`ImageMounterEr... | Attempts to call the clean method, but will retry automatically if an error is raised. When the attempts
run out, it will raise the last error.
Note that the method will only catch :class:`ImageMounterError` exceptions.
:param bool remove_rw: indicates whether a read-write cache should be remo... |
def generate_iv_for_export(self, client_random, server_random,
con_end, read_or_write, req_len):
"""
Generate IV for EXPORT ciphersuite, i.e. weakens it.
An export IV generation example is given in section 6.3.1 of RFC 2246.
See also page 86 of EKR's book.
... | Generate IV for EXPORT ciphersuite, i.e. weakens it.
An export IV generation example is given in section 6.3.1 of RFC 2246.
See also page 86 of EKR's book. |
def run(self):
"""Create a type list."""
config = self.state.document.settings.env.config
# Group processes by category
processes = get_processes(config.autoprocess_process_dir, config.autoprocess_source_base_url)
processes.sort(key=itemgetter('type'))
processes_by_types... | Create a type list. |
def _debug_dump_dom(el):
"""Debugging helper. Prints out `el` contents."""
import xml.dom.minidom
s = [el.nodeName]
att_container = el.attributes
for i in range(att_container.length):
attr = att_container.item(i)
s.append(' @{a}="{v}"'.format(a=attr.name, v=attr.value))
for c in... | Debugging helper. Prints out `el` contents. |
def analyze(self, text, tokenizer=str.split):
"""Analyze text and return pretty format.
Args:
text: string, the input text.
tokenizer: Tokenize input sentence. Default tokenizer is `str.split`.
Returns:
res: dict.
"""
if not self.tagger:
... | Analyze text and return pretty format.
Args:
text: string, the input text.
tokenizer: Tokenize input sentence. Default tokenizer is `str.split`.
Returns:
res: dict. |
def projective_measurement_constraints(*parties):
"""Return a set of constraints that define projective measurements.
:param parties: Measurements of different parties.
:type A: list or tuple of list of list of
:class:`sympy.physics.quantum.operator.HermitianOperator`.
:returns: substitut... | Return a set of constraints that define projective measurements.
:param parties: Measurements of different parties.
:type A: list or tuple of list of list of
:class:`sympy.physics.quantum.operator.HermitianOperator`.
:returns: substitutions containing idempotency, orthogonality and
... |
def correction(self, word):
""" The most probable correct spelling for the word
Args:
word (str): The word to correct
Returns:
str: The most likely candidate """
return max(self.candidates(word), key=self.word_probability) | The most probable correct spelling for the word
Args:
word (str): The word to correct
Returns:
str: The most likely candidate |
def Read(self, timeout=None):
'''
Reads the context menu
:param timeout: Optional. Any value other than None indicates a non-blocking read
:return:
'''
if not self.Shown:
self.Shown = True
self.TrayIcon.show()
if timeout is None:
... | Reads the context menu
:param timeout: Optional. Any value other than None indicates a non-blocking read
:return: |
def system_info(query):
"""system_info(query) -- print system specific information like OS, kernel,
architecture etc.
"""
proc = subprocess.Popen(["uname -o"], stdout=subprocess.PIPE, shell=True)
(out, err) = proc.communicate()
print "operating system : "+str(out),
proc = subprocess.Popen(["uname"], stdout=subp... | system_info(query) -- print system specific information like OS, kernel,
architecture etc. |
def put(self, key, value, cache=None, options={}):
"""Query the server to set the key specified to the value specified in
the specified cache.
Keyword arguments:
key -- the name of the key to be set. Required.
value -- the value to set key to. Must be a string or JSON
... | Query the server to set the key specified to the value specified in
the specified cache.
Keyword arguments:
key -- the name of the key to be set. Required.
value -- the value to set key to. Must be a string or JSON
serialisable. Required.
cache -- the cache to s... |
def analyze_theory(V, x0list=[], plot=False):
""" Extract ground-state energy E0 and psi**2 for potential V. """
# initialize path integral
T = 4.
ndT = 8. # use larger ndT to reduce discretization error (goes like 1/ndT**2)
neval = 3e5 # should probably use more evaluations (10x?)
nit... | Extract ground-state energy E0 and psi**2 for potential V. |
def makeLinearxFunc(self,mLvl,pLvl,MedShk,xLvl):
'''
Constructs the (unconstrained) expenditure function for this period using
bilinear interpolation (over permanent income and the medical shock) among
an array of linear interpolations over market resources.
Parameters
-... | Constructs the (unconstrained) expenditure function for this period using
bilinear interpolation (over permanent income and the medical shock) among
an array of linear interpolations over market resources.
Parameters
----------
mLvl : np.array
Corresponding market re... |
def split_by_percent(self, spin_systems_list):
"""Split list of spin systems by specified percentages.
:param list spin_systems_list: List of spin systems.
:return: List of spin systems divided into sub-lists corresponding to specified split percentages.
:rtype: :py:class:`list`
... | Split list of spin systems by specified percentages.
:param list spin_systems_list: List of spin systems.
:return: List of spin systems divided into sub-lists corresponding to specified split percentages.
:rtype: :py:class:`list` |
def forward_committor(T, A, B):
r"""Forward committor between given sets.
The forward committor u(x) between sets A and B is the probability
for the chain starting in x to reach B before reaching A.
Parameters
----------
T : (M, M) scipy.sparse matrix
Transition matrix
A : array_li... | r"""Forward committor between given sets.
The forward committor u(x) between sets A and B is the probability
for the chain starting in x to reach B before reaching A.
Parameters
----------
T : (M, M) scipy.sparse matrix
Transition matrix
A : array_like
List of integer state lab... |
def load_from_file(self, yamlfile, _override=True, _allow_undeclared=False):
"""Loads the configuration from a file.
Parsed contents must be a single dict mapping config key to value.
Args:
yamlfile: The opened file object to load configuration from.
See load_from_dict() for other args' descri... | Loads the configuration from a file.
Parsed contents must be a single dict mapping config key to value.
Args:
yamlfile: The opened file object to load configuration from.
See load_from_dict() for other args' descriptions.
Raises:
ConfigurationInvalidError: If configuration file can't be... |
def predict_proba(self, X):
"""Predict class probabilities for X.
The predicted class probabilities of an input sample is computed as
the mean predicted class probabilities of the base estimators in the
ensemble. If base estimators do not implement a ``predict_proba``
method, th... | Predict class probabilities for X.
The predicted class probabilities of an input sample is computed as
the mean predicted class probabilities of the base estimators in the
ensemble. If base estimators do not implement a ``predict_proba``
method, then it resorts to voting and the predict... |
def generateMethods(self):
"""Generate some member functions
"""
for i in range(1, 5):
# adds member function grid_ixi_slot(self)
self.make_grid_slot(i, i)
for cl in self.mvision_classes:
self.make_mvision_slot(cl) | Generate some member functions |
def _generate_signature(self, nonce, method, path, data):
"""Generate the call signature
:param path:
:param data:
:param nonce:
:return: signature string
"""
data_json = ""
endpoint = path
if method == "get":
if data:
... | Generate the call signature
:param path:
:param data:
:param nonce:
:return: signature string |
def map_collection(func, collection):
"""
Apply func to each element of a collection, or value of a dictionary.
If the value is not a collection, return it unmodified
"""
datatype = type(collection)
if isinstance(collection, Mapping):
return datatype((key, func(val)) for key, val in coll... | Apply func to each element of a collection, or value of a dictionary.
If the value is not a collection, return it unmodified |
def getfigsize(self, opt):
'''calculate appropriate sizes for the subfigures
'''
if opt.xmin is None:
opt.xmin = self.plotman.grid.grid['x'].min()
if opt.xmax is None:
opt.xmax = self.plotman.grid.grid['x'].max()
if opt.zmin is None:
opt.zmin =... | calculate appropriate sizes for the subfigures |
def __purge():
"""Remove all dead signal receivers from the global receivers collection.
Note:
It is assumed that the caller holds the __lock.
"""
global __receivers
newreceivers = collections.defaultdict(list)
for signal, receivers in six.iteritems(__receivers):
alive = [x for... | Remove all dead signal receivers from the global receivers collection.
Note:
It is assumed that the caller holds the __lock. |
def _fillVolumesAndPaths(self, paths):
""" Fill in paths.
:arg paths: = { Store.Volume: ["linux path",]}
"""
self.diffs = collections.defaultdict((lambda: []))
self.extraKeys = {}
for key in self.bucket.list():
if key.name.startswith(theTrashPrefix):
... | Fill in paths.
:arg paths: = { Store.Volume: ["linux path",]} |
def fcoe_get_interface_output_fcoe_intf_total_interfaces(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
fcoe_get_interface = ET.Element("fcoe_get_interface")
config = fcoe_get_interface
output = ET.SubElement(fcoe_get_interface, "output")
... | Auto Generated Code |
def grouper(n, iterable, padvalue=None):
"grouper(3, 'abcdefg', 'x') --> ('a','b','c'), ('d','e','f'), ('g','x','x')"
return zip_longest(*[iter(iterable)]*n, fillvalue=padvalue) | grouper(3, 'abcdefg', 'x') --> ('a','b','c'), ('d','e','f'), ('g','x','x') |
def dependence_plot(ind, shap_values, features, feature_names=None, display_features=None,
interaction_index="auto",
color="#1E88E5", axis_color="#333333", cmap=colors.red_blue,
dot_size=16, x_jitter=0, alpha=1, title=None, xmin=None, xmax=None, show=True):
... | Create a SHAP dependence plot, colored by an interaction feature.
Plots the value of the feature on the x-axis and the SHAP value of the same feature
on the y-axis. This shows how the model depends on the given feature, and is like a
richer extenstion of the classical parital dependence plots. Vertical dis... |
def _ed25519_key_from_file(fn, path):
"""Create an ed25519 key from the contents of ``path``.
``path`` is a filepath containing a base64-encoded ed25519 key seed.
Args:
fn (callable): the function to call with the contents from ``path``
path (str): the file path to the base64-encoded key s... | Create an ed25519 key from the contents of ``path``.
``path`` is a filepath containing a base64-encoded ed25519 key seed.
Args:
fn (callable): the function to call with the contents from ``path``
path (str): the file path to the base64-encoded key seed.
Returns:
obj: the appropria... |
def histogram(a, bins=10, range=None, **kwargs):
"""Compute the histogram of the input data.
Parameters
----------
a : NDArray
Input data. The histogram is computed over the flattened array.
bins : int or sequence of scalars
If bins is an int, it defines the number of equal-width bi... | Compute the histogram of the input data.
Parameters
----------
a : NDArray
Input data. The histogram is computed over the flattened array.
bins : int or sequence of scalars
If bins is an int, it defines the number of equal-width bins in the
given range (10, by default). If bins ... |
def init_default(required, default, optional_default):
"""
Returns optional default if field is not required and
default was not provided.
:param bool required: whether the field is required in a given model.
:param default: default provided by creator of field.
:param optional_default: default... | Returns optional default if field is not required and
default was not provided.
:param bool required: whether the field is required in a given model.
:param default: default provided by creator of field.
:param optional_default: default for the data type if none provided.
:return: default or option... |
def size(self, store_hashes=True):
    """
    Retrieve the size in bytes of this ZIP content.

    :param store_hashes: forwarded to the content-caching step when the
        cached bytes need to be refreshed.
    :return: Size of the zip content in bytes
    """
    # Refresh the cached bytes first if the content has been modified,
    # so the reported length reflects the current state.
    if self.modified:
        self.__cache_content(store_hashes)
    content = self.cached_content
    return len(content)
def create_fake_mirror(src, dst):
"""Copy all dir, files from ``src`` to ``dst``. But only create a empty file
with same file name. Of course, the tree structure doesn't change.
A recipe gadget to create some test data set.
Make sure to use absolute path.
... | Copy all dir, files from ``src`` to ``dst``. But only create a empty file
with same file name. Of course, the tree structure doesn't change.
A recipe gadget to create some test data set.
Make sure to use absolute path.
**中文文档**
复制整个src目录下的文件树结... |
def skyimage_figure(cluster):
"""
Given a cluster create a Bokeh plot figure using the
cluster's image.
"""
pf_image = figure(x_range=(0, 1), y_range=(0, 1),
title='Image of {0}'.format(cluster.name))
pf_image.image_url(url=[cluster.image_path],
x=0, ... | Given a cluster create a Bokeh plot figure using the
cluster's image. |
def argmax(self, axis=None, skipna=True):
"""
Return an ndarray of the maximum argument indexer.
Parameters
----------
axis : {None}
Dummy argument for consistency with Series
skipna : bool, default True
See Also
--------
numpy.ndarra... | Return an ndarray of the maximum argument indexer.
Parameters
----------
axis : {None}
Dummy argument for consistency with Series
skipna : bool, default True
See Also
--------
numpy.ndarray.argmax |
def _get_instructions_bytes(code, varnames=None, names=None, constants=None,
cells=None, linestarts=None, line_offset=0):
"""Iterate over the instructions in a bytecode string.
Generates a sequence of Instruction namedtuples giving the details of each
opcode. Additional informa... | Iterate over the instructions in a bytecode string.
Generates a sequence of Instruction namedtuples giving the details of each
opcode. Additional information about the code's runtime environment
(e.g. variable names, constants) can be specified using optional
arguments. |
def create(klass, account, name):
    """
    Create a new tailored audience.

    :param klass: audience class to instantiate.
    :param account: account the audience belongs to.
    :param name: name for the new audience.
    :return: the created audience, reloaded.
    :raises BadRequest: if reloading fails; the partially created
        audience is deleted before the error is re-raised.
    """
    audience = klass(account)
    # Dispatch through getattr to invoke the double-underscore hook.
    creator = getattr(audience, '__create_audience__')
    creator(name)
    try:
        return audience.reload()
    except BadRequest as err:
        # Roll back the server-side object so a failed create leaves
        # nothing behind, then surface the original error.
        audience.delete()
        raise err
def make_at_least_n_items_valid(flag_list, n):
"""
tries to make at least min(len(flag_list, n) items True in flag_list
Args:
flag_list (list): list of booleans
n (int): number of items to ensure are True
CommandLine:
python -m utool.util_dev --test-make_at_least_n_items_valid
... | tries to make at least min(len(flag_list, n) items True in flag_list
Args:
flag_list (list): list of booleans
n (int): number of items to ensure are True
CommandLine:
python -m utool.util_dev --test-make_at_least_n_items_valid
Example:
>>> # ENABLE_DOCTEST
>>> from... |
def run(self):
    """
    Run the worker method in a loop.

    The loop keeps going for as long as the parent's ``is_running``
    property is True; it terminates once that property turns False.
    """
    while self._base.is_running:
        # Re-read the worker each pass; skip the call when it is unset.
        worker = self._worker
        if worker:
            worker()
        time.sleep(self._sleep_duration)
def cmdloop(self, intro: Optional[str] = None) -> None:
"""This is an outer wrapper around _cmdloop() which deals with extra features provided by cmd2.
_cmdloop() provides the main loop equivalent to cmd.cmdloop(). This is a wrapper around that which deals with
the following extra features pro... | This is an outer wrapper around _cmdloop() which deals with extra features provided by cmd2.
_cmdloop() provides the main loop equivalent to cmd.cmdloop(). This is a wrapper around that which deals with
the following extra features provided by cmd2:
- commands at invocation
- transcrip... |
def _execute_get_url(self, request_url, append_sid=True):
"""Function to execute and handle a GET request"""
# Prepare Request
self._debuglog("Requesting URL: '" + request_url + "'")
if append_sid:
self._debuglog("Appending access_token (SID: " +
... | Function to execute and handle a GET request |
def _cleanup_ca_temp_file(self):
"""
Function to clean up ca temp file for requests.
**Returns:** Removes TEMP ca file, no return
"""
if os.name == 'nt':
if isinstance(self.ca_verify_filename, (binary_type, text_type)):
# windows requires file to be c... | Function to clean up ca temp file for requests.
**Returns:** Removes TEMP ca file, no return |
def define_task(name,
tick_script,
task_type='stream',
database=None,
retention_policy='default',
dbrps=None):
'''
Define a task. Serves as both create/update.
name
Name of the task.
tick_script
Path to the... | Define a task. Serves as both create/update.
name
Name of the task.
tick_script
Path to the TICK script for the task. Can be a salt:// source.
task_type
Task type. Defaults to 'stream'
dbrps
A list of databases and retention policies in "dbname"."rpname" format
... |
def createDirStruct(paths, verbose=True):
'''Loops ait.config._datapaths from AIT_CONFIG and creates a directory.
Replaces year and doy with the respective year and day-of-year.
If neither are given as arguments, current UTC day and year are used.
Args:
paths:
[optional] list of di... | Loops ait.config._datapaths from AIT_CONFIG and creates a directory.
Replaces year and doy with the respective year and day-of-year.
If neither are given as arguments, current UTC day and year are used.
Args:
paths:
[optional] list of directory paths you would like to create.
... |
def order_upgrades(self, upgrades, history=None):
"""Order upgrades according to their dependencies.
(topological sort using
Kahn's algorithm - http://en.wikipedia.org/wiki/Topological_sorting).
:param upgrades: Dict of upgrades
:param history: Dict of applied upgrades
... | Order upgrades according to their dependencies.
(topological sort using
Kahn's algorithm - http://en.wikipedia.org/wiki/Topological_sorting).
:param upgrades: Dict of upgrades
:param history: Dict of applied upgrades |
def native_decode_source(text):
"""Use codec specified in file to decode to unicode
Then, encode unicode to native str:
Python 2: bytes
Python 3: unicode
"""
if ((only_python3 and isinstance(text, bytes))
or (only_python2 and isinstance(text, str))):
text = decode_sou... | Use codec specified in file to decode to unicode
Then, encode unicode to native str:
Python 2: bytes
Python 3: unicode |
def ProcessLine(filename, file_extension, clean_lines, line,
include_state, function_state, nesting_state, error,
extra_check_functions=[]):
"""Processes a single line in the file.
Args:
filename: Filename of the file that is being processed.
file_extension: The extension (d... | Processes a single line in the file.
Args:
filename: Filename of the file that is being processed.
file_extension: The extension (dot not included) of the file.
clean_lines: An array of strings, each representing a line of the file,
with comments stripped.
line: Number of line being ... |
def write_java_message(key,val,text_file):
"""
Loop through all java messages that are not associated with a unit test and
write them into a log file.
Parameters
----------
key : str
9.general_bad_java_messages
val : list of list of str
contains the bad java messages and th... | Loop through all java messages that are not associated with a unit test and
write them into a log file.
Parameters
----------
key : str
9.general_bad_java_messages
val : list of list of str
contains the bad java messages and the message types.
:return: none |
def random_str(size=10):
    """
    Create a random lowercase string of the requested length.

    :param size: int, length of the string
    :return: the string
    """
    # One independent random.choice per character, joined at the end.
    letters = string.ascii_lowercase
    picks = (random.choice(letters) for _ in range(size))
    return ''.join(picks)
def serialize(self, method="urlencoded", lev=0, **kwargs):
"""
Convert this instance to another representation. Which representation
is given by the choice of serialization method.
:param method: A serialization method. Presently 'urlencoded', 'json',
'jwt' and 'dic... | Convert this instance to another representation. Which representation
is given by the choice of serialization method.
:param method: A serialization method. Presently 'urlencoded', 'json',
'jwt' and 'dict' is supported.
:param lev:
:param kwargs: Extra key word arg... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.