code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def get_protein_data_pgrouped(proteindata, p_acc, headerfields):
    """Build the tsv output dictionary for one protein (protein-grouped).

    Starts from the base protein report and augments it with coverage
    and protein-number fields.
    """
    base_report = get_protein_data_base(proteindata, p_acc, headerfields)
    return get_cov_protnumbers(proteindata, p_acc, base_report)
dictionary |
def truncate(self, length):
"""Return a new `Multihash` with a shorter digest `length`.
If the given `length` is greater than the original, a `ValueError`
is raised.
>>> mh1 = Multihash(0x01, b'FOOBAR')
>>> mh2 = mh1.truncate(3)
>>> mh2 == (0x01, b'FOO')
True
... | Return a new `Multihash` with a shorter digest `length`.
If the given `length` is greater than the original, a `ValueError`
is raised.
>>> mh1 = Multihash(0x01, b'FOOBAR')
>>> mh2 = mh1.truncate(3)
>>> mh2 == (0x01, b'FOO')
True
>>> mh3 = mh1.truncate(10)
... |
def _process_state(cls, unprocessed, processed, state):
"""Preprocess a single state definition."""
assert type(state) is str, "wrong state name %r" % state
assert state[0] != '#', "invalid state name %r" % state
if state in processed:
return processed[state]
tokens =... | Preprocess a single state definition. |
def get_context_data(self, **kwargs):
"""Add context data to view"""
context = super().get_context_data(**kwargs)
tabs = self.get_active_tabs()
context.update({
'page_detail_tabs': tabs,
'active_tab': tabs[0].code if tabs else '',
'app_label': self.get... | Add context data to view |
def lx4num(string, first):
"""
Scan a string from a specified starting position for the
end of a number.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lx4num_c.html
:param string: Any character string.
:type string: str
:param first: First character to scan from in string.
:t... | Scan a string from a specified starting position for the
end of a number.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lx4num_c.html
:param string: Any character string.
:type string: str
:param first: First character to scan from in string.
:type first: int
:return: last and nc... |
def fmap(self, f: Callable[[T], B]) -> 'List[B]':
"""doufo.List.fmap: map `List`
Args:
`self`:
`f` (`Callable[[T], B]`): any callable funtion
Returns:
return (`List[B]`): A `List` of objected from `f`.
Raises:
"""
return Lis... | doufo.List.fmap: map `List`
Args:
`self`:
`f` (`Callable[[T], B]`): any callable funtion
Returns:
return (`List[B]`): A `List` of objected from `f`.
Raises: |
def is_valid_mac(addr):
"""Check the syntax of a given mac address.
The acceptable format is xx:xx:xx:xx:xx:xx
"""
addrs = addr.split(':')
if len(addrs) != 6:
return False
for m in addrs:
try:
if int(m, 16) > 255:
return False
except ValueErro... | Check the syntax of a given mac address.
The acceptable format is xx:xx:xx:xx:xx:xx |
def analyze(problem, Y, calc_second_order=True, num_resamples=100,
conf_level=0.95, print_to_console=False, parallel=False,
n_processors=None, seed=None):
"""Perform Sobol Analysis on model outputs.
Returns a dictionary with keys 'S1', 'S1_conf', 'ST', and 'ST_conf', where
eac... | Perform Sobol Analysis on model outputs.
Returns a dictionary with keys 'S1', 'S1_conf', 'ST', and 'ST_conf', where
each entry is a list of size D (the number of parameters) containing the
indices in the same order as the parameter file. If calc_second_order is
True, the dictionary also contains ... |
def renumber(args):
"""
%prog renumber Mt35.consolidated.bed > tagged.bed
Renumber genes for annotation updates.
"""
from jcvi.algorithms.lis import longest_increasing_subsequence
from jcvi.utils.grouper import Grouper
p = OptionParser(renumber.__doc__)
p.set_annot_reformat_opts()
... | %prog renumber Mt35.consolidated.bed > tagged.bed
Renumber genes for annotation updates. |
def getNextService(self, discover):
"""Return the next authentication service for the pair of
user_input and session. This function handles fallback.
@param discover: a callable that takes a URL and returns a
list of services
@type discover: str -> [service]
@re... | Return the next authentication service for the pair of
user_input and session. This function handles fallback.
@param discover: a callable that takes a URL and returns a
list of services
@type discover: str -> [service]
@return: the next available service |
def awake(self, procid):
""" Remove procid from waitlists and reestablish it in the running list """
logger.debug(f"Remove procid:{procid} from waitlists and reestablish it in the running list")
for wait_list in self.rwait:
if procid in wait_list:
wait_list.remove(pro... | Remove procid from waitlists and reestablish it in the running list |
def encode_username_password(
username: Union[str, bytes], password: Union[str, bytes]
) -> bytes:
"""Encodes a username/password pair in the format used by HTTP auth.
The return value is a byte string in the form ``username:password``.
.. versionadded:: 5.1
"""
if isinstance(username, unicode... | Encodes a username/password pair in the format used by HTTP auth.
The return value is a byte string in the form ``username:password``.
.. versionadded:: 5.1 |
def set_translation(lang):
"""Set the translation used by (some) pywws modules.
This sets the translation object ``pywws.localisation.translation``
to use a particular language.
The ``lang`` parameter can be any string of the form ``en``,
``en_GB`` or ``en_GB.UTF-8``. Anything after a ``.`` charac... | Set the translation used by (some) pywws modules.
This sets the translation object ``pywws.localisation.translation``
to use a particular language.
The ``lang`` parameter can be any string of the form ``en``,
``en_GB`` or ``en_GB.UTF-8``. Anything after a ``.`` character is
ignored. In the case of... |
def accept(self):
"""
Call the :meth:`accept` method of the underlying socket and set up SSL
on the returned socket, using the Context object supplied to this
:class:`Connection` object at creation.
:return: A *(conn, addr)* pair where *conn* is the new
:class:`Conne... | Call the :meth:`accept` method of the underlying socket and set up SSL
on the returned socket, using the Context object supplied to this
:class:`Connection` object at creation.
:return: A *(conn, addr)* pair where *conn* is the new
:class:`Connection` object created, and *address* i... |
def attach_related_file(self, path, mimetype=None):
    """Attach a file from the filesystem as a related attachment.

    :param path: filesystem path of the file to attach
    :param mimetype: optional MIME type, passed through to attach_related
    """
    filename = os.path.basename(path)
    # Use a context manager so the file handle is closed promptly
    # instead of leaking until garbage collection.
    with open(path, 'rb') as fh:
        content = fh.read()
    self.attach_related(filename, content, mimetype)
def convertPrice(variant, regex=None, short_regex=None, none_regex=none_price_regex):
''' Helper function to convert the given input price into integers (cents
count). :obj:`int`, :obj:`float` and :obj:`str` are supported
:param variant: Price
:param re.compile regex: Regex to convert str i... | Helper function to convert the given input price into integers (cents
count). :obj:`int`, :obj:`float` and :obj:`str` are supported
:param variant: Price
:param re.compile regex: Regex to convert str into price. The re should
contain two named groups `euro` and `cent`
:para... |
def margin(
self,
axis=None,
weighted=True,
include_missing=False,
include_transforms_for_dims=None,
prune=False,
include_mr_cat=False,
):
"""Return ndarray representing slice margin across selected axis.
A margin (or basis) can be calculated ... | Return ndarray representing slice margin across selected axis.
A margin (or basis) can be calculated for a contingency table, provided
that the dimensions of the desired directions are marginable. The
dimensions are marginable if they represent mutualy exclusive data,
such as true categ... |
def get_entry_categories(self, category_nodes):
"""
Return a list of entry's categories
based on imported categories.
"""
categories = []
for category_node in category_nodes:
domain = category_node.attrib.get('domain')
if domain == 'category':
... | Return a list of entry's categories
based on imported categories. |
def get_input(problem):
"""" Returns the specified problem answer in the form
problem: problem id
Returns string, or bytes if a file is loaded
"""
input_data = load_input()
pbsplit = problem.split(":")
problem_input = input_data['input'][pbsplit[0]]
if isinstance(problem_input... | Returns the specified problem answer in the form
problem: problem id
Returns string, or bytes if a file is loaded |
def solubility_parameter(self):
r'''Solubility parameter of the chemical at its
current temperature and pressure, in units of [Pa^0.5].
.. math::
\delta = \sqrt{\frac{\Delta H_{vap} - RT}{V_m}}
Calculated based on enthalpy of vaporization and molar volume.
Normally ... | r'''Solubility parameter of the chemical at its
current temperature and pressure, in units of [Pa^0.5].
.. math::
\delta = \sqrt{\frac{\Delta H_{vap} - RT}{V_m}}
Calculated based on enthalpy of vaporization and molar volume.
Normally calculated at STP. For uses of this prop... |
def withAttribute(*args,**attrDict):
"""Helper to create a validating parse action to be used with start
tags created with :class:`makeXMLTags` or
:class:`makeHTMLTags`. Use ``withAttribute`` to qualify
a starting tag with a required attribute value, to avoid false
matches on common tags such as ``<... | Helper to create a validating parse action to be used with start
tags created with :class:`makeXMLTags` or
:class:`makeHTMLTags`. Use ``withAttribute`` to qualify
a starting tag with a required attribute value, to avoid false
matches on common tags such as ``<TD>`` or ``<DIV>``.
Call ``withAttribut... |
def register_blueprints(app, application_package_name=None, blueprint_directory=None):
"""Register Flask blueprints on app object"""
if not application_package_name:
application_package_name = 'app'
if not blueprint_directory:
blueprint_directory = os.path.join(os.getcwd(), application_pack... | Register Flask blueprints on app object |
def UpdateFlow(self,
client_id,
flow_id,
flow_obj=db.Database.unchanged,
flow_state=db.Database.unchanged,
client_crash_info=db.Database.unchanged,
pending_termination=db.Database.unchanged,
processing... | Updates flow objects in the database. |
def remove_accounts_from_group(accounts_query, group):
    """Remove every non-deleted account in *accounts_query* from *group*.

    Accounts whose ``date_deleted`` is set are skipped.
    """
    active_accounts = accounts_query.filter(date_deleted__isnull=True)
    for account in active_accounts:
        remove_account_from_group(account, group)
def __read_device(self):
"""Read the state of the gamepad."""
state = XinputState()
res = self.manager.xinput.XInputGetState(
self.__device_number, ctypes.byref(state))
if res == XINPUT_ERROR_SUCCESS:
return state
if res != XINPUT_ERROR_DEVICE_NOT_CONNECTE... | Read the state of the gamepad. |
def execute_catch(c, sql, vars=None):
    """Run a query on cursor *c*, swallowing any error.

    For error-recovery paths where the error handler itself must not
    raise. Failures are logged and otherwise ignored.

    :param c: DB-API cursor
    :param sql: SQL statement to execute
    :param vars: optional bind parameters; omitted from the execute()
        call when None, since strict DB-API drivers (e.g. sqlite3)
        reject an explicit None for the parameters argument
    """
    try:
        if vars is None:
            c.execute(sql)
        else:
            c.execute(sql, vars)
    except Exception as err:
        # Log only the leading SQL keyword to keep the message short.
        cmd = sql.split(' ', 1)[0]
        log.error("Error executing %s: %s", cmd, err)
def create_intent(self,
parent,
intent,
language_code=None,
intent_view=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
... | Creates an intent in the specified agent.
Example:
>>> import dialogflow_v2
>>>
>>> client = dialogflow_v2.IntentsClient()
>>>
>>> parent = client.project_agent_path('[PROJECT]')
>>>
>>> # TODO: Initialize ``intent``:
... |
def from_bytes(OverwinterTx, byte_string):
'''
byte-like -> OverwinterTx
'''
header = byte_string[0:4]
group_id = byte_string[4:8]
if header != b'\x03\x00\x00\x80' or group_id != b'\x70\x82\xc4\x03':
raise ValueError(
'Bad header or group ID. ... | byte-like -> OverwinterTx |
def add_to_stage(self, paths):
"""Stage given files
:param paths:
:return:
"""
cmd = self._command.add(paths)
(code, stdout, stderr) = self._exec(cmd)
if code:
raise errors.VCSError('Can\'t add paths to VCS. Process exited with code %d and message: ... | Stage given files
:param paths:
:return: |
def convert_runsummary_to_json(
df, comment='Uploaded via km3pipe.StreamDS', prefix='TEST_'
):
"""Convert a Pandas DataFrame with runsummary to JSON for DB upload"""
data_field = []
comment += ", by {}".format(getpass.getuser())
for det_id, det_data in df.groupby('det_id'):
runs_field = ... | Convert a Pandas DataFrame with runsummary to JSON for DB upload |
def add_method(obj, func, name=None):
    """Bind *func* to *obj* as an instance method.

    :param obj: object to attach the method to
    :param func: plain function whose first argument is the instance
    :param name: attribute name to use; defaults to ``func.__name__``
    """
    attr = name if name is not None else func.__name__
    if sys.version_info < (3,):
        # Python 2's MethodType also requires the class.
        bound = types.MethodType(func, obj, obj.__class__)
    else:
        bound = types.MethodType(func, obj)
    setattr(obj, attr, bound)
def _histogram_move_keys_by_game(sess, ds, batch_size=8*1024):
"""Given dataset of key names, return histogram of moves/game.
Move counts are written by the game players, so
this is mostly useful for repair or backfill.
Args:
sess: TF session
ds: TF dataset containing game move keys.
... | Given dataset of key names, return histogram of moves/game.
Move counts are written by the game players, so
this is mostly useful for repair or backfill.
Args:
sess: TF session
ds: TF dataset containing game move keys.
batch_size: performance tuning parameter |
def _limit_features(self, X, vocabulary, high=None, low=None,
limit=None):
"""Remove too rare or too common features.
Prune features that are non zero in more samples than high or less
documents than low, modifying the vocabulary, and restricting it to
at most th... | Remove too rare or too common features.
Prune features that are non zero in more samples than high or less
documents than low, modifying the vocabulary, and restricting it to
at most the limit most frequent.
This does not prune samples with zero features. |
def _parse_args(self,freqsAngles=True,_firstFlip=False,*args):
"""Helper function to parse the arguments to the __call__ and actionsFreqsAngles functions"""
from galpy.orbit import Orbit
RasOrbit= False
integrated= True #whether the orbit was already integrated when given
if len(... | Helper function to parse the arguments to the __call__ and actionsFreqsAngles functions |
def get_user_presence(self, userid):
''' check on presence of a user '''
response, status_code = self.__pod__.Presence.get_v2_user_uid_presence(
sessionToken=self.__session__,
uid=userid
).result()
self.logger.debug('%s: %s' % (status_code, response))
retu... | check on presence of a user |
def get_child_by_name(parent, name):
"""
Iterate through a gtk container, `parent`,
and return the widget with the name `name`.
"""
# http://stackoverflow.com/questions/2072976/access-to-widget-in-gtk
def iterate_children(widget, name):
if widget.get_name() == name:
return wi... | Iterate through a gtk container, `parent`,
and return the widget with the name `name`. |
def add_item_metadata(self, handle, key, value):
"""Store the given key:value pair for the item associated with handle.
:param handle: handle for accessing an item before the dataset is
frozen
:param key: metadata key
:param value: metadata value
"""
... | Store the given key:value pair for the item associated with handle.
:param handle: handle for accessing an item before the dataset is
frozen
:param key: metadata key
:param value: metadata value |
def create_embeded_pkcs7_signature(data, cert, key):
"""
Creates an embeded ("nodetached") pkcs7 signature.
This is equivalent to the output of::
openssl smime -sign -signer cert -inkey key -outform DER -nodetach < data
:type data: bytes
:type cert: str
:type key: str
""" # noqa:... | Creates an embeded ("nodetached") pkcs7 signature.
This is equivalent to the output of::
openssl smime -sign -signer cert -inkey key -outform DER -nodetach < data
:type data: bytes
:type cert: str
:type key: str |
def convert_to_consumable_types (self, project, name, prop_set, sources, only_one=False):
""" Attempts to convert 'source' to the types that this generator can
handle. The intention is to produce the set of targets can should be
used when generator is run.
only_one: convert... | Attempts to convert 'source' to the types that this generator can
handle. The intention is to produce the set of targets can should be
used when generator is run.
only_one: convert 'source' to only one of source types
if there's more that one possibility, re... |
def set_host_finished(self, scan_id, target, host):
    """Append *host* entries to the finished-hosts list of *target* in scan *scan_id*."""
    scan_entry = self.scans_table[scan_id]
    finished = scan_entry['finished_hosts']
    finished[target].extend(host)
    # Re-assign so backends that track writes by key assignment stay in sync.
    scan_entry['finished_hosts'] = finished
def dist(src, tar, method=sim_levenshtein):
"""Return a distance between two strings.
This is a generalized function for calling other distance functions.
Parameters
----------
src : str
Source string for comparison
tar : str
Target string for comparison
method : function
... | Return a distance between two strings.
This is a generalized function for calling other distance functions.
Parameters
----------
src : str
Source string for comparison
tar : str
Target string for comparison
method : function
Specifies the similarity metric (:py:func:`s... |
def _select_index(self, row, col):
"""Change the selection index, and make sure it stays in the right range
A little more complicated than just dooing modulo the number of row columns
to be sure to cycle through all element.
horizontaly, the element are maped like this :
to r <... | Change the selection index, and make sure it stays in the right range
A little more complicated than just dooing modulo the number of row columns
to be sure to cycle through all element.
horizontaly, the element are maped like this :
to r <-- a b c d e f --> to g
to f <-- g h i... |
def addFilter(self, filterMethod=FILTER_METHOD_AND, **kwargs):
'''
addFilter - Add a filter to this query.
@param filterMethod <str> - The filter method to use (AND or OR), default: 'AND'
@param additional args - Filter arguments. @see QueryableListBase.filter
... | addFilter - Add a filter to this query.
@param filterMethod <str> - The filter method to use (AND or OR), default: 'AND'
@param additional args - Filter arguments. @see QueryableListBase.filter
@raises ValueError if filterMethod is not one of known methods. |
def lbd_to_XYZ_jac(*args,**kwargs):
"""
NAME:
lbd_to_XYZ_jac
PURPOSE:
calculate the Jacobian of the Galactic spherical coordinates to Galactic rectangular coordinates transformation
INPUT:
l,b,D- Galactic spherical coordinates
vlos,pmll,pmbb- Galactic spherical velociti... | NAME:
lbd_to_XYZ_jac
PURPOSE:
calculate the Jacobian of the Galactic spherical coordinates to Galactic rectangular coordinates transformation
INPUT:
l,b,D- Galactic spherical coordinates
vlos,pmll,pmbb- Galactic spherical velocities (some as proper motions)
if 6 inputs:... |
def get_alert(thing_name, key, session=None):
    """Fetch the alert configured for *thing_name*.

    NOTE(review): the previous docstring said "Set an alert", but the code
    issues a GET against /get/alert/for/... — it fetches, not sets.

    :param thing_name: name of the thing whose alert to retrieve
    :param key: API key sent as a query parameter
    :param session: optional session object passed through to _request
    """
    endpoint = '/get/alert/for/{0}'.format(thing_name)
    return _request('get', endpoint, params={'key': key}, session=session)
def show_lbaas_healthmonitor(self, lbaas_healthmonitor, **_params):
    """Fetch information for a single LBaaS health monitor.

    :param lbaas_healthmonitor: identifier interpolated into the resource path
    :param _params: extra request parameters forwarded to the GET call
    """
    path = self.lbaas_healthmonitor_path % (lbaas_healthmonitor)
    return self.get(path, params=_params)
def handle_url_build_error(self, error: Exception, endpoint: str, values: dict) -> str:
"""Handle a build error.
Ideally this will return a valid url given the error endpoint
and values.
"""
for handler in self.url_build_error_handlers:
result = handler(error, endpoi... | Handle a build error.
Ideally this will return a valid url given the error endpoint
and values. |
def tdSensorValue(self, protocol, model, sid, datatype):
"""Get the sensor value for a given sensor.
:return: a dict with the keys: value, timestamp.
"""
value = create_string_buffer(20)
timestamp = c_int()
self._lib.tdSensorValue(protocol, model, sid, datatype,
... | Get the sensor value for a given sensor.
:return: a dict with the keys: value, timestamp. |
def robust_outer_product(vec_1, vec_2):
"""
Calculates a 'robust' outer product of two vectors that may or may not
contain very small values.
Parameters
----------
vec_1 : 1D ndarray
vec_2 : 1D ndarray
Returns
-------
outer_prod : 2D ndarray. The outer product of vec_1 and vec_... | Calculates a 'robust' outer product of two vectors that may or may not
contain very small values.
Parameters
----------
vec_1 : 1D ndarray
vec_2 : 1D ndarray
Returns
-------
outer_prod : 2D ndarray. The outer product of vec_1 and vec_2 |
def sort_tiers(self, key=lambda x: x.name):
"""Sort the tiers given the key. Example key functions:
Sort according to the tiername in a list:
``lambda x: ['name1', 'name2' ... 'namen'].index(x.name)``.
Sort according to the number of annotations:
``lambda x: len(list(x.get_in... | Sort the tiers given the key. Example key functions:
Sort according to the tiername in a list:
``lambda x: ['name1', 'name2' ... 'namen'].index(x.name)``.
Sort according to the number of annotations:
``lambda x: len(list(x.get_intervals()))``
:param func key: A key function.... |
def business_rule_notification_is_blocked(self, hosts, services):
# pylint: disable=too-many-locals
"""Process business rule notifications behaviour. If all problems have
been acknowledged, no notifications should be sent if state is not OK.
By default, downtimes are ignored, unless expl... | Process business rule notifications behaviour. If all problems have
been acknowledged, no notifications should be sent if state is not OK.
By default, downtimes are ignored, unless explicitly told to be treated
as acknowledgements through with the business_rule_downtime_as_ack set.
:ret... |
def generate_single_simulation(self, x):
"""
Generate a single SSA simulation
:param x: an integer to reset the random seed. If None, the initial random number generator is used
:return: a list of :class:`~means.simulation.Trajectory` one per species in the problem
:rtype: list[:... | Generate a single SSA simulation
:param x: an integer to reset the random seed. If None, the initial random number generator is used
:return: a list of :class:`~means.simulation.Trajectory` one per species in the problem
:rtype: list[:class:`~means.simulation.Trajectory`] |
def create_untl_xml_subelement(parent, element, prefix=''):
"""Create a UNTL XML subelement."""
subelement = SubElement(parent, prefix + element.tag)
if element.content is not None:
subelement.text = element.content
if element.qualifier is not None:
subelement.attrib["qualifier"] = eleme... | Create a UNTL XML subelement. |
def _bundle_generic(bfile, addhelper, fmt, reffmt, data_dir):
'''
Loop over all basis sets and add data to an archive
Parameters
----------
bfile : object
An object that gets passed through to the addhelper function
addhelper : function
A function that takes bfile and adds data ... | Loop over all basis sets and add data to an archive
Parameters
----------
bfile : object
An object that gets passed through to the addhelper function
addhelper : function
A function that takes bfile and adds data to the bfile
fmt : str
Format of the basis set to create
r... |
def snapshot(self):
    """Capture the current muted/volume/stream state for a later restore."""
    state = dict(muted=self.muted, volume=self.volume, stream=self.stream)
    self._snapshot = state
    _LOGGER.info('took snapshot of current state of %s', self.friendly_name)
def transform_q(q, query):
"""
Replaces (lookup, value) children of Q with equivalent WhereNode objects.
This is a pre-prep of our Q object, ready for later rendering into SQL.
Modifies in place, no need to return.
(We could do this in render_q, but then we'd have to pass the Query object
from... | Replaces (lookup, value) children of Q with equivalent WhereNode objects.
This is a pre-prep of our Q object, ready for later rendering into SQL.
Modifies in place, no need to return.
(We could do this in render_q, but then we'd have to pass the Query object
from ConditionalAggregate down into SQLCond... |
def migrate_passwords_to_leader_storage(self, excludes=None):
"""Migrate any passwords storage on disk to leader storage."""
if not is_leader():
log("Skipping password migration as not the lead unit",
level=DEBUG)
return
dirname = os.path.dirname(self.root... | Migrate any passwords storage on disk to leader storage. |
def main(sample_id, assembly_file, minsize):
"""Main executor of the process_mapping template.
Parameters
----------
sample_id : str
Sample Identification string.
assembly: str
Path to the fatsa file generated by the assembler.
minsize: str
Min contig size to be consider... | Main executor of the process_mapping template.
Parameters
----------
sample_id : str
Sample Identification string.
assembly: str
Path to the fatsa file generated by the assembler.
minsize: str
Min contig size to be considered a complete ORF |
def _connect(self):
"""Try to connect to the database.
Raises:
:exc:`~ConnectionError`: If the connection to the database
fails.
:exc:`~AuthenticationError`: If there is a OperationFailure due to
Authentication failure after connecting to the data... | Try to connect to the database.
Raises:
:exc:`~ConnectionError`: If the connection to the database
fails.
:exc:`~AuthenticationError`: If there is a OperationFailure due to
Authentication failure after connecting to the database.
:exc:`~Config... |
def exception_wrapper(f):
"""Decorator to convert dbus exception to pympris exception."""
@wraps(f)
def wrapper(*args, **kwds):
try:
return f(*args, **kwds)
except dbus.exceptions.DBusException as err:
_args = err.args
raise PyMPRISException(*_args)
re... | Decorator to convert dbus exception to pympris exception. |
def set_affinity_matrix(self, affinity_mat):
"""
Parameters
----------
affinity_mat : sparse matrix (N_obs, N_obs).
The adjacency matrix to input.
"""
affinity_mat = check_array(affinity_mat, accept_sparse=sparse_formats)
if affinity_mat.shape[0] != af... | Parameters
----------
affinity_mat : sparse matrix (N_obs, N_obs).
The adjacency matrix to input. |
def encrypt(self):
"""
We perform no encryption, we just encode the value as base64 and then
decode it in decrypt().
"""
value = self.parameters.get("Plaintext")
if isinstance(value, six.text_type):
value = value.encode('utf-8')
return json.dumps({"Cip... | We perform no encryption, we just encode the value as base64 and then
decode it in decrypt(). |
def find_additional_rels(self, all_models):
"""Attempts to scan for additional relationship fields for this model based on all of the other models'
structures and relationships.
"""
for model_name, model in iteritems(all_models):
if model_name != self.name:
fo... | Attempts to scan for additional relationship fields for this model based on all of the other models'
structures and relationships. |
def get_instance_property(instance, property_name):
"""Retrieves property of an instance, keeps retrying until getting a non-None"""
name = get_name(instance)
while True:
try:
value = getattr(instance, property_name)
if value is not None:
break
print(f"retrieving {property_name} on ... | Retrieves property of an instance, keeps retrying until getting a non-None |
def memoizedmethod(method):
"""
Decorator that caches method result.
Args:
method (function): Method
Returns:
function: Memoized method.
Notes:
Target method class needs as "_cache" attribute (dict).
It is the case of "ObjectIOBase" and all its subclasses.
"""... | Decorator that caches method result.
Args:
method (function): Method
Returns:
function: Memoized method.
Notes:
Target method class needs as "_cache" attribute (dict).
It is the case of "ObjectIOBase" and all its subclasses. |
def ReadTrigger(self, trigger_link, options=None):
"""Reads a trigger.
:param str trigger_link:
The link to the trigger.
:param dict options:
The request options for the request.
:return:
The read Trigger.
:rtype:
dict
""... | Reads a trigger.
:param str trigger_link:
The link to the trigger.
:param dict options:
The request options for the request.
:return:
The read Trigger.
:rtype:
dict |
def shadow_hash(crypt_salt=None, password=None, algorithm='sha512'):
'''
Generates a salted hash suitable for /etc/shadow.
crypt_salt : None
Salt to be used in the generation of the hash. If one is not
provided, a random salt will be generated.
password : None
Value to be salte... | Generates a salted hash suitable for /etc/shadow.
crypt_salt : None
Salt to be used in the generation of the hash. If one is not
provided, a random salt will be generated.
password : None
Value to be salted and hashed. If one is not provided, a random
password will be generated... |
def check_type_and_values_of_specification_dict(specification_dict,
unique_alternatives):
"""
Verifies that the values of specification_dict have the correct type, have
the correct structure, and have valid values (i.e. are actually in the set
of possible ... | Verifies that the values of specification_dict have the correct type, have
the correct structure, and have valid values (i.e. are actually in the set
of possible alternatives). Will raise various errors if / when appropriate.
Parameters
----------
specification_dict : OrderedDict.
Keys are ... |
def fit(self, X, y=None):
"""Fits the GraphLasso covariance model to X.
Closely follows sklearn.covariance.graph_lasso.GraphLassoCV.
Parameters
----------
X : ndarray, shape (n_samples, n_features)
Data from which to compute the covariance estimate
"""
... | Fits the GraphLasso covariance model to X.
Closely follows sklearn.covariance.graph_lasso.GraphLassoCV.
Parameters
----------
X : ndarray, shape (n_samples, n_features)
Data from which to compute the covariance estimate |
def com_google_fonts_check_fstype(ttFont):
"""Checking OS/2 fsType.
Fonts must have their fsType field set to zero.
This setting is known as Installable Embedding, meaning
that none of the DRM restrictions are enabled on the fonts.
More info available at:
https://docs.microsoft.com/en-us/typography/openty... | Checking OS/2 fsType.
Fonts must have their fsType field set to zero.
This setting is known as Installable Embedding, meaning
that none of the DRM restrictions are enabled on the fonts.
More info available at:
https://docs.microsoft.com/en-us/typography/opentype/spec/os2#fstype |
def _parse_hparams(hparams):
"""Split hparams, based on key prefixes.
Args:
hparams: hyperparameters
Returns:
Tuple of hparams for respectably: agent, optimizer, runner, replay_buffer.
"""
prefixes = ["agent_", "optimizer_", "runner_", "replay_buffer_"]
ret = []
for prefix in prefixes:
ret_... | Split hparams, based on key prefixes.
Args:
hparams: hyperparameters
Returns:
Tuple of hparams for respectably: agent, optimizer, runner, replay_buffer. |
def generate(env):
"""Add Builders and construction variables for gnulink to an Environment."""
link.generate(env)
if env['PLATFORM'] == 'hpux':
env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared -fPIC')
# __RPATH is set to $_RPATH in the platform specification if that
# platform su... | Add Builders and construction variables for gnulink to an Environment. |
def get_data_length(self):
# type: () -> int
'''
A method to get the length of the data that this Directory Record
points to.
Parameters:
None.
Returns:
The length of the data that this Directory Record points to.
'''
if not self._initia... | A method to get the length of the data that this Directory Record
points to.
Parameters:
None.
Returns:
The length of the data that this Directory Record points to. |
def ProbGreater(self, x):
    """Probability that a sample from this Pmf exceeds x.

    x: number
    returns: float probability
    """
    # dict.items() works on both Python 2 and 3; the original
    # iteritems() was removed in Python 3.
    return sum(prob for (val, prob) in self.d.items() if val > x)
x: number
returns: float probability |
def pack(self, remaining_size):
"""Pack data of part into binary format"""
arguments_count, payload = self.pack_data(remaining_size - self.header_size)
payload_length = len(payload)
# align payload length to multiple of 8
if payload_length % 8 != 0:
payload += b"\x00... | Pack data of part into binary format |
def decrypt_subtitle(self, subtitle):
"""Decrypt encrypted subtitle data in high level model object
@param crunchyroll.models.Subtitle subtitle
@return str
"""
return self.decrypt(self._build_encryption_key(int(subtitle.id)),
subtitle['iv'][0].text.decode('base64'),
... | Decrypt encrypted subtitle data in high level model object
@param crunchyroll.models.Subtitle subtitle
@return str |
def clinvar_submission_header(submission_objs, csv_type):
"""Determine which fields to include in csv header by checking a list of submission objects
Args:
submission_objs(list): a list of objects (variants or casedata) to include in a csv file
csv_type(str) : 'variant_data' or 'cas... | Determine which fields to include in csv header by checking a list of submission objects
Args:
submission_objs(list): a list of objects (variants or casedata) to include in a csv file
csv_type(str) : 'variant_data' or 'case_data'
Returns:
custom_header(dict): A dict... |
def https_connection(self):
"""Return an https connection to this Connection's endpoint.
Returns a 3-tuple containing::
1. The :class:`HTTPSConnection` instance
2. Dictionary of auth headers to be used with the connection
3. The root url path (str) to be used for re... | Return an https connection to this Connection's endpoint.
Returns a 3-tuple containing::
1. The :class:`HTTPSConnection` instance
2. Dictionary of auth headers to be used with the connection
3. The root url path (str) to be used for requests. |
def add_number_widget(self, ref, x=1, value=1):
    """ Add Number Widget

    Return the widget registered under ``ref``, creating a new
    ``NumberWidget`` for this screen first if none exists yet.
    """
    if ref in self.widgets:
        return self.widgets[ref]
    created = widgets.NumberWidget(screen=self, ref=ref, x=x, value=value)
    self.widgets[ref] = created
    return created
def _handle_ticker(self, dtype, data, ts):
"""Adds received ticker data to self.tickers dict, filed under its channel
id.
:param dtype:
:param data:
:param ts:
:return:
"""
self.log.debug("_handle_ticker: %s - %s - %s", dtype, data, ts)
channel_id... | Adds received ticker data to self.tickers dict, filed under its channel
id.
:param dtype:
:param data:
:param ts:
:return: |
def singularity_build(script=None, src=None, dest=None, **kwargs):
'''docker build command. By default a script is sent to the docker build command but
    you can also specify different parameters defined in https://docker-py.readthedocs.org/en/stable/api/#build
'''
singularity = SoS_SingularityClient()
sing... | docker build command. By default a script is sent to the docker build command but
you can also specify different parameters defined inu//docker-py.readthedocs.org/en/stable/api/#build |
def _normalized_keys(self, section, items):
# type: (str, Iterable[Tuple[str, Any]]) -> Dict[str, Any]
"""Normalizes items to construct a dictionary with normalized keys.
This routine is where the names become keys and are made the same
regardless of source - configuration files or envi... | Normalizes items to construct a dictionary with normalized keys.
This routine is where the names become keys and are made the same
regardless of source - configuration files or environment. |
def _make_publisher(catalog_or_dataset):
"""De estar presentes las claves necesarias, genera el diccionario
"publisher" a nivel catálogo o dataset."""
level = catalog_or_dataset
keys = [k for k in ["publisher_name", "publisher_mbox"] if k in level]
if keys:
level["publisher"] = {
... | De estar presentes las claves necesarias, genera el diccionario
"publisher" a nivel catálogo o dataset. |
def transitions_for(self, roles=None, actor=None, anchors=[]):
"""
For use on :class:`~coaster.sqlalchemy.mixins.RoleMixin` classes:
returns currently available transitions for the specified
roles or actor as a dictionary of name: :class:`StateTransitionWrapper`.
"""
prox... | For use on :class:`~coaster.sqlalchemy.mixins.RoleMixin` classes:
returns currently available transitions for the specified
roles or actor as a dictionary of name: :class:`StateTransitionWrapper`. |
def strftime(dt, fmt):
'''
`strftime` implementation working before 1900
'''
if _illegal_s.search(fmt):
raise TypeError("This strftime implementation does not handle %s")
if dt.year > 1900:
return dt.strftime(fmt)
fmt = fmt.replace('%c', '%a %b %d %H:%M:%S %Y')\
.re... | `strftime` implementation working before 1900 |
def parse_options_header(value, multiple=False):
"""Parse a ``Content-Type`` like header into a tuple with the content
type and the options:
>>> parse_options_header('text/html; charset=utf8')
('text/html', {'charset': 'utf8'})
This should not be used to parse ``Cache-Control`` like headers that u... | Parse a ``Content-Type`` like header into a tuple with the content
type and the options:
>>> parse_options_header('text/html; charset=utf8')
('text/html', {'charset': 'utf8'})
This should not be used to parse ``Cache-Control`` like headers that use
a slightly different format. For these headers u... |
def rename_ligand(self,ligand_name,mol_file):
"""
Get an atom selection for the selected from both topology and trajectory. Rename the ligand LIG
to help with ligand names that are not standard, e.g. contain numbers.
Takes:
* ligand_name * - MDAnalysis atom selection ... | Get an atom selection for the selected from both topology and trajectory. Rename the ligand LIG
to help with ligand names that are not standard, e.g. contain numbers.
Takes:
* ligand_name * - MDAnalysis atom selection for the ligand selected by user
Output:
... |
def _optimize_with_progs(format_module, filename, image_format):
"""
Use the correct optimizing functions in sequence.
And report back statistics.
"""
filesize_in = os.stat(filename).st_size
report_stats = None
for func in format_module.PROGRAMS:
if not getattr(Settings, func.__nam... | Use the correct optimizing functions in sequence.
And report back statistics. |
def push(self, item):
    '''
    Encode ``item`` and prepend it to the list stored at ``self.key``
    via the server's ``lpush``.
    '''
    payload = self._encode_item(item)
    self.server.lpush(self.key, payload)
def get_prep_value(self, value):
'''The psycopg adaptor returns Python objects,
but we also have to handle conversion ourselves
'''
if isinstance(value, JSON.JsonDict):
return json.dumps(value, cls=JSON.Encoder)
if isinstance(value, JSON.JsonList):
ret... | The psycopg adaptor returns Python objects,
but we also have to handle conversion ourselves |
def convert_radian(coord, *variables):
"""Convert the given coordinate from radian to degree
Parameters
----------
coord: xr.Variable
The variable to transform
``*variables``
The variables that are on the same unit.
Returns
-------
xr.Variable
The transformed va... | Convert the given coordinate from radian to degree
Parameters
----------
coord: xr.Variable
The variable to transform
``*variables``
The variables that are on the same unit.
Returns
-------
xr.Variable
The transformed variable if one of the given `variables` has uni... |
def _print_foreign_playlist_message(self):
""" reset previous message """
self.operation_mode = self.window_mode = NORMAL_MODE
self.refreshBody()
""" display new message """
txt='''A playlist by this name:
__"|{0}|"
already exists in the config directory.
... | reset previous message |
def observe(matcher):
"""
Internal decorator to trigger operator hooks before/after
matcher execution.
"""
@functools.wraps(matcher)
def observer(self, subject, *expected, **kw):
# Trigger before hook, if present
if hasattr(self, 'before'):
... | Internal decorator to trigger operator hooks before/after
matcher execution. |
def trace(
data, name, format='png', datarange=(None, None), suffix='', path='./', rows=1, columns=1,
num=1, last=True, fontmap = None, verbose=1):
"""
Generates trace plot from an array of data.
:Arguments:
data: array or list
Usually a trace from an MCMC sample.
n... | Generates trace plot from an array of data.
:Arguments:
data: array or list
Usually a trace from an MCMC sample.
name: string
The name of the trace.
datarange: tuple or list
Preferred y-range of trace (defaults to (None,None)).
format (optional... |
def formatMessageForBuildResults(self, mode, buildername, buildset, build, master, previous_results, blamelist):
"""Generate a buildbot mail message and return a dictionary
containing the message body, type and subject."""
ss_list = buildset['sourcestamps']
results = build['results']
... | Generate a buildbot mail message and return a dictionary
containing the message body, type and subject. |
def update_user(resource_root, user):
"""
Update a user.
Replaces the user's details with those provided.
@param resource_root: The root Resource object
@param user: An ApiUser object
@return: An ApiUser object
"""
return call(resource_root.put,
'%s/%s' % (USERS_PATH, user.name), ApiUser, data=u... | Update a user.
Replaces the user's details with those provided.
@param resource_root: The root Resource object
@param user: An ApiUser object
@return: An ApiUser object |
def Clouds(name=None, deterministic=False, random_state=None):
"""
Augmenter to draw clouds in images.
This is a wrapper around ``CloudLayer``. It executes 1 to 2 layers per image, leading to varying densities
and frequency patterns of clouds.
This augmenter seems to be fairly robust w.r.t. the im... | Augmenter to draw clouds in images.
This is a wrapper around ``CloudLayer``. It executes 1 to 2 layers per image, leading to varying densities
and frequency patterns of clouds.
This augmenter seems to be fairly robust w.r.t. the image size. Tested with ``96x128``, ``192x256``
and ``960x1280``.
dt... |
def points(self, points):
""" set points without copying """
if not isinstance(points, np.ndarray):
raise TypeError('Points must be a numpy array')
# get the unique coordinates along each axial direction
x = np.unique(points[:,0])
y = np.unique(points[:,1])
z ... | set points without copying |
def encode(in_bytes):
"""Encode a string using Consistent Overhead Byte Stuffing (COBS).
Input is any byte string. Output is also a byte string.
Encoding guarantees no zero bytes in the output. The output
string will be expanded slightly, by a predictable amount.
An empty string is en... | Encode a string using Consistent Overhead Byte Stuffing (COBS).
Input is any byte string. Output is also a byte string.
Encoding guarantees no zero bytes in the output. The output
string will be expanded slightly, by a predictable amount.
An empty string is encoded to '\\x01 |
def run(self):
"""
Run a Quil program on the QVM multiple times and return the values stored in the
classical registers designated by the classical_addresses parameter.
:return: An array of bitstrings of shape ``(trials, len(classical_addresses))``
"""
super().run()
... | Run a Quil program on the QVM multiple times and return the values stored in the
classical registers designated by the classical_addresses parameter.
:return: An array of bitstrings of shape ``(trials, len(classical_addresses))`` |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.