content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def wait(timeout):
"""
Just wait during the timeout passed as argument
Can be use on test execution
@param timeout: in second
@type timeout: float
"""
try:
timeout = float(timeout)
except Exception:
raise TestWaitException("ERR_TE_002: wait initialization failed, "
... | 5,358,700 |
def update_minor_ver_in_trunk(ver, revnum):
"""Change the minor version in trunk to the next (future) minor version.
"""
trunk_wc = get_trunk_wc_path()
trunk_url = get_trunk_url()
svn_checkout(trunk_url + '@' + (str(revnum) if revnum else ''),
trunk_wc)
prev_ver = Version('1.%d... | 5,358,701 |
def visual_landmarks_cca_heatmap(visual_cca, ax, title='CC'):
""" Plots the visual cca loadings as landmarks
:param ax:
:param visual_cca:
:param title:
:return:
"""
color_map = plt.get_cmap('Reds')
mapcolors = [color_map(int(x * color_map.N / 100)) for x in range(100)]
# Normalize... | 5,358,702 |
def test_many2many_through_ext():
"""
>>> db = get_connection('sqlite://')
>>> db.echo = False
>>> db.metadata.drop_all()
>>> db.metadata.clear()
>>> class User(Model):
... username = Field(CHAR, max_length=20)
>>> def _save1(x):
... x['flag'] = '1'
>>> def _save2(x):
... | 5,358,703 |
def waitAndLocate(btn_img, params):
"""
Function to locate a button in the window
:param btn_img: path to the image of the button to look for
:return: coordinates + dimensions of the button
"""
start = time.time()
while True:
if time.time() - start > (3*60):
print("Time... | 5,358,704 |
def make_preds_batch(classifier: nn.Module,
batch_elements: List[SentenceEvidence],
device: str=None,
criterion: nn.Module=None,
tensorize_model_inputs: bool=True) -> Tuple[float, List[float], List[int], List[int]]:
"""Batch predict... | 5,358,705 |
def extract_validation(filename):
"""Extract certificate validation data from input file."""
with open(filename, "rt") as input_file:
event = json.load(input_file)
for participante in event.get("participantes", []):
fingerprint = participante["fingerprint"]
certificate = {
... | 5,358,706 |
def quick_sort(array):
"""
Not Inplace, but Standard version
"""
if array == []:
return []
else:
pivot = array[-1]
smaller = quick_sort([x for x in array[0:-1] if x <= pivot])
larger = quick_sort([x for x in array[0:-1] if x > pivot])
return smaller + [pivot] ... | 5,358,707 |
def calc_laplacian_matrix(D, W):
    """
    Compute the graph Laplacian matrix from the degree matrix and the
    similarity (adjacency) matrix of a graph.

    :param D: degree matrix of the graph (diagonal matrix of vertex degrees)
    :param W: similarity / adjacency matrix
    :return: the (unnormalized) Laplacian matrix L = D - W
    """
    return D - W
def on_fire(client, userdata, message):
"""
*Callback function parses a FireStarted message and switches FireState from "undefined" to "started"*
.. literalinclude:: /../examples/firesat/fires/main_fire.py
:lines: 68-73
"""
for index, observer in enumerate(app.simulator._observers):
... | 5,358,709 |
def package_upgrade(distupgrade=False):
    """Updates every package present on the system.

    :param distupgrade: presumably switches to a full distribution upgrade
        (allowing dependency changes/removals) instead of a plain package
        upgrade -- TODO confirm; no implementation is visible in this view.
    """
def _potrf_mhlo(platform, gpu_solver, dtype, a, lower):
"""Cholesky decomposition."""
a_type = ir.RankedTensorType(a.type)
dims = a_type.shape
m, n = dims[-2:]
assert m == n
batch_dims = tuple(dims[:-2])
num_bd = len(batch_dims)
batch = _prod(batch_dims)
lwork, opaque = gpu_solver.build_potrf_descrip... | 5,358,711 |
def deduplicate_fasta(args):
"""Deduplicata a fasta file."""
# get the number of genes per cluster
with MaybeCompressed(args.fasta, "rt") as stream:
fbuffer = []
ftuple = namedtuple("FTUPLE", ["name", "sequence"])
for name, sequence in fasta(stream, toupper=False, fullnames=True):
... | 5,358,712 |
def _bin2bcd(value):
"""Convert a binary value to binary coded decimal.
:param value: the binary value to convert to BCD. (required, no default)
"""
return value + 6 * (value // 10) | 5,358,713 |
def osm_get_info(idx):
"""Получаем информацию об административной территории
"""
link = 'https://www.openstreetmap.org/api/0.6/relation/' + str(idx)
response = requests.get(link)
if response.status_code == 200:
soup = BeautifulSoup(response.text, 'xml')
subarea_ids = [member.get('ref... | 5,358,714 |
def get_test_cases_coverage(session_id):
"""
coverage by test case
"""
tc_stats={}
tc_stats_list=[]
total_executed=0
sql='SELECT DISTINCT(test_id) FROM stats WHERE session_id=:sid AND test_id!="null"'
params={"sid":session_id}
conn=sqlite3.connect(CONNECTION_STRING)
c=conn.cursor... | 5,358,715 |
def can_minimize_file(file_path):
"""Check to see if we support minimization for this file."""
# If this is not a binary file, we should be able to minimize it in some way.
if not utils.is_binary_file(file_path):
return True
# Attempt to minimize IPC dumps.
if file_path.endswith(testcase_manager.IPCDUMP_... | 5,358,716 |
def generate_new_xen_xml(VIRSH_TEMPLATE, vm_name,
disk_img,
mac_addr,
memory_size=1048576, # 1GB of memory
cpu_count=1):
"""
Given a name, disk, and mac, this will output the appropriate xml
confi... | 5,358,717 |
def literal_query(query):
"""Don't interprete any special query syntax
SQLite's FTS extensions support special query syntax for AND, OR and
prefix searches, as well as grouping and negation. There are not of much
use in the dictionary case, but they break some legitimate queries. So
let's treat all... | 5,358,718 |
def gene2symbol(key: str, value: str) -> Dict[str, str]:
"""Map between S. pombe gene IDs, symbols, synonyms and names.
# Arguments
key: str, one of {"ID", "Symbol", "Synonym", "Name"}
value: str, one of {"ID", "Symbol", "Synonym", "Name"}
# Returns
dict: key value mapping
"""
... | 5,358,719 |
def initialize_application():
"""Create a config file and database as necessary"""
config_dir = util.get_config_dir()
config_path = util.get_config_path()
if os.path.exists(config_path):
if not click.confirm(('Churn has already been initialized.\n'
'Delete your ex... | 5,358,720 |
def run(num_trials=NUM_TRIALS_DEFAULT, dataset=get_dataset_names(),
algorithm=get_algorithm_names()):
"""
- Step 1: load preprocessed data and split it into train and test by 2/3 and 1/3
- Step 2: train and evaluate algorithm by calling `run_eval_alg`
- Step 3: write results (metrics eval and pr... | 5,358,721 |
def graph_from_string(s):
"""
Turn a string like "1 2; 1->2" into a graph.
"""
vertex_string, edge_string = s.split(';')
vertices = vertex_string.split()
edge_pairs = []
for edge_sequence in edge_string.split():
sequence_nodes = edge_sequence.split('->')
for tail, head in z... | 5,358,722 |
def WideResnetBlocknt(channels, strides=(1,1), channel_mismatch=False, batchnorm='std', parameterization='ntk'):
"""A WideResnet block, with or without BatchNorm."""
Main = stax_nt.serial(_batch_norm_internal(batchnorm), stax_nt.Relu(), stax_nt.Conv(channels, (3,3), strides, padding='SAME', parameterization=para... | 5,358,723 |
def saveOTF(font, destFile, truetype=False):
"""Save a RoboFab font as an OTF binary using ufo2fdk."""
if truetype:
compiler = compileTTF
else:
compiler = compileOTF
otf = compiler(font, featureCompilerClass=RobotoFeatureCompiler,
kernWriter=RobotoKernWriter)
otf.... | 5,358,724 |
def inverse_pinhole_matrix(pinhole, eps=1e-6):
"""
Returns the inverted pinhole matrix from a pinhole model
"""
assert len(pinhole.shape) == 2 and pinhole.shape[1] == 12, pinhole.shape
# unpack pinhole values
fx, fy, cx, cy = torch.chunk(pinhole[..., :4], 4, dim=1) # Nx1
# create out... | 5,358,725 |
async def init_integration_empty_response(hass) -> MockConfigEntry:
"""Set up the Nightscout integration in Home Assistant."""
entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_URL: "https://some.url:1234"},
)
with patch(
"homeassistant.components.nightscout.NightscoutAPI.get_sg... | 5,358,726 |
def create_DCT_NETWORK_INFO(networkid: str) -> dict:
    """Store the network identifier in the shared DCT_NETWORK_INFO mapping.

    :param networkid: network identifier, written under the "id" key
    :type networkid: str
    :return: the module-level DCT_NETWORK_INFO dict (mutated in place)
    :rtype: dict
    """
    # NOTE(review): this mutates and returns shared module state, so callers
    # all see the same dict object.
    DCT_NETWORK_INFO["id"] = networkid
    return DCT_NETWORK_INFO
def retrieve_question(request, uuid):
"""
"""
try:
question = Question.objects.get(pk=uuid)
except (Question.DoesNotExist, ValueError):
response_data = {
"error": {
"state": "not found",
"details": "Question object with ID {} could not be found... | 5,358,728 |
def gamescriptToJson(title: str, version: str = None) -> dict:
"""
Get game script heirarchy as a dictionary (for saving as json, etc)
"""
scripts = GameScript.objects.all().filter(title=title)
if version:
scripts = scripts.filter(version=version)
if len(scripts) == 0:
print("No... | 5,358,729 |
def is_ignored_faces(faces):
"""Check if the faces are ignored faces.
Args:
faces: Encoded face from face_recognition.
Returns:
bool: If a not ignored face appeared, return false, otherwise true.
"""
global ignored_faces
for face in faces:
matches = face_recognition.co... | 5,358,730 |
def get_prediction_model_status(hub_name: Optional[str] = None,
prediction_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetPredictionModelSta... | 5,358,731 |
def set_description_bool(resources, resource_texts):
    """Add bool 'has_description' for every resource."""
    for key in resources:
        # A resource "has a description" when resource_texts holds a
        # truthy (present, non-empty) entry for it.
        resources[key]["has_description"] = bool(resource_texts.get(key))
def build_2d_grid(ir):
""" Build simple grid with a column for each gate."""
grid = []
for g in ir.gates:
step = [None] * ir.ngates
if g.is_single():
step[g.idx0] = g
if g.is_ctl():
step[g.ctl] = g.ctl
step[g.idx1] = g
grid.append(step)
... | 5,358,733 |
def run_thread(execute=True):
    """
    Start pdconfd service as a thread.
    This function schedules pdconfd to run as a thread and returns immediately.

    :param execute: forwarded to ConfigManager; presumably controls whether
        configuration commands are actually executed -- TODO confirm.
    """
    global configManager
    # Keep a module-level reference so other code in this module can reach
    # the manager after this function returns.
    configManager = ConfigManager(settings.PDCONFD_WRITE_DIR, execute)
    # Hand the listen() call off to the reactor's thread instead of running
    # it here, so this call does not block.
    reactor.callFromThread(listen, configManager)
def parse_args():
"""
Wrapper function of argument parsing process.
"""
parser = argparse.ArgumentParser()
parser.add_argument(
'--save_loc', type=str, default='.',
help='where to save results'
)
parser.add_argument(
'--log_dir', type=str, default=os.path.join('logs'... | 5,358,735 |
def emmental_collate_fn(
batch: Union[List[Tuple[Dict[str, Any], Dict[str, Tensor]]], List[Dict[str, Any]]],
min_data_len: int = 0,
max_data_len: int = 0,
) -> Union[Tuple[Dict[str, Any], Dict[str, Tensor]], Dict[str, Any]]:
"""Collate function.
Args:
batch: The batch to collate.
min_da... | 5,358,736 |
def trap_jac_factory(j, dt):
"""Factory function to return a function for evaluating the Jacobian
of the trapezoidal formula. This returns a function of x_n (x at
this time step).
:param j: Jacobian of the function of x.
:param dt: time step.
:returns: trap_jac, callable which takes x_n and ev... | 5,358,737 |
async def about(message: types.Message):
    """Reply with the $JOE and $AVAX prices, market cap, circulating supply and TVL."""
    chat_id = message.chat.id
    # Rate-limit guard: stay silent when the timer forbids posting here.
    if not timer.canMessageOnChatId(chat_id):
        return
    summary = JoeSubGraph.getAbout()
    await bot.send_message(chat_id, summary)
def get_position_object(file_path: FilePathType):
"""
Read position data from .bin or .pos file and convert to
pynwb.behavior.SpatialSeries objects. If possible it should always
be preferred to read position data from the `.bin` file to ensure
samples are locked to ecephys time courses.
Paramet... | 5,358,739 |
def throw_dice(n):
    """Throw `n` dice, returns list of integers"""
    # One uniform draw in [1, 6] per die; n <= 0 yields an empty list,
    # matching the original countdown loop.
    return [random.randint(1, 6) for _ in range(n)]
def verify_token_signature(token):
"""Verify the signature of the token and return the claims
such as subject/username on valid signature"""
key = jwk.JWK.from_password(flask.current_app.config.get("SECRET_KEY"))
try:
jwttoken = jwt.JWT(key=key, jwt=token, algs=["HS256"])
return json.loa... | 5,358,741 |
def logtime_r2(t, y, ppd):
"""
Convert y=f(t) data from linear in time to logarithmic in time.
Args:
t: is the input time vector, linearly spaced
y: is the input vector of y values
ppd: number of points per decade for the output
Returns:
A 3-tuple (tout, you... | 5,358,742 |
def parcours_serpentin(n):
"""Retourne la liste des indices (colonne,ligne) (!!attention ici
ligne et colonne sont inversées!!) des cases correspondant à un
parcours de tableau de taille n x n en serpentin.
Ex: pour T = [ [1,2,3],
[4,5,6],
[7,8,9] ]
le parcour... | 5,358,743 |
def server():
"""
This function uses the socket and the time module.
The function is used to run a server that can be connected to by client.py and interacts with SockItAll.py.
The function is being ran on a separate thread 'thread_two'.
All messages produced by the server will be distinctly marked ... | 5,358,744 |
def bdev_rbd_unregister_cluster(client, name):
    """Remove Rados cluster object from the system.

    Args:
        client: RPC client used to issue the call.
        name: name of Rados cluster object to unregister.

    Returns:
        Whatever the 'bdev_rbd_unregister_cluster' RPC call returns.
    """
    return client.call('bdev_rbd_unregister_cluster', {'name': name})
def get_transcript(ContactId=None, MaxResults=None, NextToken=None, ScanDirection=None, SortOrder=None, StartPosition=None, ConnectionToken=None):
"""
Retrieves a transcript of the session. Note that ConnectionToken is used for invoking this API instead of ParticipantToken.
See also: AWS API Documentation
... | 5,358,746 |
def add_image_background(filepath):
""" adds image background to the scene
args:
string, file path to background image
returns:
None
"""
img = bpy.data.images.load(filepath)
for area in bpy.context.screen.areas:
if area.type == 'VIEW_3D':
space_data = area.spa... | 5,358,747 |
def _write_service(service, filename):
    """Serialize *service* via its to_data() form and write it to 'filename'."""
    payload = service.to_data()
    _write_json(payload, filename)
def plot_text_len(file):
"""
文本长度可视化
:param file:
:return:
"""
with open(file, "r", encoding='utf-8') as f:
lines = f.readlines()
# 获取所有文本的token 和 char 特征长度
all_length = [(len(i.strip().split("\t")[1].split(" ")), len(i.strip().split("\t")[2].split(" "))) for i in lines]
all_... | 5,358,749 |
def print_tc_footer(log, tc_name):
    """
    Emit the closing banner lines for a test case.

    @param log Logger object
    @param tc_name Test Case name
    """
    # f-strings are equivalent to the original "{}".format(...) calls here.
    log.info(f"{STANZA}")
    log.info(f"{SML_STANZA}{tc_name} END")
def get_rndc_secret():
    """Fetch the RNDC secret via the DesignateBindCharm singleton.

    :returns: str or None. The secret when available, otherwise None.
    """
    charm = DesignateBindCharm.singleton
    return charm.get_rndc_secret()
def interpolate_pairs(data_key, key, experiment, n_pairs, n_interp, save_path):
"""
Interpolate images
"""
exp, sampler, encoder, decoder = experiment
# Load the data that we'll use for interpolation
x_for_interpolation = exp.data_loader((2*n_pairs,), key=data_key)
# Split the data into pa... | 5,358,752 |
def transform_tweet(source_tweet):
"""
Perform transformation on one tweet, producing a new, transformed tweet.
:param source_tweet: Tweet text to transform
:type source_tweet: str
:return: Transformed tweet text
:rtype: str
"""
no_emojis = replace_emojis(source_tweet)
as_tokens = tokenize_string(no_... | 5,358,753 |
def pdFrame(file):
"""Creates a pandas data frame from a json log file
Args:
file: json log file to read
Returns:
pandas data frame
"""
logger.debug("creating pandas data frame from {}".format(file))
data = []
with open(file) as f:
for line in f:
tmp = [... | 5,358,754 |
def generate_figure(nc_file: str,
field_names: list,
show: bool = True,
save_path: str = None,
max_y: Optional[int] = 12,
dpi: Optional[int] = 200,
image_name: Optional[str] = None,
... | 5,358,755 |
def fitDataBFGSM2(M, val, c_w_l, init=None, nozero=True, k=3e34, lam=1., name='W_Abundances_grid_puestu_adpak_fitscaling_74_0.00000_5.00000_1000_idlsave'): #init is the three initial values of the gaussian needed to fit the data
""" function for determining the optimal fit given the desired parabolic regularization... | 5,358,756 |
def shutdown_application(app, env, docname):
    """
    Shutdown qt application

    Drops the cached Qt application reference held by the helpers module,
    if one is set. The (app, env, docname) signature matches a Sphinx
    event-handler -- the arguments themselves are unused here.
    """
    helpers = herculeum.sphinx.helpers
    if helpers.qt_app is not None:
        helpers.qt_app = None
def set_complete_cfg_spacy(false_or_true: str):
"""Set all SpaCy configuration parameters to the same logical value."""
return pytest.helpers.backup_config_params(
cfg.cls_setup.Setup._DCR_CFG_SECTION_SPACY,
[
(cfg.cls_setup.Setup._DCR_CFG_SPACY_TKN_ATTR_CLUSTER, false_or_true),
... | 5,358,758 |
def guard_unexpected_errors(func):
"""Decorator to be used in PyObjC callbacks where an error bubbling up
would cause a crash. Instead of crashing, print the error to stderr and
prevent passing to PyObjC layer.
For Python 3, print the exception using chaining. Accomplished by setting
the cause of :... | 5,358,759 |
def get_lun_ids(service_instance=None):
"""
Return a list of LUN (Logical Unit Number) NAA (Network Addressing Authority) IDs.
"""
if service_instance is None:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(service_instance=service_in... | 5,358,760 |
def parse_midi_file(midi_file,
max_notes=float('Inf'),
max_time_signatures=1,
max_tempos=1,
ignore_polyphonic_notes=True,
convert_to_drums=False,
steps_per_quarter=16):
"""Summary
Parameters
... | 5,358,761 |
def filter_prediction(disable_valid_filter, disable_extra_one_word_filter, pred_token_2dlist_stemmed):
"""
Remove the duplicate predictions, can optionally remove invalid predictions and extra one word predictions
:param disable_valid_filter:
:param disable_extra_one_word_filter:
:param pred_token_2... | 5,358,762 |
def mlmc_test(integrand_qmcpy, n, l, n0, eps, l_min, l_max):
"""
Multilevel Monte Carlo test routine
Args:
integrand_qmcpy (function):
low-level routine for l level estimation such that
Args:
x (ndarray): nx(integrand._dim_at_level(l)) array of sampl... | 5,358,763 |
def simple_dict_event_extractor(row, condition_for_creating_event, id_field, timestamp_field, name_of_event):
"""
Takes a row of the data df and returns an event record {id, event, timestamp}
if the row satisfies the condition (i.e. condition_for_creating_event(row) returns True)
"""
if condition_fo... | 5,358,764 |
def split_fused_prelu(input_graph_def: util.GraphDef) -> util.GraphDef:
"""
This function looks for fused operations that include a 'Prelu'-activation.
Matching nodes will be split into individual operations.
TFJS uses fused operations for performance.
Some fused activations aren't supported ... | 5,358,765 |
def test_harvest_lost_resources(pool):
"""Test unreferenced resources are returned to the pool."""
def get_resource_id():
"""
Ensures ``Resource`` falls out of scope before calling
``_harvest_lost_resources()``.
"""
return id(pool.get_resource()._resource)
r_id = ge... | 5,358,766 |
def __call__for_keras_init_v1(self, shape, dtype=None, partition_info=None):
""" Making keras VarianceScaling initializers v1 support dynamic shape.
"""
if dtype is None:
dtype = self.dtype
scale = self.scale
scale_shape = shape
if partition_info is not None:
scale_shape = partition_info.full_shape
... | 5,358,767 |
def create_line(net, from_bus, to_bus, length_km, std_type, name=None, index=None, geodata=None,
df=1., parallel=1, in_service=True, max_loading_percent=nan):
""" create_line(net, from_bus, to_bus, length_km, std_type, name=None, index=None, \
geodata=None, df=1., parallel=1, in_serv... | 5,358,768 |
def get_initmap(X, A=None, standardize=False, cov_func=None):
""" Give back parameters such that we have the L U decomposition of the
product with A (if given, or the PCA scores if not).
That is we will get back:
X[:, perm]*L*U + b = ((X-meanvec)/stdvec)*A
where A are PCA directions if not g... | 5,358,769 |
def publish_screenshot_sets(
access_token: AccessToken,
localization_dir: str,
localization_id: str,
):
"""Publish the screenshot sets from assets on disk."""
screenshots_dir = os.path.join(localization_dir, "screenshots")
if not os.path.isdir(screenshots_dir):
print_clr(
f" ... | 5,358,770 |
def read_FQ_matlab(file_open):
""" Opens FISH-quant result files generated with Matlab (tab-delimited text file).
Args:
file_open (string): string containing the full file name.
Returns:
dictionary containing outlines of cells, and if present the detected spots.
"""
# Open file
... | 5,358,771 |
def create_dfn(settings, seed, fname='csp'):
"""
Settings:
HL1 is half-length of outer box.
HL2 is half-length of fracture center box.
HL3 is half-length of inner box.
"""
document()
guids, midpt = srfc_guids(), (0,0,0)
random.seed(seed)
bsrf_ids = cube(settings['HL1']*2.)
gu... | 5,358,772 |
def create_default_reporting_options(embedded=True, config={}):
"""
config must follow this scheme:
{
`table_name`: {
`option1`: `value1`
}
}
The different options will depend on the table role.
- for ALL tables:
{n
'data' : {
'remove_... | 5,358,773 |
def get_patient_note(state, patient_id, note_id, *args, **kwargs):
"""
Return a note for a patient.
---
tags: ["FHIR"]
parameters:
- name: patient_id
in: path
description: ID of the patient of interest
required: true
schema:
type: string
... | 5,358,774 |
def _html_build_item(tag: str, text: str, attributes: map = None, include_tags=True) -> str:
"""Builds an HTML inline element and returns the HTML output.
:param str tag: the HTML tag
:param str text: the text between the HTML tags
:param map attributes: map of attributes
:param bool include_ta... | 5,358,775 |
def sim_sample(
out_prefix: str,
sample_id: int,
chrom_start: int = 0,
chrom_end: int = 10000,
start_rate: float = 0.001,
end_rate: float = 0.01,
mut_rate: float = 0.01,
) -> Dict[str, File]:
"""
Simulate sequencing data for one sample (assume one chromosome).
regions are sequen... | 5,358,776 |
def sync_with_douban():
"""三件事:一从豆瓣上同步评论到本地;二将本地作品的评论同步到豆瓣;三:将本地推荐的作品同步到豆瓣
一不需要access_token,二三需要
策略:豆瓣本次登陆将access_token存到user的access_token字段中
access_token有效期3920s > 1h,定时任务1h1次,在豆瓣用户登陆账户一小时之类利用有效期内的access_token抓取其评论数据
分析:豆瓣用户每次登陆之后在本站的评论或者推荐信息 与 在豆瓣上的评论信息,在一小时之类必定会与豆瓣进行有效同步
"""
flask_app = ... | 5,358,777 |
def vif_col(X, y, col_name):
"""计算vif
计算具体一个column的vif,
一般阈值在5或者10,超过这个数字则表明有
共线性。
Attributes:
X (pd.DataFrame): 自变量
y (pd.Series): 因变量
col_name (str): 需要判断的列
References:
James, Gareth, Daniela Witten, Trevor Hastie, and Robert Tibshirani.
An Introduct... | 5,358,778 |
def test_get_group_not_found(client):
    """Requesting a group that does not exist must yield HTTP 404 and an empty body."""
    resp = client.get('/group/does-not-exist')
    assert resp.status == '404 Not Found'
    assert resp.data == b''
def get_mesh_stat(stat_id_start_str, attr_value, xmin, ymin, xmax, ymax):
"""
地域メッシュの統計情報を取得する
@param stat_id_start_str 統計IDの開始文字 この文字から始まるIDをすべて取得する.
@param attr_value cat01において絞り込む値
@param xmin 取得範囲
@param ymin 取得範囲
@param xmax 取得範囲
@param ymax 取得範囲
"""
rows = database_proxy.ge... | 5,358,780 |
def free(ptr):
    """Free the given pointer, as returned by C malloc. If it is NULL, nothing happens."""
    # No NULL guard needed: C's free(NULL) is defined to be a no-op.
    libc.free(ptr)
def babel_extract(fileobj, keywords, comment_tags, options):
"""Babel extraction method for Jinja templates.
.. versionchanged:: 2.3
Basic support for translation comments was added. If `comment_tags`
is now set to a list of keywords for extraction, the extractor will
try to find the best... | 5,358,782 |
def main(directory='.', verbose=True):
"""Lists "data" files recursively in a given directory, tar files
are extracted.
The "data" files have :file:`info` and :file:`pickle` extensions.
TODO: not only recognize .tar and .tar.gz and .tgz but .zip...
"""
filelist = list()
director... | 5,358,783 |
def onedthreegaussian(x, H, A1, dx1, w1, A2, dx2, w2, A3, dx3, w3):
"""
Returns two 1-dimensional gaussian of form
H+A*numpy.exp(-(x-dx)**2/(2*w**2))
"""
g1 = A1 * numpy.exp(-(x-dx1)**2 / (2*w1**2))
g2 = A2 * numpy.exp(-(x-dx2)**2 / (2*w2**2))
g3 = A3 * numpy.exp(-(x-dx3)**2 / (2*w3**2))
... | 5,358,784 |
def match_any_if_key_matches(audit_id, result_to_compare, args):
"""
We want to compare things if we found our interested key
Even if the list does not have my interested name, it will pass
Match dictionary elements dynamically. Match from a list of available dictionaries
There is an argument: matc... | 5,358,785 |
def confusion_matrix_by_prob(true: np.ndarray,
predicted_prob: np.ndarray,
thresholds: Optional[Union[list, tuple, np.ndarray]] = None,
pos_label: Union[bool, str, int] = _DEFAULT_POS_LABEL,
output_metric... | 5,358,786 |
def group_slaves_by_key_func(
key_func: _GenericNodeGroupingFunctionT,
slaves: Sequence[_GenericNodeT],
sort_func: _GenericNodeSortFunctionT = None,
) -> Mapping[_KeyFuncRetT, Sequence[_GenericNodeT]]:
""" Given a function for grouping slaves, return a
dict where keys are the unique values returned ... | 5,358,787 |
def init_clfs():
    """Initialise the classifiers to train.

    Returns:
        dict, mapping classifier name to estimator instance
    """
    # XGBoost variant kept for reference but disabled:
    # clfs['xgb'] = XGBClassifier(n_jobs=-1)
    return {'lsvc': LinearSVC()}
def get_field_types():
    """Get a dict with all registration field types.

    Delegates to ``get_field_definitions`` with ``RegistrationFormFieldBase``;
    presumably this collects all registered field classes derived from that
    base -- confirm against ``get_field_definitions``.
    """
    return get_field_definitions(RegistrationFormFieldBase)
def get_md_links(filepath):
"""Get markdown links from a md file.
The links' order of appearance in the file IS preserved in the output.
This is to check for syntax of the format [...](...).
The returned 'links' inside the () are not checked for validity or
subtle differences (e.g. '/' vs no '/' at... | 5,358,790 |
def write_number_string(fp, data_element, padding=' '):
"""Handle IS or DS VR - write a number stored as a string of digits."""
# If the DS or IS has an original_string attribute, use that, so that
# unchanged data elements are written with exact string as when read from file
val = data_element.value
... | 5,358,791 |
def dask_to_zarr(df, z, loc, chunk_size, nthreads: int, msg: str = None):
# TODO: perhaps change name of Dask array so it does not get confused with a dataframe
"""
Creates a Zarr hierarchy from a Dask array.
Args:
df (): Dask array.
z (): Zarr hierarchy.
loc (): Location to wri... | 5,358,792 |
def get_reactor_logs(project_id, application_id, api_key=None, **request_kwargs):
"""
Get the logs of a Reactor script.
:param project_id: The Project of the Application.
:type project_id: str
:param application_id: The Application to get the script logs for.
:type application_id: str
:para... | 5,358,793 |
def bids_init(bids_src_dir, overwrite=False):
"""
Initialize BIDS source directory
:param bids_src_dir: string
BIDS source directory
:param overwrite: string
Overwrite flag
:return True
"""
# Create template JSON dataset description
datadesc_json = os.path.join(bids_src... | 5,358,794 |
def summation_i_squared(n):
    """Return the sum of i**2 for i = 1..n, without a loop.

    Uses the closed form n*(n+1)*(2n+1)/6 evaluated in pure integer
    arithmetic so the result stays exact for arbitrarily large n.

    :param n: upper bound of the summation; must be an int >= 1
    :return: the sum as an int, or None when n is not a positive integer
    """
    if not isinstance(n, int) or n < 1:
        return None
    # Floor division keeps this exact. The original used true division
    # (`/`) plus int(), which silently loses precision once the product
    # exceeds 2**53 (float mantissa width).
    return n * (n + 1) * (2 * n + 1) // 6
def str_to_number(this):
    """
    Convert string to a Number

    Tries an int conversion first so integral strings stay exact, and
    falls back to float on ValueError. A string that is neither a valid
    int nor a valid float lets float()'s ValueError propagate.

    :param this: object whose ``value`` attribute holds the numeric text
    :return: result of ``mknumber`` (project Number wrapper)
    """
    try:
        return mknumber(int(this.value))
    except ValueError:
        return mknumber(float(this.value))
def _LinterRunCommand(cmd, debug, **kwargs):
  """Run the linter with common RunCommand args set as higher levels expect.

  Args:
    cmd: argv list for the linter process.
    debug: when True, echo the command being run (passed as print_cmd).
    kwargs: extra keyword arguments forwarded to cros_build_lib.RunCommand.

  Returns:
    The RunCommand result; non-zero exit codes are tolerated
    (error_code_ok=True) so callers can inspect the result themselves.
  """
  return cros_build_lib.RunCommand(cmd, error_code_ok=True, print_cmd=debug,
                                   debug_level=logging.NOTICE, **kwargs)
def plotTSNE(Xdata, target = None, useMulti=True, num=2500, savename=None, njobs=4, size=4, cmap=None, dim=(12,8)):
"""
Plot TSNE for training data
Inputs:
> Xdata: The training feature data (DataFrame)
> target: The training target data (Series)
> num (2500 by default): The number o... | 5,358,798 |
async def test_arm_home_with_pending(hass):
"""Test arm home method."""
assert await async_setup_component(
hass,
alarm_control_panel.DOMAIN,
{
"alarm_control_panel": {
"platform": "manual",
"name": "test",
"code": CODE,
... | 5,358,799 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.