content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def _build_timecode(time, fps, drop_frame=False, additional_metadata=None):
"""
Makes a timecode xml element tree.
.. warning:: The drop_frame parameter is currently ignored and
auto-determined by rate. This is because the underlying otio timecode
conversion assumes DFTC based on rate.
... | 1,700 |
def transform(nodes, fxn, *args, **kwargs):
"""
Apply an arbitrary function to an array of node coordinates.
Parameters
----------
nodes : numpy.ndarray
An N x M array of individual node coordinates (i.e., the
x-coords or the y-coords only)
fxn : callable
The transformat... | 1,701 |
def create_input(
basedir, pertdir, latout=False, longwave=False, slc=slice(0, None, None)
):
"""Extract variables from a given directory and places into dictionaries.
It assumes that base and pert are different directories and only one
experiment output is present in each directory.
Slicing into ... | 1,702 |
def get_tax_proteins(tax_id, tax_prot_dict, prot_id_dict, gbk_dict, cache_dir, args):
"""Get the proteins linked to a tax id in NCBI, and link the tax id with the local db protein ids
:param tax_id: str, NCBI tax db id
:param tax_prot_dict: {ncbi tax id: {local db protein ids}}
:param prot_id_dict: dic... | 1,703 |
def define_features_vectorizer(columns, training_data, testing_data = None, ngramrange=(1,1)):
"""
Define the features for classification using CountVectorizer.
Parameters
----------
column: String or list of strings if using multiple columns
Names of columns of df tha... | 1,704 |
def resolve_4d_input_blob(
hparams: tf.contrib.training.HParams,
runtime_bs: tf.Tensor,
features: dict,
feature_columns: list,
info_log: bool = False,
) -> tf.Tensor:
"""Convert a dict feature input to a 4D input cube with dimension (NHWC).
this function is experimental.
Arguments:
... | 1,705 |
def GetPostgreSQLLoginInfo():
"""
* Get database login information from pem file
"""
passfile = '/mnt/data/other/pem/sinnud_pg.dat'
with open(passfile, 'r') as f:
passinfo = f.read().strip()
(host, user, dbname, password, port) = passinfo.split()
if os.path.isfile(passfile):
... | 1,706 |
def trace_condition(variables: TemplateVarsType) -> Generator:
"""Trace condition evaluation."""
trace_element = condition_trace_append(variables, trace_path_get())
trace_stack_push(trace_stack_cv, trace_element)
try:
yield trace_element
except Exception as ex:
trace_element.set_erro... | 1,707 |
def download_pictures(recent_seach_tweets):
"""
Download pictures from tweets
:param recent_seach_tweets: list of dictionnaries
"""
# Downloading pictures
print('%s - Downloading %d tweets' % (datetime.datetime.now().strftime('%d/%m/%Y - %H:%M'), len(recent_seach_tweets)))
for tw in recent_s... | 1,708 |
def show_graph_unique_not_callback(n_clicks, input_box):
""" Function which is called by a wrapped function in another module. It takes
user input in a text box, returns a graph if the query produces a hit in Solr.
Returns an error message otherwise.
ARGUMENTS: n_clicks: a parameter of the HTML button... | 1,709 |
def _add_string_datatype(graph, length):
"""Add a custom string datatype to the graph refering.
Args:
graph (Graph): The graph to add the datatype to
length (int): The maximim length of the string
Returns:
URIRef: The iri of the new datatype
"""
iri = rdflib_cuba[f"_datatyp... | 1,710 |
def cercle(x, y, r, color='black'):
    """Draw a circle of center (x, y) and radius r on MonCanvas."""
    # Bounding box corners of the circle for create_oval.
    left, top = x - r, y - r
    right, bottom = x + r, y + r
    MonCanvas.create_oval(left, top, right, bottom, outline=color)
def get_selected_shipping_country(request):
"""Returns the selected shipping country for the passed request.
This could either be an explicitely selected country of the current
user or the default country of the shop.
"""
customer = customer_utils.get_customer(request)
if customer:
if c... | 1,712 |
def func_item_iterator_next(*args):
    """
    func_item_iterator_next(fii, testf, ud) -> bool

    Thin pass-through wrapper delegating to the underlying IDA binding.
    """
    result = _ida_funcs.func_item_iterator_next(*args)
    return result
def find_score_maxclip(tp_support, tn_support, clip_factor=ut.PHI + 1):
"""
returns score to clip true positives past.
Args:
tp_support (ndarray):
tn_support (ndarray):
Returns:
float: clip_score
Example:
>>> # ENABLE_DOCTEST
>>> from ibeis.algo.hots.score_... | 1,714 |
def rho_prof_out(cluster, fileout, **kwargs):
"""Write density profile to file
Parameters
----------
cluster : class
StarCluster
fileout : file
opened file to write data to
Returns
-------
None
Other Parameters
----------------
kwargs : str
key word... | 1,715 |
def create_sponsor():
"""
Creates a new sponsor.
---
tags:
- sponsor
summary: Create sponsor
operationId: create_sponsor
requestBody:
content:
application/json:
schema:
allOf:
- $ref: '#/components/schema... | 1,716 |
def download_or_copy(uri, target_dir, fs=None) -> str:
"""Downloads or copies a file to a directory.
Downloads or copies URI into target_dir.
Args:
uri: URI of file
target_dir: local directory to download or copy file to
fs: if supplied, use fs instead of automatically chosen FileS... | 1,717 |
def main():
    """Entry point for command-line invocation of this script."""
    # TODO: read directory, num_annotators and coverage_level as
    # command-line parameters (currently hard-coded below).
    hardcoded_args = (3, 1, 'data-20150519/')
    gen_spreadsheets(*hardcoded_args)
async def test_sqlite_import(hass, client, caplog, monkeypatch):
"""Test import from sqlite keys file."""
m_open = mock_open(read_data="will raise JSONDecodeError")
monkeypatch.setattr(client, "is_connected", Mock(return_value=True))
monkeypatch.setattr(client, "connect", Mock(return_value=True))
w... | 1,719 |
def test_simplest_numbers_generator(test):
"""To test the simplest_numbers_generator, use:
test_simplest_numbers_generator("for")
-- to see the generator behaviour on for loop
test_simplest_numbers_generator("next")
-- to see the generator behaviour on next() call
"""
print_header("Te... | 1,720 |
def main(session):
""" Parse command line arguments, run recordData
and write the results into a csv file.
"""
# Get the services ALMemory and ALMotion.
memory_service = session.service("ALMemory")
motion_service = session.service("ALMotion")
# Set stiffness on for Head motors
motion_s... | 1,721 |
def startup():
""" Starts everything up """
settings = {
'telegram_token': os.environ.get('TELEGRAM_TOKEN'),
'telegram_chat_id': os.environ.get('TELEGRAM_CHAT_ID'),
'gotify_url': os.environ.get('GOTIFY_URL'),
'gotify_token': os.environ.get('GOTIFY_TOKEN'),
'port': int(os.... | 1,722 |
def as_yaml(config: Dict[str, Any], **yaml_args: Any) -> str:
    """Serialize *config* to a YAML string using the PyYAML library."""
    dumped: str = yaml.dump(config, **yaml_args)
    return dumped
def retrieve(filen,start,end):
"""Retrieve a block of text from a file.
Given the name of a file 'filen' and a pair of start and
end line numbers, extract and return the text from the
file.
This uses the linecache module - beware of problems with
consuming too much memory if the cache isn't cleared."""
... | 1,724 |
def test_file_groups_multiple_levels_nested_work_on_and_protect_dirs_with_pattern_and_debug(duplicates_dir, capsys):
"""Multiple protect dirs
Protect:
ki1 -> 6
ki1/df -> 3
ki1/df/ki12 -> 4
ki1/df/ki13 -> 4
ki1/df/ki13/ki14 -> 4
df2 -> 3
total: 24
Work_On:
ki1/df -> 4
ki... | 1,725 |
def assert_equal(actual: numpy.bool_, desired: numpy.ndarray):
    """
    usage.dask: 1
    """
    # Signature-only stub (body deliberately elided with `...`);
    # the docstring appears to record an observed-usage count
    # ("usage.dask: 1") — presumably generated by API-usage/stub
    # tooling; TODO confirm against the generator.
    ...
def L1_Charbonnier_loss(predict, real):
    """Compute the L1 Charbonnier loss.

    Args:
        predict: predicted result
        real: ground-truth result
    Returns:
        scalar loss value (mean Charbonnier error)
    """
    # Small epsilon keeps the sqrt differentiable at zero error.
    epsilon = 1e-6
    residual = tf.add(predict, -real)
    charbonnier = tf.sqrt(residual * residual + epsilon)
    return tf.reduce_mean(charbonnier)
def correction_byte_table_h() -> dict[int, int]:
"""Table of the number of correction bytes per block for the correction
level H.
Returns:
dict[int, int]: Dictionary of the form {version: number of correction
bytes}
"""
table = {
1: 17, 2: 28, 3: 22, 4: 16, 5: 22, 6: 28, 7: ... | 1,728 |
def stash_rename(node_id, new_name):
    """Renames a node."""
    outcome = stash_invoke('rename', node_id, new_name)
    return outcome
def _xor(cpu_context: ProcessorContext, instruction: Instruction):
""" XOR """
operands = instruction.operands
opvalue1 = operands[0].value
opvalue2 = operands[1].value
width = get_max_operand_size(operands)
result = opvalue1 ^ opvalue2
cpu_context.registers.cf = 0
cpu_context.registers... | 1,730 |
def get_fake_value(attr): # attr = (name, type, [dim, [dtype]])
""" returns default value for a given attribute based on description.py """
if attr[1] == pq.Quantity or attr[1] == np.ndarray:
size = []
for i in range(int(attr[2])):
size.append(np.random.randint(100) + 1)
to_s... | 1,731 |
def find_x(old_time, omega, new_time):
    """
    Compute x at the beginning of the new time array.
    """
    omega_interpolant = spline(old_time, omega)
    omega_at_start = omega_interpolant(new_time[0])
    return omega_at_start ** (2. / 3)
def _platformio_library_impl(ctx):
"""Collects all transitive dependencies and emits the zip output.
Outputs a zip file containing the library in the directory structure expected
by PlatformIO.
Args:
ctx: The Skylark context.
"""
name = ctx.label.name
# Copy the header file to the desired destinati... | 1,733 |
def repeat_interleave(x, arg):
    """Use numpy to implement repeat operations"""
    repeated = x.numpy().repeat(arg)
    return paddle.to_tensor(repeated)
def quantize_arr(arr, min_val=None, max_val=None, dtype=np.uint8):
"""Quantization based on real_value = scale * (quantized_value - zero_point).
"""
if (min_val is None) | (max_val is None):
min_val, max_val = np.min(arr), np.max(arr)
scale, zero_point = choose_quant_params(min_val, max_val, ... | 1,735 |
def EST_NOISE(images):
    """Implementation of EST_NOISE in Chapter 2 of Trucco and Verri.

    Estimates per-pixel noise as the sample standard deviation across the
    image stack (first axis of ``images``).
    """
    n_frames = images.shape[0]
    # Built-in sum iterates over the first axis, giving the per-pixel mean.
    mean_image = sum(images) / n_frames
    sample_variance = sum((images - mean_image) ** 2) / (n_frames - 1)
    return np.sqrt(sample_variance)
def continue_cad_funcionario(request):
""" Continuação do Cadastro do Funcionário.
"""
usuario = request.user
try:
funcionario = Funcionario.objects.get(usuario=usuario)
except Exception:
raise Http404()
if funcionario and request.method == "POST":
form = FuncionarioForm(... | 1,737 |
def remove_words(i_list, string):
"""
remove the input list of word from string
i_list: list of words to be removed
string: string on the operation to be performed
"""
regexStr = re.compile(r'\b%s\b' %
r'\b|\b'.join(map(re.escape, i_list)))
o_string = re... | 1,738 |
def save_user_time():
    """Check whether the shifted current time equals the 18:00 UTC save time.

    Returns:
        bool: True when ``utcnow() - 4h`` falls in the 18:00 minute.

    The comparison is done at minute resolution: the original compared the
    zeroed-out save time against a raw ``utcnow() - timedelta(hours=4)``
    that still carried seconds and microseconds, so the equality could
    essentially never be True. Both sides are now truncated identically.
    """
    save_time = datetime.utcnow().replace(hour=18, minute=0, second=0, microsecond=0)
    # NOTE(review): the -4h shift presumably converts UTC to a local zone —
    # confirm the intended timezone with the caller.
    now_shifted = (datetime.utcnow() - timedelta(hours=4)).replace(second=0, microsecond=0)
    return save_time == now_shifted
def manage_rating_mails(request, orders_sent=None, template_name="manage/marketing/rating_mails.html"):
    """Displays the manage view for rating mails.

    Args:
        request: The current HTTP request.
        orders_sent: Optional list; unused in the body but kept for
            interface compatibility. Defaults to None instead of a
            mutable ``[]`` (shared-mutable-default pitfall).
        template_name: Template rendered for the view.
    """
    if orders_sent is None:
        orders_sent = []
    return render(request, template_name, {})
def cancel(request_url: str,
wait: Optional[bool] = False,
poll_interval: Optional[float] = STANDARD_POLLING_SLEEP_TIME,
verbose: Optional[bool] = False) -> int:
"""
Cancel the request at the given URL.
This method returns immediately by default since the API processes
... | 1,741 |
def main():
"""
TODO: To find the best match of DNA similarity.
"""
long_sequence = input('Please give me a DNA sequence to search: ')
short_sequence = input('What DNA sequence would you like to match? ')
best_match = find_match(short_sequence, long_sequence)
print('The best match is '+str(... | 1,742 |
def set_jit_fusion_options():
"""Set PyTorch JIT layer fusion options."""
# set flags if we are using the 21.10 container
if torch.__version__ == "1.10.0a0+0aef44c":
# nvfuser
torch._C._jit_set_profiling_executor(True)
torch._C._jit_set_profiling_mode(True)
torch._C._jit_over... | 1,743 |
def plot_train_progress(scores, img_title, save_path, show, names=None):
"""
A plotting function using the array of loss values saved while training.
:param train_losses, dev_losses: losses saved during training
:return:
"""
nrows, ncols = 2, 3
dx, dy = 2, 1
num_iter = len(scores[0])
... | 1,744 |
def selected_cases(self):
"""Get a list of all grid cases selected in the project tree
Returns:
A list of :class:`rips.generated.generated_classes.Case`
"""
case_infos = self._project_stub.GetSelectedCases(Empty())
cases = []
for case_info in case_infos.data:
cases.append(self.c... | 1,745 |
def create_whatsapp_group(org, subject):
"""
Creates a Whatsapp group using the subject
"""
result = requests.post(
urljoin(org.engage_url, "v1/groups"),
headers=build_turn_headers(org.engage_token),
data=json.dumps({"subject": subject}),
)
result.raise_for_status()
r... | 1,746 |
def compute_MVBS_index_binning(ds_Sv, range_sample_num=100, ping_num=100):
"""Compute Mean Volume Backscattering Strength (MVBS)
based on intervals of ``range_sample`` and ping number (``ping_num``) specified in index number.
Output of this function differs from that of ``compute_MVBS``, which computes
... | 1,747 |
def set_image_exposure_time(exp_time):
"""
Send the command to set the exposure time per frame to SAMI.
Parameters
----------
exp_time (float) : the exposure time in seconds.
Returns
-------
message (string) : DONE if successful.
"""
message = send_command("dhe set obs.exptime ... | 1,748 |
def ping(host):
""" Returns True if host (str) responds to a ping request.
Remember that a host may not respond to a ping (ICMP) request even if the
host name is valid.
Base on https://bit.ly/2TmgeX2 but with pexpect
:param str host: A host name or ip
:return boolean: True if ping is replied cor... | 1,749 |
def stack_exists(client, stack_name):
""" Checks that stack was specified is existing """
cfn_stacks = client.list_stacks()
for cfn_stack in cfn_stacks["StackSummaries"]:
if cfn_stack['StackName'] == stack_name and "COMPLETE" in cfn_stack['StackStatus'] and "DELETE" not in cfn_stack['StackStatus']:
... | 1,750 |
def endorsement_services():
    """Return endorsement service list.

    Loads all defined service modules unless settings specifies otherwise;
    the loaded list is memoized in the module-level ENDORSEMENT_SERVICES.
    """
    global ENDORSEMENT_SERVICES
    if ENDORSEMENT_SERVICES is not None:
        return ENDORSEMENT_SERVICES
    ENDORSEMENT_SERVICES = _load_endorsement_services()
    return ENDORSEMENT_SERVICES
def test_set_initial_open_orders():
"""
Check _set_initial_open_orders method for return
of an empty dictionary.
"""
start_dt = pd.Timestamp('2017-10-05 08:00:00', tz=pytz.UTC)
exchange = ExchangeMock()
data_handler = DataHandlerMock()
sb = SimulatedBroker(start_dt, exchange, data_handl... | 1,752 |
def select(arrays, index):
"""
Index each array in a tuple of arrays.
If the arrays tuple contains a ``None``, the entire tuple will be returned
as is.
Parameters
----------
arrays : tuple of arrays
index : array
An array of indices to select from arrays.
Returns
-----... | 1,753 |
def waypoint(waypoint_id):
    """View a book page (waypoint) and its outgoing options."""
    page = Waypoint.query.filter_by(id=waypoint_id).first()
    outgoing = Option.query.filter_by(sourceWaypoint_id=waypoint_id)
    if page is None:
        abort(404)
    return render_template('books/waypoint.html', book=page.book_of, waypoint=page, options=outgoing)
def test_insert_requirement(client):
"""
Testing the method for posting information
to insert a requirement on the csv file (synthetic data)
"""
specification = dict(specification_id='X1C2V3B7',
product='OSLC SDK 7',
project='OSLC-Project 7',
... | 1,755 |
def encode_big_endian_16(i):
    """Take an int and return big-endian bytes (the two low-order bytes
    of the 32-bit big-endian encoding)."""
    full_word = encode_big_endian_32(i)
    return full_word[-2:]
def get_comments_from_fawm_page(
url: str,
username: str,
password: str,
) -> List[Response]:
"""Extract comments from a given FAWM page."""
response = requests.get(url, auth=(username, password))
response.encoding = "UTF-8"
html = response.text
soup = BeautifulSoup(html, "html.parser")
... | 1,757 |
def send_message(service, user_id, message):
"""Send an email message.
Args:
service: Authorized Gmail API service instance.
user_id: User's email address. The special value "me"
can be used to indicate the authenticated user.
message: Message to be sent.
Returns:
Sent Message.
"""
try:
... | 1,758 |
def get_supported_solvers():
    """
    Returns a list of solvers supported on this machine.
    :return: a list of SolverInterface sub-classes :list[SolverInterface]:
    """
    supported = []
    for solver in builtin_solvers:
        if solver.supported():
            supported.append(solver)
    return supported
def eff_satellite_testing_auto_end_dating(context):
"""
Define the structures and metadata to load effectivity satellites
"""
context.hashed_columns = {
"STG_CUSTOMER_ORDER": {
"CUSTOMER_ORDER_PK": ["CUSTOMER_ID", "ORDER_ID"],
"CUSTOMER_PK": "CUSTOMER_ID",
"O... | 1,760 |
def _length_hint(obj):
"""Returns the length hint of an object."""
try:
return len(obj)
except (AttributeError, TypeError):
try:
get_hint = type(obj).__length_hint__
except AttributeError:
return None
try:
hint = get_hint(obj)
excep... | 1,761 |
def _folder_to_json(jdir, key_path=None, in_memory=True,
ignore_prefix=('.', '_'), dic={}, parse_decimal=False):
""" read in folder structure as json
e.g.
jdir
sub_dir1
data.json
sub_dir2
data.json
_folder_to_json(jdir)
=> {'sub_dir1':{'... | 1,762 |
async def generate_images(queue, filenames):
    """Producer coroutine: enqueue every filename onto *queue*."""
    for name in filenames:
        await queue.put(name)
    print('All task requests sent')
def chimeric_data():
"""Example containing spanning + junction reads from single fusion."""
return _build_chimeric_data(
[('1', 300, 1, 'T2onc', 420, 1, 2, '100M2208p38M62S', '62M38S', 'R1'),
('1', 300, 1, 'T2onc', 420, 1, 1, '100M2208p52M48S', '48M52S', 'R2'),
('1', 301, 1, 'T2onc', 4... | 1,764 |
def _get_merge_for_alias_key(database, key):
"""Return the Alias record of the merged player.
Allow for value.merge on the record with key srkey being any value.
Return the record if value.merge is None True or False.
Otherwise assume value.merge is integer and use it to retreive and
return a recor... | 1,765 |
def saveusers(argv):
    """Save stdin to users.plist.

    Reads a plist from stdin; the existing users_path file is only
    unlinked after the input has parsed successfully, then rewritten.

    Returns:
        0 on success, 2 when stdin is not a valid plist.
    """
    try:
        plist = plistlib.readPlist(sys.stdin)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        print >>sys.stderr, "Malformed users.plist"
        return 2
    os.unlink(users_path)
    plistlib.writePlist(plist, users_path)
    return 0
def get_bucket(self):
"""
Documentation:
---
Description:
Use bucket name to return a single S3 bucket object.
---
Returns:
bucket : S3 bucket
S3 bucket object
"""
# return
# 6 dictionary containing Name tag / EC2 instance obj... | 1,767 |
def set_emoji_author(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
"""
This migration establishes the invariant that all RealmEmoji objects have .author set
and queues events for reuploading all RealmEmoji.
"""
RealmEmoji = apps.get_model("zerver", "RealmEmoji")
Realm = apps.ge... | 1,768 |
def requestShutdown(programPath, reason):
"""
Log the shutdown reason and call the shutdown-requesting program.
In the case the service is spawned by launchd (or equivalent), if our
service decides it needs to shut itself down, because of a misconfiguration,
for example, we can't just exit. We may... | 1,769 |
def euclidean_distance(x, y, weight=None):
"""Computes the Euclidean distance between two time series.
If the time series do not have the same length, an interpolation is performed.
Parameters
----------
x : nd-array
Time series x.
y : nd-array
Time series y.
weight: nd-arr... | 1,770 |
def path(artifactory_server, artifactory_auth):
"""ArtifactoryPath with defined server URL and authentication"""
def f(uri):
return artifactory.ArtifactoryPath(
artifactory_server + uri, auth=artifactory_auth
)
return f | 1,771 |
def compile_gif():
    """Make the movie: assemble the thumbnails under
    ./current/map_thumbnails/ into a GIF via the utils helper."""
    utils.compile_gif('./current/map_thumbnails/')
def get_figure_of_dataframe_contrasting_prof_with_other_profs(dict_cursor, ax, instructorFullName, cID, campus):
"""
Plot the prof vs other profs DataFrame in python.
"""
df = __get_dataframe_by_contrasting_prof_with_other_profs(dict_cursor, instructorFullName, cID, campus)
__get_figure_by_dataframe... | 1,773 |
def get_outmost_polygon_boundary(img):
"""
Given a mask image with the mask describes the overlapping region of
two images, get the outmost contour of this region.
"""
mask = get_mask(img)
mask = cv2.dilate(mask, np.ones((2, 2), np.uint8), iterations=2)
cnts, hierarchy = cv2.findContours(
... | 1,774 |
def update_uploaded_file_with_log(project, path_to_log_file):
"""Update file details that weren't properly uploaded to db from cli log"""
import botocore
from dds_web.database import models
from dds_web import db
from dds_web.api.api_s3_connector import ApiS3Connector
import json
proj_in_db... | 1,775 |
def on_save_handler(model_class, instance, created):
"""Hash password on creation/save."""
# If password changed then it won't start with hash's method prefix
is_password_changed = not instance.password.startswith('pbkdf2:sha256')
if created or is_password_changed:
instance.password = generate_... | 1,776 |
def check_ddp_wrapped(model: nn.Module) -> bool:
"""
Checks whether model is wrapped with DataParallel/DistributedDataParallel.
"""
parallel_wrappers = nn.DataParallel, nn.parallel.DistributedDataParallel
# Check whether Apex is installed and if it is,
# add Apex's DistributedDataParallel to li... | 1,777 |
def adminRecords(request):
"""
管理租赁记录
:param request:
:return: html page
"""
token = request.COOKIES.get('admintoken')
if token is None:
return redirect('/adminLogin/')
result = MysqlConnector.get_one('YachtClub', 'select adminname from admincookies where token=%s', token)
if... | 1,778 |
def _make_fold(draw):
"""
Helper strategy for `test_line_fold` case.
The shape of the content will be the same every time:
a
b
c
But the chars and size of indent, plus trailing whitespace on each line
and number of line breaks will all be fuzzed.
"""
return (
draw... | 1,779 |
def temporary_namespace(locals_ref, keep: T.List[str] = []):
"""Temporary Namespace within ``with`` statement.
1. copies current namespace from `locals_ref`
2. Enters ``with`` statement
3. restores original namespace except those specified in `keep`
Parameters
----------
module : module
... | 1,780 |
def get_user_jwt() -> str:
"""
Returns:
str: The JWT token of the user
"""
login_data = check_login()
if not login_data:
token = requests.get(
'https://formee-auth.hackersreboot.tech/visitor').json()['token']
return token
if login_data:
token = reques... | 1,781 |
def auth(driver, cred):
"""
Method for automating login procedure
"""
try:
ele_un = driver.find_element_by_xpath("//input[@id='ft_un']")
ele_un.send_keys(cred['username'])
ele_pd = driver.find_element_by_xpath("//input[@id='ft_pd']")
ele_pd.send_keys(cred['password'])
... | 1,782 |
def nostdout():
"""Kill standart output.
Example
-------
>> with nostdout():
raw = mne.io.Raw(fname)
"""
# -- Works both in python2 and python3 -- #
try:
from io import StringIO
except ImportError:
from io import StringIO
# -------------------------------... | 1,783 |
def site_pressure(dset):
"""Get atmospheric pressure from local site measurements
If local atmospheric pressure measurements on a site are not available an alternative model given in configuration
file is used to determine atmospheric pressure.
TODO:
So far only gridded VMF1 model is used, if ... | 1,784 |
def write_clean_po(filename, catalog):
"""Writes out a .po file in a canonical way, to minimize spurious diffs."""
catalog.creation_date = datetime.datetime(2000, 1, 1, 0, 0, 0)
file = open(filename, 'w')
pofile.write_po(file, catalog,
no_location=True, sort_output=True, ignore_obsol... | 1,785 |
def add_gradient_penalty(critic, C_input_gp, C_input_fake):
"""Helper Function: Add gradient penalty to enforce Lipschitz continuity
Interpolates = Real - alpha * ( Fake - Real )
Parameters
----------
critic : tf.Sequential
Critic neural network
C_input_gp : np.matrix
Criti... | 1,786 |
def parse_airomon_datetime(airomon_dt: str) -> datetime:
"""Parse string used by airomon and also make timezone aware."""
aileen_tz = pytz.timezone(settings.TIME_ZONE)
try:
dt: datetime = datetime.strptime(airomon_dt, "%Y-%m-%d %H:%M:%S")
dt = dt.astimezone(aileen_tz)
except ValueError:
... | 1,787 |
def i2nm(i):
"""
Return the n and m orders of the i'th zernike polynomial
========= == == == == == == == == == == == == == == == ===
i 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 ...
n-order 0 1 1 2 2 2 3 3 3 3 4 4 4 4 4 ...
m-order 0 -1 1 -2 0 2 -3 -1 1 3 -4 -... | 1,788 |
def update_ftov_msgs(
ftov_msgs: jnp.ndarray, updates: Dict[Any, jnp.ndarray], fg_state: FactorGraphState
) -> jnp.ndarray:
"""Function to update ftov_msgs.
Args:
ftov_msgs: A flat jnp array containing ftov_msgs.
updates: A dictionary containing updates for ftov_msgs
fg_state: Facto... | 1,789 |
def dict_list_add(d, k, v):
    """Append *v* to the list stored under *k* in *d*, creating the list
    on first use.

    Args:
        d: dictionary mapping keys to lists
        k: key to append under
        v: value to append
    """
    # setdefault is the idiomatic one-step equivalent of the original
    # check-then-insert: it creates the empty list only on first use.
    d.setdefault(k, []).append(v)
def normalize_features(
current: np.ndarray,
previous: Optional[np.ndarray],
normalize_samples: int,
method: str = NORM_METHODS.MEAN.value,
clip: bool = False,
) -> tuple[np.ndarray, np.ndarray]:
"""Normalize features with respect to the past number of normalize_samples.
Parameters
----... | 1,791 |
def anim(filename, rows: int, cols: int ,
frame_duration: float = 0.1, loop=True) -> Animation:
"""Create Animation object from image of regularly arranged subimages.
+filename+ Name of file in resource directory of image of subimages
regularly arranged over +rows+ rows and +cols+ column... | 1,792 |
def show_umbrella_plot(profileFilename, histogramFilename):
"""Muestra el gráfico del perfil y los histogramas en el mismo gráfico. Útil para determinar
si al cálculo le faltan ventanas."""
figure = plt.figure()
histogramsData = parseXVG(histogramFilename)
histoPlot = figure.add_subplot(111)
fo... | 1,793 |
def process_summary(article):
"""Ensures summaries are not cut off. Also inserts
mathjax script so that math will be rendered"""
summary = article._get_summary()
summary_parsed = BeautifulSoup(summary, 'html.parser')
math = summary_parsed.find_all(class_='math')
if len(math) > 0:
last_... | 1,794 |
def filter_factory(global_conf, **local_conf):
"""Standard filter factory to use the middleware with paste.deploy"""
register_swift_info('vertigo')
conf = global_conf.copy()
conf.update(local_conf)
vertigo_conf = dict()
vertigo_conf['devices'] = conf.get('devices', '/srv/node')
vertigo_co... | 1,795 |
def get_review_score_fields(call, proposals):
"""Return a dictionary of the score banner fields in the reviews.
Compute the score means and stdevs. If there are more than two score
fields, then also compute the mean of the means and the stdev of the means.
This is done over all finalized reviews for eac... | 1,796 |
def vThreadFunction():
    """Function to do CPU-bound work.

    Burns CPU by summing the integers 0..49,999,999 into a local
    accumulator; the result is discarded and the function returns None.
    Presumably a busy-work payload for demonstrating threading/GIL
    behaviour — TODO confirm intended use at the call sites.

    Args:
    Returns:
    """
    iResult = 0
    for iCnt in range(50000000):
        iResult += iCnt
def test_simple_profiler_iterable_durations(simple_profiler, action, expected):
"""Ensure the reported durations are reasonably accurate."""
iterable = _sleep_generator(expected)
for _ in simple_profiler.profile_iterable(iterable, action):
pass
# we exclude the last item in the recorded durati... | 1,798 |
def tokenize(text):
    """
    Tokenize and normalize
    """
    words = nltk.word_tokenize(text)
    lemmatize = nltk.WordNetLemmatizer().lemmatize
    return [lemmatize(word).lower().strip() for word in words]
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.