content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def _get_distance(captcha_url):
"""
获取缺口距离
:param captcha_url: 验证码 url
:return:
"""
save_path = os.path.abspath('...') + '\\' + 'images'
if not os.path.exists(save_path):
os.mkdir(save_path)
img_path = _pic_download(captcha_url, 'captcha')
img1 = cv2.imread(img_path, 0)
... | 5,358,800 |
def obj_to_solids(filename: str, material_path: str = "", scale=64):
"""
Turns an .obj file to VMF solids, **BETA** it's very finicky and remember to invert normals
:param filename: The name of the .obj file with path (ex: "test/wall.obj")
:param material_path: The path to the .VMT's using same names (... | 5,358,801 |
def adjust_contrast(img, contrast_factor):
"""Adjust contrast of an Image.
Args:
img (PIL Image): PIL Image to be adjusted.
contrast_factor (float): How much to adjust the contrast. Can be any
non negative number. 0 gives a solid gray image, 1 gives the
original image while 2 increases the contr... | 5,358,802 |
def write_configurations(root, ra, dec, center, obs_obj, obs_ap, obs_mode, objid=None, path=None,
ndig=None, tight=False, target_file=None, ra_c=None, dec_c=None):
"""
Write a set of configuration files for each FOBOS observation.
Args:
root (:obj:`str`):
The ro... | 5,358,803 |
def matplotlib_view(gviz: Digraph):
    """Display the given Graphviz diagram through Matplotlib.

    Parameters
    ---------------
    gviz
        Graphviz digraph to render
    """
    # Thin delegation to the gview backend.
    rendered = gview.matplotlib_view(gviz)
    return rendered
def shuf_repeat(lst, count):
""" Xiaolong's code expects LMDBs with the train list shuffled and
repeated, so creating that here to avoid multiple steps. """
final_list = []
ordering = range(len(lst))
for _ in range(count):
np.random.shuffle(ordering)
final_list += [lst[i] for i in or... | 5,358,805 |
def create_query(table_name, schema_dict):
    """Build the pieces needed to create one goodbooks table.

    see datatypes documentation here:
    https://www.postgresql.org/docs/11/datatype.html

    Args:
        table_name: Key into ``schema_dict`` naming the table.
        schema_dict: Mapping of table name -> {column name: SQL type string}.

    Returns:
        Tuple of (prefixed table name, list of ``"column type"`` clauses).
    """
    # BUG FIX: the original looked up the module-level ``db_schema`` and
    # ignored the ``schema_dict`` parameter entirely; use the argument.
    columns = schema_dict[table_name]
    return (
        f"goodbooks_{table_name}",
        [f"{column} {value}" for column, value in columns.items()],
    )
def new_document(
source_path: str, settings: Any = None
) -> Tuple[nodes.document, JSONReporter]:
"""Return a new empty document object.
Replicates ``docutils.utils.new_document``, but uses JSONReporter,
which is also returned
Parameters
----------
source_path : str
The path to or... | 5,358,807 |
def delete_repleciation(zfssrcfs, repel_uuid):
"""ZFS repleciation action status
accepts: An exsistng ZFS action uuid (id).
returns: the ZFS return status code.
"""
r = requests.delete(
"%s/api/storage/v1/replication/actions/%s"
% (url, repel_uuid), auth=zfsauth, verify=False,
... | 5,358,808 |
def load_checkpoint(
neox_args, model, optimizer, lr_scheduler, inference=False, iteration=None
):
"""Load a model checkpoint and return the iteration."""
if neox_args.deepspeed:
load_optim_and_scheduler = (
not neox_args.no_load_optim
) # TODO: These should be configured by... | 5,358,809 |
def test_move_zeros(source, result):
    """Check that move_zeros transforms ``source`` into the expected ``result``."""
    from move_zeros import move_zeros
    moved = move_zeros(source)
    assert moved == result
def get_logged_in_session(websession: aiohttp.ClientSession) -> RenaultSession:
    """Build a RenaultSession pre-populated with logged-in test credentials."""
    credential_store = get_logged_in_credential_store()
    return RenaultSession(
        websession=websession,
        country=TEST_COUNTRY,
        locale_details=TEST_LOCALE_DETAILS,
        credential_store=credential_store,
    )
def __getStationName(name, id):
    """Construct a station name."""
    # Drop the "Meetstation" prefix and surrounding whitespace,
    # then append the station id in parentheses.
    base = name.replace("Meetstation", "").strip()
    return f"{base} ({id})"
def processVideo(event):
"""Function get called on left click event on the button 'calculateVideo'
Opens a browse dialog to select the video file.
Processes the video for detecting faces and making predictions on them by loading the trained models.
Displays the result in the GUI.
:param ... | 5,358,813 |
def load_hist(path):
"""
load spatial histogram
"""
# load all hist properties
logpYX = io.loadmat(os.path.join(path, 'logpYX'))['value']
xlab = io.loadmat(os.path.join(path, 'xlab'))['value']
ylab = io.loadmat(os.path.join(path, 'ylab'))['value']
rg_bin = io.loadmat(os.path.join(path, '... | 5,358,814 |
def test_default_reporter():
    """By default ``reader`` isn't implemented and ``close`` is a no-op."""
    first = TestReporter()
    assert first.reader() is None
    # close should succeed and return nothing by default.
    second = TestReporter()
    assert second.close() is None
def post_url(url):
"""Post url argument type
:param str url: the post url
:rtype: str
:returns: the post url
"""
url = url.strip()
if len(url) == 0:
raise ArgumentTypeError("A url is required")
elif len(url) > Url.URL_LENGTH:
raise ArgumentTypeError("The url length is o... | 5,358,816 |
def create_planner(request):
    """Create a new planner and redirect to the new planner's edit page.

    Args:
        request: Incoming HttpRequest; its authenticated user becomes
            the plan's author.

    Returns:
        HttpResponseRedirect to the 'planner:edit_plan' view for the
        freshly created plan.
    """
    user = request.user
    # ``objects.create`` already persists the row, so the original extra
    # ``plan.save()`` call was a redundant second write and is dropped.
    plan = Plan.objects.create(author=user)
    return HttpResponseRedirect(reverse('planner:edit_plan', args=[plan.id]))
def rule_matching_evaluation(df, model, seed_num, rein_num, eval_num, label_map, refer_label, lime_flag=True, scan_flag=False
, content_direction='forward', xcol_name='text', n_cores=20):
"""A integrated rule extraction, refinement and validation process.
On the dataset, sample base... | 5,358,818 |
def login():
""" Typical login page """
# if current user is already logged in, then don't log in again
if current_user.is_authenticated:
return redirect(url_for('index'))
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(username=form.username.data).first... | 5,358,819 |
def create_timeperiod_map(
start: spec.Timestamp = None,
end: spec.Timestamp = None,
length: spec.Timelength = None,
) -> spec.TimeperiodMap:
"""create Timeperiod with representation TimeperiodMap
## Inputs
- start: Timestamp
- end: Timestamp
- length: Timelength
## Returns
- T... | 5,358,820 |
async def patched_send_async(self, *args, **kwargs):
"""Patched send function that push to queue idx of server to which request is routed."""
buf = args[0]
if buf and len(buf) >= 6:
op_code = int.from_bytes(buf[4:6], byteorder=PROTOCOL_BYTE_ORDER)
# Filter only caches operation.
if 1... | 5,358,821 |
def add_event_records(df, event_type, event_date):
"""Add event records for the event type."""
log(f'Adding {DATASET_ID} event records for {event_type}')
this_year = datetime.now().year
df = df.loc[df[event_date].notnull(), :].copy()
df['event_id'] = db.create_ids(df, 'events')
df['dataset_id'] ... | 5,358,822 |
def _renderPath(path,drawFuncs,countOnly=False,forceClose=False):
"""Helper function for renderers."""
# this could be a method of Path...
points = path.points
i = 0
hadClosePath = 0
hadMoveTo = 0
active = not countOnly
for op in path.operators:
if op == _MOVETO:
if f... | 5,358,823 |
def save_depth_image(filename, depth, filetype=None):
"""
Saves a depth image (e.g. generated by render module) to an image file.
We support several file types:
'tum', saving as '.png', depth values are multiplied with 5000 and stored as uint16 (see
https://vision.in.tum.de/data/datasets/rgbd-datase... | 5,358,824 |
def _search_focus(s, code=None):
""" Search for a particular module / presentation.
The search should return only a single item. """
if not code:
code = input("Module code (e.g. TM129-17J): ")
results = _search_by_code(s, code)
if not len(results):
print('Nothing found for "{}"... | 5,358,825 |
def visualize(image, pred, label=None):
"""make visualization"""
n_plot = 2 if label is None else 3
fig = plt.figure()
ax = fig.add_subplot(1, n_plot, 1)
imgplot = plt.imshow(image)
ax.set_title('Image')
ax = fig.add_subplot(1, n_plot, 2)
imgplot = plt.imshow(pred)
ax.set_title('Pred... | 5,358,826 |
def parse_line(sample):
"""Parse an ndjson line and return ink (as np array) and classname."""
class_name = sample["word"]
inkarray = sample["drawing"]
stroke_lengths = [len(stroke[0]) for stroke in inkarray]
total_points = sum(stroke_lengths)
np_ink = np.zeros((total_points, 3), dtype=np.float3... | 5,358,827 |
def test_missing(metafunc, fixtures: Dict[str, Any]) -> None:
    """When the test function isn't among the fixtures, nothing is parametrized."""
    # act
    generate_tests(metafunc, fixtures)
    # the mocked parametrize must never have been invoked
    metafunc.parametrize.assert_not_called()
def evalRPN(self, tokens):
# ! 求解逆波兰式,主要利用栈
"""
:type tokens: List[str]
:rtype: int
"""
stack = []
for item in tokens:
# print(stack)
if item.isdigit():
stack.append(int(item))
if item[0] == '-' and len(item) > 1 and item[1:].isdigit():
... | 5,358,829 |
def _validate_flags():
"""Returns True if flag values are valid or prints error and returns False."""
if FLAGS.list_ports:
print("Input ports: '%s'" % (
"', '".join(midi_hub.get_available_input_ports())))
print("Ouput ports: '%s'" % (
"', '".join(midi_hub.get_available_output_ports())))
... | 5,358,830 |
def proto_test(test):
    """
    Coerce ``test`` to a ProtoTest: instances pass through unchanged,
    anything else is wrapped in a new ProtoTest.
    """
    return test if isinstance(test, ProtoTest) else ProtoTest(test)
def get_frames():
"""Get frames for an episode
Params:
episode: int
The episode for which the frames shall be returned
Returns:
frames: dict
The frames for an episode per timestep
"""
episode = int(request.args.get('user'))
frames = data_preprocessor.get_f... | 5,358,832 |
async def _(message: Message):
    """
    Handler that replies to every message containing
    "hi" or "hello", matched case-insensitively.
    """
    reply = "Hi!"
    await message.answer(reply)
def remove(s1,s2):
"""
Returns a copy of s, with all characters in s2 removed.
Examples:
remove('abc','ab') returns 'c'
remove('abc','xy') returns 'abc'
remove('hello world','ol') returns 'he wrd'
Parameter s1: the string to copy
Precondition: s1 is a string
Parameter ... | 5,358,834 |
def get_firebase_db_url():
"""Grabs the databaseURL from the Firebase config snippet. Regex looks
scary, but all it is doing is pulling the 'databaseURL' field from the
Firebase javascript snippet"""
regex = re.compile(r'\bdatabaseURL\b.*?["\']([^"\']+)')
cwd = os.path.dirname(__file__)
try:
... | 5,358,835 |
def get_issuer_plan_ids(issuer):
    """Return the set of plan ids registered to the given issuer id."""
    plans = pd.read_csv(PATH_TO_PLANS)
    issuer_rows = plans[plans.IssuerId.astype(str) == issuer]
    return set(issuer_rows.StandardComponentId.unique())
def add_args(parser):
"""
parser : argparse.ArgumentParser
return a parser added with args required by fit
"""
# Training settings
parser.add_argument('--model', type=str, default='mobilenet', metavar='N',
help='neural network used in training')
parser.add_argument('... | 5,358,837 |
def parse_coords(lines):
"""Parse skbio's ordination results file into coords, labels, eigvals,
pct_explained.
Returns:
- list of sample labels in order
- array of coords (rows = samples, cols = axes in descending order)
- list of eigenvalues
- list of percent variance explained
F... | 5,358,838 |
def run_ann(model, train, test, params_save_path, iteration, optimizer, loss, callbacks=None, valid=None,
shuffle_training=True,
batch_size=16,
num_epochs=30):
"""
Run analog network with cross-validation
:param batch_size: batch size during training
:param model: ref... | 5,358,839 |
def get_sector(session, sector_name=None, sector_id=None):
    """Fetch a Sector, looked up either by its name or by its id."""
    sector = get_by_name_or_id(session, Sector, model_id=sector_id, name=sector_name)
    return sector
def is_valid_pre_6_2_version(xml):
"""Returns whether the given XML object corresponds to an XML output file of Quantum ESPRESSO pw.x pre v6.2
:param xml: a parsed XML output file
:return: boolean, True when the XML was produced by Quantum ESPRESSO with the old XML format
"""
element_header = xml.f... | 5,358,841 |
def set_log_level(loglevel_str):
""" change log level for all handlers """
if loglevel_str == ps.LOG_DEBUG:
level = logging.DEBUG
elif loglevel_str == ps.LOG_INFO:
level = logging.INFO
elif loglevel_str == ps.LOG_ERROR:
level = logging.ERROR
elif type(loglevel_str) not in (st... | 5,358,842 |
def exportdf (df =None, refout:str =None, to:str =None, savepath:str =None,
modname:str ='_wexported_', reset_index:bool =True):
"""
Export dataframe ``df`` to `refout` files. `refout` file can
be Excell sheet file or '.json' file. To get more details about
the `writef` decorator , ... | 5,358,843 |
def invalidate_enrollment_mode_cache(sender, instance, **kwargs):  # pylint: disable=unused-argument
    """
    Drop the cached CourseEnrollment entry for the changed instance.
    """
    user_id = instance.user.id
    course_id = str(instance.course_id)
    cache.delete(CourseEnrollment.cache_key_name(user_id, course_id))
def clean_bpoa_seniority_list(csv):
"""Clean a digitized BPOA seniority list."""
dirty = pd.read_csv(csv)
clean = pd.DataFrame()
clean["job_title"] = dirty["Rank"]
clean["last_name"] = dirty["Last name"]
clean["first_name"] = dirty["First Name"]
clean = clean.apply(correct_name, axis=1)
... | 5,358,845 |
def create_profiling_group(agentOrchestrationConfig=None, clientToken=None, profilingGroupName=None):
"""
Creates a profiling group.
See also: AWS API Documentation
Exceptions
:example: response = client.create_profiling_group(
agentOrchestrationConfig={
'profilingEnabl... | 5,358,846 |
def topk(table, metric, dimensions, is_asc, k, **kwargs):
""" This function returns both the results according to the intent
as well as the debiasing suggestions.
Some of the oversights considered in this intent are-
1. Regression to the mean
2. Looking at tails to find causes - TODO
Args:
... | 5,358,847 |
def addMovieElement(findings, data):
    """Append the first finding to ``data``, or "" when nothing was found.

    Handles unavailable information for each movie by keeping the output
    row aligned: a missing value becomes an empty string.

    Args:
        findings: Values scraped for one attribute (possibly empty list).
        data: Row being built; mutated in place.

    Returns:
        The same ``data`` list, for chaining.
    """
    # Truthiness check replaces the non-idiomatic ``len(findings) != 0``.
    data.append(findings[0] if findings else "")
    return data
def get_bst_using_min_and_max_value(preorder: list) -> Node:
"""
time complexity: O(n)
space complexity: O(n)
"""
def construct_tree(min_: int, max_: int) -> Optional[Node]:
nonlocal pre_index
nonlocal l
if pre_index >= l:
return None
value = preorder[p... | 5,358,849 |
def qwtStepSize(intervalSize, maxSteps, base):
"""this version often doesn't find the best ticks: f.e for 15: 5, 10"""
minStep = divideInterval(intervalSize, maxSteps, base)
if minStep != 0.0:
# # ticks per interval
numTicks = math.ceil(abs(intervalSize / minStep)) - 1
# Do the min... | 5,358,850 |
def get_trimmed_glyph_name(gname, num):
"""
Glyph names cannot have more than 31 characters.
See https://docs.microsoft.com/en-us/typography/opentype/spec/...
recom#39post39-table
Trims an input string and appends a number to it.
"""
suffix = '_{}'.format(num)
return gname[:31... | 5,358,851 |
def vmatrix(ddir, file_prefix):
    """ generate vmatrix DataFile
    """
    # Bundle the autofile vmatrix name/reader/writer into one DataFile.
    return factory.DataFile(
        ddir=ddir,
        name=autofile.name.vmatrix(file_prefix),
        writer_=autofile.write.vmatrix,
        reader_=autofile.read.vmatrix,
    )
def fetch_precision_overlay(precision):
"""
Returns the overlay for the given precision value as cv2 image.
"""
overlay_folder = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'../assets/precision_overlays'
)
img_path = os.path.join(
overlay_folder, f'overla... | 5,358,853 |
def write_dataframe_to_file(df, outpath, **kwargs):
"""
Write a pandas database as a csv to provided file path
Input:
df: pandas dataframe
outpath: path to write csv
Ouput: None
Raises:
FileNotFoundError: if not a valid path
"""
try:
df.to_csv(outpath, **kwar... | 5,358,854 |
def get_argument_sources(
source: Source,
node: ast.Call,
func: Callable,
vars_only: bool,
pos_only: bool
) -> MutableMapping[str, Union[ast.AST, str]]:
"""Get the sources for argument from an ast.Call node
>>> def func(a, b, c, d=4):
>>> ...
>>> x = y = z = 1
... | 5,358,855 |
def cli(src, dest, dry_run):
"""Move all empty videos to a folder specified by the user.
Args:
src: Path that must already exist with the videos to process
dest: Path, where to dump the files
dry_run: boolean
"""
os.makedirs(dest, exist_ok=True)
for src_file in os.listdir(s... | 5,358,856 |
def test_config_valid():
"""
Test invalid values for all relevant fields individually.
This tests values, that are supposed to fail.
This does not test if no valid configuration is marked
as invalid!
"""
with pytest.raises(Exception):
config_valid(dummy_config)
valid = dummy_con... | 5,358,857 |
def set_icons(icons_light, icons_dark, light=False):
""" Set icon theme with plasma-changeicons for light and dark schemes
Args:
icons_light (str): Light mode icon theme
icons_dark (str): Dark mode icon theme
light (bool): wether using light or dark mode
"""
if light and icons_l... | 5,358,858 |
def _register_dataset(service, dataset, compression):
"""Registers a dataset with the tf.data service.
This transformation is similar to `register_dataset`, but supports additional
parameters which we do not yet want to add to the public Python API.
Args:
service: A string or a tuple indicating how to con... | 5,358,859 |
def rivers_by_station_number(stations,N):
"""function that uses stations_by_rivers to return a dictionary that it then
itterates each river for, summing the number of stations on the river into tuples"""
stationsOfRivers = stations_by_rivers(stations)
listOfNumberStations = []
for river in stationsO... | 5,358,860 |
def generate_itoa_dict(
bucket_values=[-0.33, 0, 0.33], valid_movement_direction=[1, 1, 1, 1]):
"""
Set cartesian product to generate action combination
spaces for the fetch environments
valid_movement_direction: To set
"""
action_space_extended = [bucket_values if m == 1 else [0]
... | 5,358,861 |
def try_convert(value, datetime_to_ms=False, precise=False):
"""Convert a str into more useful python type (datetime, float, int, bool), if possible
Some precision may be lost (e.g. Decimal converted to a float)
>>> try_convert('false')
False
>>> try_convert('123456789.123456')
123456789.12345... | 5,358,862 |
def signin(request, auth_form=AuthenticationForm,
template_name='accounts/signin_form.html',
redirect_field_name=REDIRECT_FIELD_NAME,
redirect_signin_function=signin_redirect, extra_context=None):
"""
Signin using email or username with password.
Signs a user in by combinin... | 5,358,863 |
def run_syncdb(database_info):
"""Make sure that the database tables are created.
database_info -- a dictionary specifying the database info as dictated by Django;
if None then the default database is used
Return the identifier the import process should use.
"""
django.setup()
... | 5,358,864 |
def autoclean_cv(training_dataframe, testing_dataframe, drop_nans=False, copy=False,
encoder=None, encoder_kwargs=None, ignore_update_check=False):
"""Performs a series of automated data cleaning transformations on the provided training and testing data sets
Unlike `autoclean()`, this function... | 5,358,865 |
def make_coroutine_from_tree(tree, filename="<aexec>", symbol="single",
local={}):
"""Make a coroutine from a tree structure."""
dct = {}
tree.body[0].args.args = list(map(make_arg, local))
exec(compile(tree, filename, symbol), dct)
return asyncio.coroutine(dct[CORO_NAME... | 5,358,866 |
def update_Sigmai(Yi, Es, Vars):
    """
    Return new Sigma_i (shape k): per-row mean squared residual of ``Yi``
    against ``Es``, plus the per-row mean of ``Vars``.
    """
    residual_term = np.mean(np.square(Yi - Es), axis=1)
    variance_term = np.mean(Vars, axis=1)
    return residual_term + variance_term
def gather_indexes(sequence_tensor, positions):
"""Gathers the vectors at the specific positions over a minibatch."""
sequence_shape = get_shape_list(sequence_tensor, expected_rank=3)
batch_size = sequence_shape[0]
seq_length = sequence_shape[1]
width = sequence_shape[2]
flat_offsets = tf.resha... | 5,358,868 |
def check_sub_schema_dict(sub_schema: typing.Any) -> dict:
    """Validate that a sub schema inside an allOf is a dict and return it."""
    if isinstance(sub_schema, dict):
        return sub_schema
    # Anything else violates the allOf contract.
    raise exceptions.MalformedSchemaError(
        "The elements of allOf must be dictionaries."
    )
def get_proto_root(workspace_root):
"""Gets the root protobuf directory.
Args:
workspace_root: context.label.workspace_root
Returns:
The directory relative to which generated include paths should be.
"""
if workspace_root:
return "/{}".format(workspace_root)
else:
r... | 5,358,870 |
def test_two_models_restored(tmpdir):
"""
Test if one can ``_restore_model`` and use two ``BaseModels``.
This is regression test for issue #83 (One can not create and use more than one instance of ``BaseModel``).
"""
tmpdir2 = tempfile.mkdtemp()
model1 = TrainableModel(dataset=None, log_dir=tm... | 5,358,871 |
def bwimcp(J, K, x, tr=.2, alpha=.05):
"""
Multiple comparisons for interactions
in a split-plot design.
The analysis is done by taking difference scores
among all pairs of dependent groups and
determining which of
these differences differ across levels of Factor A
using trimmed means. ... | 5,358,872 |
def trigger_decoder(mode: str, trigger_path: str=None) -> tuple:
"""Trigger Decoder.
Given a mode of operation (calibration, copy phrase, etc) and
a path to the trigger location (*.txt file), this function
will split into symbols (A, ..., Z), timing info (32.222), and
targetness (target... | 5,358,873 |
def method_comparison(filename=None, extension="png", usetex=False,
passed_ax=None, **kwargs):
"""
Create a plot comparing how estimated redshift changes as a
function of dispersion measure for each DM-z relation.
Parameters
----------
filename: string or None, optional
... | 5,358,874 |
def logCompression(pilImg):
"""Does log compression processing on a photo
Args:
pilImg (PIL Image format image): Image to be processed
"""
npImg = PILtoNumpy(pilImg)
c = 255 / (np.log10(1 + np.amax(npImg)))
for all_pixels in np.nditer(npImg, op_flags=['readwrite']):
all_pixels[.... | 5,358,875 |
def quantile_turnover(quantile_factor, quantile, period=1):
"""
Computes the proportion of names in a factor quantile that were
not in that quantile in the previous period.
Parameters
----------
quantile_factor : pd.Series
DataFrame with date, asset and factor quantile.
quantile : i... | 5,358,876 |
def dmp_div(f, g, u, K):
"""
Polynomial division with remainder in ``K[X]``.
Examples
========
>>> from sympy.polys import ring, ZZ, QQ
>>> R, x,y = ring("x,y", ZZ)
>>> R.dmp_div(x**2 + x*y, 2*x + 2)
(0, x**2 + x*y)
>>> R, x,y = ring("x,y", QQ)
>>> R.dmp_div(x**2 + x*y, 2*x +... | 5,358,877 |
def do_auto_install(config_name: str,
app_name: str,
port: Optional[int],
hostname: Optional[str] = '') -> None:
"""Performs non-interactive IDE install"""
configs = get_run_configs(config_name)
if config_name in configs:
print(f'Config wi... | 5,358,878 |
def is_designated_holiday(timestamp):
"""
Returns True if the date is one of Piedmont’s "designated holidays":
- New Years Day (January 1st)
- Memorial Day (last Monday of May)
- Independence Day (July 4th)
- Labor Day (First Monday of September)
- Thanksgiving Day (4th Thursday in November)... | 5,358,879 |
def check(val, desc=None, as_warn=False) -> SimpleAssertions:
"""
function based assertion call
:param val: val to check
:param desc: optional, description of val
:param as_warn: if set, convert assertion error to warning message
:return: assertionClass
"""
return SimpleAssertions(as_wa... | 5,358,880 |
def ChangeExtension(filename, newExtension):
"""ChangeExtension(filename, newExtension) -> str
Replaces the extension of the filename with the given one.
If the given filename has no extension, the new extension is
simply appended.
arguments:
filename
string correspondi... | 5,358,881 |
def file_based_input_fn_builder(input_file,
seq_length,
is_training,
drop_remainder):
"""Creates an `input_fn` closure to be passed to TPUEstimator."""
name_to_features = {
"input_ids": tf.FixedLenFeature([se... | 5,358,882 |
def test_udp_syslog_get(client_sh_udp: object, log_directory: str) -> None:
"""Testing GET resource
Args:
client_sh_udp (fixture): The test client.
log_directory (fixture): The fully qualified path for the log directory.
"""
logfile: str = os.path.join(log_directory, 'syslog_server.log'... | 5,358,883 |
def synchrotron_thin_spectrum(freqs, ne, te, bfield):
"""Optically thin (unobsorbed) synchrotron spectrum.
Units of erg/cm^3/s/Hz
NY95b Eq 3.9
"""
const = 4.43e-30 # erg/cm^3/s/Hz
theta_e = K_BLTZ * te / (MELC * SPLC * SPLC)
v0 = QELC * bfield / (2*np.pi*MELC*SPLC)
xm = 2*freqs/(3*v0... | 5,358,884 |
def reset_monotonic_time(value=0.0):
    """
    Rewind the fake monotonic clock to ``value`` so its next call
    reports the real time again.
    """
    # Rebind the module-level clock state.
    global _current_time  # pylint:disable=global-statement
    _current_time = value
def extract_val_setup(timestamp, lat, lon, dataPath = "Data/IceData/"):
""" Extracts a timestamped value from a NSIDC GeoTIFF File
Inputs:
timestamp = datetime struct of sample
lat = sample latitude
lon = sample longitude
dataPath = path to GeoTIFF files
Outputs:
... | 5,358,886 |
def create_message(service, to, subject, message_text):
"""Create a message for an email.
Args:
sender: Email address of the sender.
to: Email address of the receiver.
subject: The subject of the email message.
message_text: The text of the email message.
Returns:
An object c... | 5,358,887 |
def numpy_ndarray(nb_arr):
    """Copy a numba DeviceNDArray's data back to host memory as a numpy.ndarray.
    """
    host_copy = nb_arr.copy_to_host()
    return host_copy
def convert_to_dict(my_keys, my_values):
"""Merge a given list of keys and a list of values into a dictionary.
Args:
my_keys (list): A list of keys
my_values (list): A list corresponding values
Returns:
Dict: Dictionary of the list of keys mapped to the list of values
"""
... | 5,358,889 |
def add_vit(request):
"""
Add a new vit with API, currently image and video are not supported
"""
user = KeyBackend().authenticate(request)
if request.method == "POST":
if request.user.is_authenticated:
form = VitForm(request.POST)
if form.is_valid():
... | 5,358,890 |
def long_control_state_trans(active, long_control_state, v_ego, v_target, v_pid,
output_gb, brake_pressed, cruise_standstill, min_speed_can):
"""Update longitudinal control state machine"""
stopping_target_speed = min_speed_can + STOPPING_TARGET_SPEED_OFFSET
stopping_condition = (v_eg... | 5,358,891 |
def load_group_to_namedtuple(group: h5py.Group):
"""Returns namedtuple with name of group and key: values of group attrs
e.g. srs1 group which has gpib: 1... will be returned as an srs1 namedtuple with .gpib etc
"""
# Check it was stored as a namedTuple
if group.attrs.get('description', None) != 'Na... | 5,358,892 |
def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the g... | 5,358,893 |
def send_put(context):
    """Issue a PUT for the current scenario and stash the response on context."""
    headers = _get_request_headers(context)
    params = _get_params(context)
    response = context.session.put(
        context.request_url,
        headers=headers,
        params=params,
        json=context.request_json_payload,
    )
    context.response = response
def cycle(iterable):
"""Returns an infinite iterator by cycling through the iterable.
It's better than the itertools.cycle function because it resets to iterable each time it is
exhausted. This is useful when using cycling through the torch.utils.data.DataLoader object.
See https://stackoverflow.com/a... | 5,358,895 |
def export_json(blocks=None, subsections=False):
"""
Returns JSON representation of parsed config structure
:param blocks: List of blocks to export
:param subsections: Export all subblocks
:return: JSON-dumped string
"""
if blocks is not None:
blocks = [_canonicalize_blockid(b) for b... | 5,358,896 |
def get_dummy_message(text):
    """Build a minimal botogram Message carrying the given custom text."""
    payload = {
        "message_id": 1,
        "from": {"id": 123, "first_name": "Nobody"},
        "chat": {"id": -123, "type": "chat", "title": "Something"},
        "date": 1,
        "text": text,
    }
    return botogram.objects.messages.Message(payload)
def test_use_callbacks() -> None:
    """Exercise the use_callbacks decorator on success and error paths."""
    subject = Object()
    # Success path populates .result ...
    subject.run_successful()
    assert subject.result
    # ... and the error path captures the raised exception.
    subject.run_error()
    assert isinstance(subject.error, Exception)
def install_package(package_name, package_version, index_url, info, cache_path):
"""
Install packages based on the information we gather from the index_url page
"""
for item in info:
platform = item["platform"]
implementation = item["implementation"]
python_version = item["versio... | 5,358,899 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.