| code (string, lengths 22 – 1.05M) | apis (list, lengths 1 – 3.31k) | extract_api (string, lengths 75 – 3.25M) |
|---|---|---|
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
from matplotlib import cm
import numpy as np
import os
import contorno
from constantes import INTERVALOS, PASSOS, TAMANHO_BARRA, DELTA_T, DELTA_X
z_temp = contorno.p_3
TAMANHO_BARRA = 2
x = np.linspace(0.0, TAMANHO_BARRA, INTERVALOS+1)
y = np.lin... | [
"numpy.copy",
"numpy.asarray",
"numpy.linspace",
"matplotlib.pyplot.figure",
"numpy.meshgrid",
"matplotlib.pyplot.show"
] | [((264, 311), 'numpy.linspace', 'np.linspace', (['(0.0)', 'TAMANHO_BARRA', '(INTERVALOS + 1)'], {}), '(0.0, TAMANHO_BARRA, INTERVALOS + 1)\n', (275, 311), True, 'import numpy as np\n'), ((314, 351), 'numpy.linspace', 'np.linspace', (['(0.0)', 'DELTA_T', '(PASSOS + 1)'], {}), '(0.0, DELTA_T, PASSOS + 1)\n', (325, 351), ... |
import sys
from class_vis import prettyPicture
from prep_terrain_data import makeTerrainData
import matplotlib.pyplot as plt
import copy
import numpy as np
import pylab as pl
features_train, labels_train, features_test, labels_test = makeTerrainData()
########################## SVM ###################... | [
"sklearn.metrics.accuracy_score",
"prep_terrain_data.makeTerrainData",
"sklearn.svm.SVC"
] | [((247, 264), 'prep_terrain_data.makeTerrainData', 'makeTerrainData', ([], {}), '()\n', (262, 264), False, 'from prep_terrain_data import makeTerrainData\n'), ((437, 457), 'sklearn.svm.SVC', 'SVC', ([], {'kernel': '"""linear"""'}), "(kernel='linear')\n", (440, 457), False, 'from sklearn.svm import SVC\n'), ((784, 817),... |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from builtins import open
from builtins import str
from future import standard_library
standard_library.install_aliases()
try:
from queue i... | [
"signal.signal",
"billiard.Pool",
"future.standard_library.install_aliases",
"threading.Event",
"sys.exit",
"threading.Thread",
"Queue.Queue"
] | [((263, 297), 'future.standard_library.install_aliases', 'standard_library.install_aliases', ([], {}), '()\n', (295, 297), False, 'from future import standard_library\n'), ((2592, 2599), 'Queue.Queue', 'Queue', ([], {}), '()\n', (2597, 2599), False, 'from Queue import Queue, Empty\n'), ((3287, 3294), 'Queue.Queue', 'Qu... |
import datetime
import io
import json_tricks
import logging
import os
from os.path import (abspath, basename, dirname, exists, expanduser,
join, realpath, relpath, splitext)
import re
import shutil
import sys
from traits.api import (Any, Dict, Enum, HasTraits, Instance, List, Long,
... | [
"logging.getLogger",
"whoosh.fields.Schema",
"io.open",
"whoosh.qparser.QueryParser",
"traits.api.Enum",
"backports.csv.Sniffer",
"os.path.exists",
"whoosh.qparser.dateparse.DateParserPlugin",
"shutil.move",
"json_tricks.load",
"whoosh.util.times.long_to_datetime",
"os.path.expanduser",
"os.... | [((598, 625), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (615, 625), False, 'import logging\n'), ((791, 818), 'whoosh.fields.NUMERIC', 'fields.NUMERIC', ([], {'numtype': 'int'}), '(numtype=int)\n', (805, 818), False, 'from whoosh import fields, qparser, query\n'), ((827, 856), 'whoosh... |
"""This submodule contains a JSON reference translator."""
__author__ = '<NAME>'
__copyright__ = 'Copyright © 2021 <NAME>'
__license__ = 'MIT'
__all__ = ()
import prance.util.url as _url
def _reference_key(ref_url, item_path):
"""
Return a portion of the dereferenced URL.
format - ref-url_obj-path
... | [
"prance.util.url.fetch_url",
"prance.util.url.absurl",
"prance.util.iterators.reference_iterator",
"prance.util.url.split_url_reference",
"copy.deepcopy",
"prance.util.path.path_get",
"prance.util.url.urlresource"
] | [((1375, 1395), 'copy.deepcopy', 'copy.deepcopy', (['specs'], {}), '(specs)\n', (1388, 1395), False, 'import copy\n'), ((3372, 3441), 'prance.util.url.fetch_url', '_url.fetch_url', (['ref_url', 'self.__reference_cache'], {'strict': 'self.__strict'}), '(ref_url, self.__reference_cache, strict=self.__strict)\n', (3386, 3... |
from io import StringIO
from unittest import TestCase
from dropSQL.parser.streams import *
class StreamTestCase(TestCase):
def test(self):
s = '12'
cs = Characters(StringIO(s))
ch = cs.peek().ok()
self.assertEqual(ch, '1')
ch = cs.peek().ok()
self.assertEqual(ch,... | [
"io.StringIO"
] | [((187, 198), 'io.StringIO', 'StringIO', (['s'], {}), '(s)\n', (195, 198), False, 'from io import StringIO\n')] |
# coding: utf-8
import os
import pickle
import shutil
import tempfile
import unittest
import ray
from ray import tune
from ray.rllib import _register_all
from ray.tune import Trainable
from ray.tune.utils import validate_save_restore
class SerialTuneRelativeLocalDirTest(unittest.TestCase):
local_mode = True
... | [
"os.path.expanduser",
"os.path.exists",
"os.listdir",
"pickle.dump",
"ray.shutdown",
"os.path.join",
"pickle.load",
"pytest.main",
"os.path.isfile",
"os.path.isdir",
"tempfile.mkdtemp",
"ray.tune.utils.validate_save_restore",
"shutil.rmtree",
"os.path.abspath",
"ray.init",
"ray.tune.ru... | [((1131, 1191), 'ray.init', 'ray.init', ([], {'num_cpus': '(1)', 'num_gpus': '(0)', 'local_mode': 'self.local_mode'}), '(num_cpus=1, num_gpus=0, local_mode=self.local_mode)\n', (1139, 1191), False, 'import ray\n'), ((1387, 1401), 'ray.shutdown', 'ray.shutdown', ([], {}), '()\n', (1399, 1401), False, 'import ray\n'), ((... |
import numpy as np
import pytest
import theano
import theano.tensor as tt
# Don't import test classes otherwise they get tested as part of the file
from tests import unittest_tools as utt
from tests.gpuarray.config import mode_with_gpu, mode_without_gpu, test_ctx_name
from tests.tensor.test_basic import (
TestAll... | [
"tests.unittest_tools.seed_rng",
"theano.tensor.iscalar",
"theano.tensor.lscalar",
"numpy.random.rand",
"tests.gpuarray.config.mode_with_gpu.excluding",
"numpy.int32",
"theano.tensor.zeros_like",
"numpy.array",
"theano.gpuarray.type.gpuarray_shared_constructor",
"tests.unittest_tools.fetch_seed",
... | [((987, 1015), 'pytest.importorskip', 'pytest.importorskip', (['"""pygpu"""'], {}), "('pygpu')\n", (1006, 1015), False, 'import pytest\n'), ((1043, 1057), 'tests.unittest_tools.seed_rng', 'utt.seed_rng', ([], {}), '()\n', (1055, 1057), True, 'from tests import unittest_tools as utt\n'), ((1309, 1470), 'theano.function'... |
from pytube import YouTube
def download_video(watch_url):
yt = YouTube(watch_url)
(yt.streams
.filter(progressive=True, file_extension='mp4')
.order_by('resolution')
.desc()
.first()
.download())
| [
"pytube.YouTube"
] | [((68, 86), 'pytube.YouTube', 'YouTube', (['watch_url'], {}), '(watch_url)\n', (75, 86), False, 'from pytube import YouTube\n')] |
from easydict import EasyDict
hopper_ppo_default_config = dict(
env=dict(
env_id='HopperMuJoCoEnv-v0',
norm_obs=dict(use_norm=False, ),
norm_reward=dict(use_norm=False, ),
collector_env_num=8,
evaluator_env_num=10,
use_act_scale=True,
n_evaluator_episode=10,
... | [
"easydict.EasyDict"
] | [((1219, 1254), 'easydict.EasyDict', 'EasyDict', (['hopper_ppo_default_config'], {}), '(hopper_ppo_default_config)\n', (1227, 1254), False, 'from easydict import EasyDict\n'), ((1646, 1688), 'easydict.EasyDict', 'EasyDict', (['hopper_ppo_create_default_config'], {}), '(hopper_ppo_create_default_config)\n', (1654, 1688)... |
import os
from tempfile import TemporaryDirectory
from quickbase_client.utils.pywriting_utils import BasicPyFileWriter
from quickbase_client.utils.pywriting_utils import PyPackageWriter
class TestBasicFileWriter:
def test_outputs_lines(self):
w = BasicPyFileWriter()
w.add_line('import abc')
... | [
"tempfile.TemporaryDirectory",
"os.path.exists",
"quickbase_client.utils.pywriting_utils.PyPackageWriter",
"os.path.join",
"quickbase_client.utils.pywriting_utils.BasicPyFileWriter"
] | [((263, 282), 'quickbase_client.utils.pywriting_utils.BasicPyFileWriter', 'BasicPyFileWriter', ([], {}), '()\n', (280, 282), False, 'from quickbase_client.utils.pywriting_utils import BasicPyFileWriter\n'), ((484, 503), 'quickbase_client.utils.pywriting_utils.BasicPyFileWriter', 'BasicPyFileWriter', ([], {}), '()\n', (... |
from functools import partial
from corpustools.corpus.classes import Word
from corpustools.symbolsim.edit_distance import edit_distance
from corpustools.symbolsim.khorsi import khorsi
from corpustools.symbolsim.phono_edit_distance import phono_edit_distance
from corpustools.symbolsim.phono_align import Aligner
from co... | [
"corpustools.corpus.classes.Word",
"functools.partial",
"corpustools.multiproc.score_mp"
] | [((2313, 2525), 'functools.partial', 'partial', (['neighborhood_density', 'corpus_context'], {'tierdict': 'tierdict', 'tier_type': 'tier_type', 'sequence_type': 'sequence_type', 'algorithm': 'algorithm', 'max_distance': 'max_distance', 'collapse_homophones': 'collapse_homophones'}), '(neighborhood_density, corpus_conte... |
import gym
import numpy as np
from itertools import product
import matplotlib.pyplot as plt
def print_policy(Q, env):
""" This is a helper function to print a nice policy from the Q function"""
moves = [u'←', u'↓',u'→', u'↑']
if not hasattr(env, 'desc'):
env = env.env
dims = env.desc.shape
... | [
"matplotlib.pyplot.imshow",
"numpy.random.rand",
"matplotlib.pyplot.xticks",
"numpy.argmax",
"numpy.max",
"numpy.array",
"numpy.zeros",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.yticks",
"numpy.unravel_index",
"matplotlib.colors.Normalize",
"numpy.chararray",
"numpy.random.uniform",
"... | [((4710, 4735), 'gym.make', 'gym.make', (['"""FrozenLake-v0"""'], {}), "('FrozenLake-v0')\n", (4718, 4735), False, 'import gym\n'), ((4926, 4936), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4934, 4936), True, 'import matplotlib.pyplot as plt\n'), ((5035, 5045), 'matplotlib.pyplot.show', 'plt.show', ([], {... |
# Generated by Django 3.0.4 on 2020-07-14 11:00
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
("core", "0026_auto_20200713_1535"),
("ai_lab", "0002_ailabusecase"),
]
operations = [
migrations.Cre... | [
"django.db.models.OneToOneField",
"django.db.models.ForeignKey"
] | [((463, 633), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'auto_created': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'parent_link': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'to': '"""core.ArticlePage"""'}), "(auto_created=True, on_delete=django.db.models.deletion\n... |
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and Contributors
# License: MIT. See LICENSE
import unittest
import frappe
from frappe.utils import set_request
from frappe.website.serve import get_response
test_dependencies = ["Blog Post"]
class TestWebsiteRouteMeta(unittest.TestCase):
def test_m... | [
"frappe.website.serve.get_response",
"frappe.db.rollback",
"frappe.get_all",
"frappe.utils.set_request",
"frappe.new_doc"
] | [((356, 469), 'frappe.get_all', 'frappe.get_all', (['"""Blog Post"""'], {'fields': "['name', 'route']", 'filters': "{'published': 1, 'route': ('!=', '')}", 'limit': '(1)'}), "('Blog Post', fields=['name', 'route'], filters={'published':\n 1, 'route': ('!=', '')}, limit=1)\n", (370, 469), False, 'import frappe\n'), (... |
# Lint as: python3
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless ... | [
"lingvo.compat.random.uniform",
"lingvo.compat.global_variables_initializer",
"lingvo.compat.zeros",
"lingvo.compat.test.main",
"lingvo.tasks.car.car_layers.SamplingAndGroupingLayer.Params",
"lingvo.compat.Graph",
"lingvo.core.py_utils.NestedMap"
] | [((3382, 3396), 'lingvo.compat.test.main', 'tf.test.main', ([], {}), '()\n', (3394, 3396), True, 'from lingvo import compat as tf\n'), ((1039, 1049), 'lingvo.compat.Graph', 'tf.Graph', ([], {}), '()\n', (1047, 1049), True, 'from lingvo import compat as tf\n'), ((1616, 1649), 'lingvo.compat.global_variables_initializer'... |
# -*- coding: utf-8 -*-
# @Filename : take_snapshot.py
# @Date : 2019-07-15-13-44
# @Project: ITC-sniff-for-changes-in-directory
# @Author: <NAME>
# @Website: http://itcave.eu
# @Email: <EMAIL>
# @License: MIT
# @Copyright (C) 2019 ITGO <NAME>
# Generic imports
import os
import pickle
import re
import argparse
from d... | [
"pickle.dump",
"argparse.ArgumentParser",
"os.path.join",
"os.path.getmtime",
"datetime.datetime.now",
"re.sub",
"os.walk"
] | [((956, 975), 'os.walk', 'os.walk', (['sniff_path'], {}), '(sniff_path)\n', (963, 975), False, 'import os\n'), ((1934, 1990), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Directory Sniffer"""'}), "(description='Directory Sniffer')\n", (1957, 1990), False, 'import argparse\n'), ((1784, ... |
import sys
import pygame
from app_window import App_window
from button import Button
from snake import Snake
from food import Food
from settings import WIDTH, HEIGHT, FONT, BG_COL, QUIT_BUTTON_COLOUR, PLAY_BUTTON_COLOUR, BLACK, FPS, RED
class App:
def __init__(self):
pygame.init()
self.clock = pyg... | [
"app_window.App_window",
"snake.Snake",
"pygame.init",
"pygame.quit",
"sys.exit",
"pygame.event.get",
"pygame.display.set_mode",
"button.Button",
"pygame.time.Clock",
"pygame.display.update",
"food.Food",
"pygame.font.SysFont"
] | [((282, 295), 'pygame.init', 'pygame.init', ([], {}), '()\n', (293, 295), False, 'import pygame\n'), ((317, 336), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (334, 336), False, 'import pygame\n'), ((359, 399), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(WIDTH, HEIGHT)'], {}), '((WIDTH, HEI... |
from sklearn.metrics import f1_score,accuracy_score
import numpy as np
from utilities.tools import load_model
import pandas as pd
def predict_MSRP_test_data(n_models,nb_words,nlp_f,test_data_1,test_data_2,test_labels):
models=[]
n_h_features=nlp_f.shape[1]
print('loading the models...')
for i in range... | [
"numpy.mean",
"sklearn.metrics.f1_score",
"numpy.asarray",
"utilities.tools.load_model",
"pandas.DataFrame",
"sklearn.metrics.accuracy_score"
] | [((624, 641), 'numpy.asarray', 'np.asarray', (['preds'], {}), '(preds)\n', (634, 641), True, 'import numpy as np\n'), ((1052, 1091), 'pandas.DataFrame', 'pd.DataFrame', (["{'Quality': final_labels}"], {}), "({'Quality': final_labels})\n", (1064, 1091), True, 'import pandas as pd\n'), ((1657, 1674), 'numpy.asarray', 'np... |
# coding=utf-8
import logging
import traceback
from os import makedirs
from os.path import exists, join
from textwrap import fill
import matplotlib.patheffects as PathEffects
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
from koino.plot import big_square, default_alpha
from matplotlib import... | [
"matplotlib.patheffects.Normal",
"textwrap.fill",
"numpy.isfinite",
"numpy.arange",
"numpy.atleast_2d",
"os.path.exists",
"seaborn.color_palette",
"numpy.sort",
"matplotlib.pyplot.close",
"matplotlib.pyplot.savefig",
"seaborn.heatmap",
"matplotlib.pyplot.suptitle",
"traceback.format_exc",
... | [((534, 570), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(2)'], {'figsize': '(26, 10)'}), '(1, 2, figsize=(26, 10))\n', (546, 570), True, 'import matplotlib.pyplot as plt\n'), ((2652, 2768), 'matplotlib.pyplot.suptitle', 'plt.suptitle', (["('Silhouette analysis for KMeans with n_clusters = %d' % n_clusters... |
import typing
nt = typing.NamedTuple("name", [("field", str)]) | [
"typing.NamedTuple"
] | [((20, 63), 'typing.NamedTuple', 'typing.NamedTuple', (['"""name"""', "[('field', str)]"], {}), "('name', [('field', str)])\n", (37, 63), False, 'import typing\n')] |
from CommonServerPython import *
''' IMPORTS '''
import re
import requests
# Disable insecure warnings
requests.packages.urllib3.disable_warnings()
''' GLOBALS/PARAMS '''
VENDOR = 'Have I Been Pwned? V2'
MAX_RETRY_ALLOWED = demisto.params().get('max_retry_time', -1)
API_KEY = demisto.params().get('api_key')
USE_SS... | [
"re.sub",
"requests.packages.urllib3.disable_warnings",
"requests.request",
"re.compile"
] | [((106, 150), 'requests.packages.urllib3.disable_warnings', 'requests.packages.urllib3.disable_warnings', ([], {}), '()\n', (148, 150), False, 'import requests\n'), ((2908, 2952), 're.compile', 're.compile', (['"""<a href="(.+?)"(.+?)>(.+?)</a>"""'], {}), '(\'<a href="(.+?)"(.+?)>(.+?)</a>\')\n', (2918, 2952), False, '... |
# Generated by Django 2.2.15 on 2021-01-29 20:20
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('sitewebapp', '0010_auditionanswers_auditionquestions_audtionrounds_candidates'),
]
operations = [
migratio... | [
"django.db.migrations.DeleteModel",
"django.db.models.AutoField",
"django.db.models.IntegerField",
"django.db.models.ForeignKey"
] | [((999, 1043), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""audtionRounds"""'}), "(name='audtionRounds')\n", (1021, 1043), False, 'from django.db import migrations, models\n'), ((862, 983), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deleti... |
import inspect
import json
import os
import random
import subprocess
import time
import requests
import ast
import paramiko
import rancher
from rancher import ApiError
from lib.aws import AmazonWebServices
DEFAULT_TIMEOUT = 120
DEFAULT_MULTI_CLUSTER_APP_TIMEOUT = 300
CATTLE_TEST_URL = os.environ.get('CATTLE_TEST_URL'... | [
"rancher.Client",
"subprocess.check_output",
"json.loads",
"lib.aws.AmazonWebServices",
"paramiko.SSHClient",
"paramiko.AutoAddPolicy",
"os.environ.get",
"time.sleep",
"requests.get",
"os.path.realpath",
"inspect.getsource",
"time.time",
"random.randint"
] | [((288, 344), 'os.environ.get', 'os.environ.get', (['"""CATTLE_TEST_URL"""', '"""http://localhost:80"""'], {}), "('CATTLE_TEST_URL', 'http://localhost:80')\n", (302, 344), False, 'import os\n'), ((359, 396), 'os.environ.get', 'os.environ.get', (['"""ADMIN_TOKEN"""', '"""None"""'], {}), "('ADMIN_TOKEN', 'None')\n", (373... |
"""Bindings for the Barnes Hut TSNE algorithm with fast nearest neighbors
Refs:
References
[1] <NAME>, L.J.P.; Hinton, G.E. Visualizing High-Dimensional Data
Using t-SNE. Journal of Machine Learning Research 9:2579-2605, 2008.
[2] <NAME>, L.J.P. t-Distributed Stochastic Neighbor Embedding
http://homepage.tudelft.nl/19... | [
"ctypes.POINTER",
"numpy.require",
"pkg_resources.resource_filename",
"numpy.array",
"numpy.zeros",
"numpy.ctypeslib.ndpointer",
"ctypes.c_bool",
"ctypes.c_int",
"numpy.ctypeslib.load_library",
"ctypes.c_float"
] | [((3861, 3908), 'pkg_resources.resource_filename', 'pkg_resources.resource_filename', (['"""tsnecuda"""', '""""""'], {}), "('tsnecuda', '')\n", (3892, 3908), False, 'import pkg_resources\n'), ((4169, 4220), 'numpy.ctypeslib.load_library', 'N.ctypeslib.load_library', (['"""libtsnecuda"""', 'self._path'], {}), "('libtsne... |
from __future__ import absolute_import, division, print_function
import pytest
import json
import asyncio
import stripe
import urllib3
from stripe import six, util
from async_stripe.http_client import TornadoAsyncHTTPClient
pytestmark = pytest.mark.asyncio
VALID_API_METHODS = ("get", "post", "delete")
class Str... | [
"stripe.http_client.new_default_http_client",
"json.loads",
"pytest.raises",
"stripe.api_requestor._api_encode",
"pytest.fixture",
"stripe.http_client.warnings.simplefilter",
"asyncio.Future"
] | [((695, 723), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (709, 723), False, 'import pytest\n'), ((7283, 7311), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (7297, 7311), False, 'import pytest\n'), ((843, 893), 'stripe.http_client.warnings... |
import sys
from PyQt5 import QtGui
from PyQt5.QtCore import QEvent, QPoint, Qt
from PyQt5.QtGui import QIcon
from PyQt5.QtWidgets import (QApplication, QDialog, QGroupBox, QMainWindow,
QTabWidget, QVBoxLayout, QWidget)
from sim2d_game_analyzer.fmdb_tab import FMDBTab
class MainWindow(QM... | [
"PyQt5.QtWidgets.QWidget",
"PyQt5.QtWidgets.QMainWindow.__init__",
"PyQt5.QtGui.QIcon",
"PyQt5.QtGui.QFont",
"PyQt5.QtWidgets.QApplication",
"PyQt5.QtWidgets.QTabWidget",
"PyQt5.QtWidgets.QVBoxLayout",
"sim2d_game_analyzer.fmdb_tab.FMDBTab"
] | [((1026, 1048), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (1038, 1048), False, 'from PyQt5.QtWidgets import QApplication, QDialog, QGroupBox, QMainWindow, QTabWidget, QVBoxLayout, QWidget\n'), ((464, 490), 'PyQt5.QtWidgets.QMainWindow.__init__', 'QMainWindow.__init__', (['self'... |
# pip install openpyxl
# pip install cuid
import os.path
import json
import datetime
from openpyxl import load_workbook
import cuid # https://github.com/necaris/cuid.py - create uuid's in the format that graphcool expects
SOURCE_XLSX = "./data/CLP_combined.xlsx"
EXTRACT_OUTPUT_DIR = "../server/extract"
SCHOOL_TITL... | [
"openpyxl.load_workbook",
"datetime.datetime.strptime",
"json.dumps",
"cuid.cuid",
"datetime.datetime.now"
] | [((4132, 4197), 'openpyxl.load_workbook', 'load_workbook', ([], {'filename': 'xlsx_file', 'read_only': '(True)', 'data_only': '(True)'}), '(filename=xlsx_file, read_only=True, data_only=True)\n', (4145, 4197), False, 'from openpyxl import load_workbook\n'), ((7045, 7086), 'datetime.datetime.strptime', 'datetime.datetim... |
import os
import sys
from lxml import html
import pathlib
import json
import m3u8
from seleniumwire import webdriver
from selenium.common.exceptions import TimeoutException, NoSuchElementException
from selenium.webdriver.firefox.options import Options as FirefoxOptions
IFRAME_CSS_SELECTOR = '.iframe-container>iframe'... | [
"pathlib.Path",
"m3u8.load",
"lxml.html.fromstring",
"json.dumps",
"os.path.isfile",
"seleniumwire.webdriver.Firefox",
"selenium.webdriver.firefox.options.Options",
"seleniumwire.webdriver.FirefoxProfile",
"json.load"
] | [((846, 875), 'os.path.isfile', 'os.path.isfile', (['self.m3ucache'], {}), '(self.m3ucache)\n', (860, 875), False, 'import os\n'), ((2466, 2485), 'm3u8.load', 'm3u8.load', (['m3u8_url'], {}), '(m3u8_url)\n', (2475, 2485), False, 'import m3u8\n'), ((3068, 3101), 'lxml.html.fromstring', 'html.fromstring', (['chanpage.con... |
import sys
import threading
import logging
import time
logger = logging.getLogger("interchange.strategy.base")
class BaseStrategy(object):
"""Implements threshold-interval based flow control.
The overall goal is to trap the flow of apps from the
workflow, measure it and redirect it the appropriate execu... | [
"logging.getLogger",
"threading.Thread",
"threading.Event",
"time.time"
] | [((65, 111), 'logging.getLogger', 'logging.getLogger', (['"""interchange.strategy.base"""'], {}), "('interchange.strategy.base')\n", (82, 111), False, 'import logging\n'), ((1985, 2002), 'threading.Event', 'threading.Event', ([], {}), '()\n', (2000, 2002), False, 'import threading\n'), ((2026, 2096), 'threading.Thread'... |
import torch
import torchvision
import matplotlib
import matplotlib.pyplot as plt
from PIL import Image
from captum.attr import GuidedGradCam, GuidedBackprop
from captum.attr import LayerActivation, LayerConductance, LayerGradCam
from data_utils import *
from image_utils import *
from captum_utils import *
import nump... | [
"torch.LongTensor",
"torch.from_numpy",
"matplotlib.cm.jet",
"visualizers.GradCam",
"matplotlib.pyplot.imshow",
"numpy.max",
"matplotlib.pyplot.axis",
"captum.attr.GuidedGradCam",
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.gcf",
"matplotlib.pyplot.title",
"captum.attr.LayerAttribution.int... | [((699, 748), 'torchvision.models.squeezenet1_1', 'torchvision.models.squeezenet1_1', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (731, 748), False, 'import torchvision\n'), ((754, 763), 'visualizers.GradCam', 'GradCam', ([], {}), '()\n', (761, 763), False, 'from visualizers import GradCam\n'), ((870, 889), ... |
from itertools import product
import numpy as np
import pytest
from alibi_detect.utils.discretizer import Discretizer
x = np.random.rand(10, 4)
n_features = x.shape[1]
feature_names = [str(_) for _ in range(n_features)]
categorical_features = [[], [1, 3]]
percentiles = [list(np.arange(25, 100, 25)), list(np.arange(10... | [
"alibi_detect.utils.discretizer.Discretizer",
"itertools.product",
"numpy.random.rand",
"numpy.arange"
] | [((123, 144), 'numpy.random.rand', 'np.random.rand', (['(10)', '(4)'], {}), '(10, 4)\n', (137, 144), True, 'import numpy as np\n'), ((346, 388), 'itertools.product', 'product', (['categorical_features', 'percentiles'], {}), '(categorical_features, percentiles)\n', (353, 388), False, 'from itertools import product\n'), ... |
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 14 11:49:43 2021
@author: Andres
"""
import sys,time
import unittest
from tinc import *
class ParameterSpaceTest(unittest.TestCase):
def test_parameter(self):
p1 = Parameter("param1")
p2 = Parameter("param2")
ps = ParameterSpace("ps")
... | [
"unittest.main"
] | [((3345, 3360), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3358, 3360), False, 'import unittest\n')] |
# Created by <NAME> on 8/28/19
import gym
import numpy as np
import torch
from interpretable_ddts.agents.ddt_agent import DDTAgent
from interpretable_ddts.agents.mlp_agent import MLPAgent
from interpretable_ddts.opt_helpers.replay_buffer import discount_reward
import torch.multiprocessing as mp
import argparse
import c... | [
"torch.manual_seed",
"argparse.ArgumentParser",
"interpretable_ddts.agents.mlp_agent.MLPAgent",
"interpretable_ddts.opt_helpers.replay_buffer.discount_reward",
"random.seed",
"numpy.sum",
"numpy.random.seed",
"interpretable_ddts.agents.ddt_agent.DDTAgent",
"torch.multiprocessing.set_sharing_strategy... | [((645, 668), 'torch.manual_seed', 'torch.manual_seed', (['seed'], {}), '(seed)\n', (662, 668), False, 'import torch\n'), ((692, 712), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (706, 712), True, 'import numpy as np\n'), ((749, 766), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (760... |
import hashlib
from typing import TypeVar, Union
import redis
from openff.toolkit.topology import Molecule
from openff.bespokefit.executor.services.qcgenerator import worker
from openff.bespokefit.schema.tasks import HessianTask, OptimizationTask, Torsion1DTask
from openff.bespokefit.utilities.molecule import canonic... | [
"openff.toolkit.topology.Molecule.from_smiles",
"typing.TypeVar"
] | [((341, 400), 'typing.TypeVar', 'TypeVar', (['"""_T"""', 'HessianTask', 'OptimizationTask', 'Torsion1DTask'], {}), "('_T', HessianTask, OptimizationTask, Torsion1DTask)\n", (348, 400), False, 'from typing import TypeVar, Union\n'), ((609, 671), 'openff.toolkit.topology.Molecule.from_smiles', 'Molecule.from_smiles', (['... |
'''Copyright Gigaspaces, 2017, All Rights Reserved'''
from cloudify.plugins import lifecycle
OP_START = 'hacker.interfaces.lifecycle.start'
OP_STOP = 'hacker.interfaces.lifecycle.stop'
OP_SS_C = 'hacker.interfaces.lifecycle.create_snapshots'
OP_SS_D = 'hacker.interfaces.lifecycle.delete_snapshots'
REQUIRED_OPS = set([... | [
"cloudify.plugins.lifecycle.is_host_node"
] | [((3599, 3631), 'cloudify.plugins.lifecycle.is_host_node', 'lifecycle.is_host_node', (['instance'], {}), '(instance)\n', (3621, 3631), False, 'from cloudify.plugins import lifecycle\n')] |
# coding=utf-8
# Copyright <NAME>, <NAME>, <NAME> and The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LI... | [
"tensorflow.convert_to_tensor",
"transformers.TFLEDForConditionalGeneration.from_pretrained",
"tensorflow.range",
"tensorflow.concat",
"tensorflow.constant",
"tensorflow.ones_like",
"tensorflow.zeros_like",
"tensorflow.debugging.assert_near",
"transformers.is_tf_available",
"transformers.TFLEDMode... | [((901, 918), 'transformers.is_tf_available', 'is_tf_available', ([], {}), '()\n', (916, 918), False, 'from transformers import LEDConfig, is_tf_available\n'), ((12875, 12911), 'tensorflow.constant', 'tf.constant', (['tok_lst'], {'dtype': 'tf.int32'}), '(tok_lst, dtype=tf.int32)\n', (12886, 12911), True, 'import tensor... |
import contextlib
from datetime import date
from datetime import datetime
from datetime import timezone
from functools import wraps
from io import BytesIO
from itertools import count
from typing import Any
from typing import Dict
from typing import Sequence
import pytest
from dateutil.parser import parse as parse_date... | [
"dateutil.parser.parse",
"lxml.etree.XPath",
"dateutil.relativedelta.relativedelta",
"common.renderers.counter_generator",
"django.urls.reverse",
"common.util.TaricDateRange",
"lxml.etree.XML",
"functools.wraps",
"common.util.get_field_tuple",
"pytest.fail",
"datetime.datetime.now",
"itertools... | [((1138, 1228), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(not COMMODITIES_IMPLEMENTED)'], {'reason': '"""Commodities not implemented"""'}), "(not COMMODITIES_IMPLEMENTED, reason=\n 'Commodities not implemented')\n", (1156, 1228), False, 'import pytest\n'), ((1274, 1394), 'pytest.mark.skipif', 'pytest.mark.skip... |
import logging
import unittest
from pyinstrument import Profiler
from nuplan.planning.scenario_builder.nuplan_db.test.nuplan_scenario_test_utils import get_test_nuplan_scenario
from nuplan.planning.simulation.history.simulation_history_buffer import SimulationHistoryBuffer
from nuplan.planning.simulation.observation.... | [
"logging.getLogger",
"logging.basicConfig",
"nuplan.planning.scenario_builder.nuplan_db.test.nuplan_scenario_test_utils.get_test_nuplan_scenario",
"pyinstrument.Profiler",
"nuplan.planning.simulation.observation.idm_agents.IDMAgents",
"unittest.main"
] | [((465, 492), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (482, 492), False, 'import logging\n'), ((493, 532), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (512, 532), False, 'import logging\n'), ((2252, 2267), 'unittest.main'... |
"""
YTArray class.
"""
from __future__ import print_function
#-----------------------------------------------------------------------------
# Copyright (c) 2013, yt Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this so... | [
"numpy.bitwise_or",
"numpy.union1d",
"yt.units.dimensions.em_dimensions.get",
"numpy.hstack",
"yt.units.unit_object.UnitParseError",
"yt.units.unit_lookup_table.default_unit_symbol_lut.copy",
"yt.utilities.exceptions.YTInvalidUnitEquivalence",
"numpy.array",
"numpy.linalg.norm",
"copy.deepcopy",
... | [((2243, 2249), 'yt.units.unit_object.Unit', 'Unit', ([], {}), '()\n', (2247, 2249), False, 'from yt.units.unit_object import Unit, UnitParseError\n'), ((2777, 2812), 'yt.utilities.lru_cache.lru_cache', 'lru_cache', ([], {'maxsize': '(128)', 'typed': '(False)'}), '(maxsize=128, typed=False)\n', (2786, 2812), False, 'fr... |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not u... | [
"tvm.tir.serial",
"tvm.tir.Schedule",
"tvm.tir.block",
"tvm.tir.store",
"tvm.tir.match_buffer",
"tvm.tir.alloc_buffer",
"tvm.tir.bind",
"tvm.tir.grid",
"tvm.tir.writes",
"tvm.tir.reads",
"pytest.raises",
"tvm.tir.load",
"tvm.ir.assert_structural_equal",
"tvm.tir.where"
] | [((1063, 1094), 'tvm.tir.match_buffer', 'tir.match_buffer', (['a', '(128, 128)'], {}), '(a, (128, 128))\n', (1079, 1094), False, 'from tvm import tir\n'), ((1103, 1131), 'tvm.tir.alloc_buffer', 'tir.alloc_buffer', (['(128, 128)'], {}), '((128, 128))\n', (1119, 1131), False, 'from tvm import tir\n'), ((1140, 1171), 'tvm... |
from flask import Flask, Response, request, redirect
import subprocess
import tempfile
import json
import yaml
import signal
import threading
import time
import copy
app = Flask(__name__)
jobs_lock = threading.Lock()
jobs = []
class Job(threading.Thread):
def __init__(self, jobid, path, inputobj):
super... | [
"flask.request.args.get",
"json.loads",
"flask.Flask",
"flask.request.stream.read",
"threading.Lock",
"subprocess.Popen",
"json.dumps",
"yaml.load",
"time.sleep",
"flask.redirect",
"tempfile.mkdtemp",
"copy.copy",
"tempfile.mkstemp"
] | [((173, 188), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (178, 188), False, 'from flask import Flask, Response, request, redirect\n'), ((202, 218), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (216, 218), False, 'import threading\n'), ((2719, 2757), 'flask.redirect', 'redirect', (["('/jobs/%i'... |
from sys import argv
from PyPDF2 import PdfFileReader, PdfFileWriter
import re
range_pattern = re.compile(r'(\d+)(\.\.|-)(\d+)')
comma_pattern = re.compile('\d+(,\d+)*')
def pages_args_to_array(pages_str):
groups = range_pattern.search(pages_str)
if groups:
start = int(groups.group(1))
end = int(groups.group(3)... | [
"PyPDF2.PdfFileWriter",
"PyPDF2.PdfFileReader",
"re.compile"
] | [((97, 133), 're.compile', 're.compile', (['"""(\\\\d+)(\\\\.\\\\.|-)(\\\\d+)"""'], {}), "('(\\\\d+)(\\\\.\\\\.|-)(\\\\d+)')\n", (107, 133), False, 'import re\n'), ((147, 173), 're.compile', 're.compile', (['"""\\\\d+(,\\\\d+)*"""'], {}), "('\\\\d+(,\\\\d+)*')\n", (157, 173), False, 'import re\n'), ((845, 860), 'PyPDF2... |
import math
from vp import geom_tools
def horizon_error(ground_truth_horizon, detected_horizon, image_dims):
"""Calculates error in a detected horizon.
This measures the max distance between the detected horizon line and
the ground truth horizon line, within the image's x-axis, and
normalized by ima... | [
"vp.geom_tools.get_line_angle",
"vp.geom_tools.point_to_point_dist",
"math.log"
] | [((1771, 1862), 'vp.geom_tools.get_line_angle', 'geom_tools.get_line_angle', (['(principal_point[0], principal_point[1], gt_vp[0], gt_vp[1])'], {}), '((principal_point[0], principal_point[1], gt_vp[0],\n gt_vp[1]))\n', (1796, 1862), False, 'from vp import geom_tools\n'), ((1899, 1990), 'vp.geom_tools.get_line_angle'... |
import numpy as np
import argparse
import composition
import os
import json
import torch
from spinup.algos.pytorch.ppo.core import MLPActorCritic
from spinup.algos.pytorch.ppo.ppo import ppo
from spinup.utils.run_utils import setup_logger_kwargs
from spinup.utils.mpi_tools import proc_id, num_procs
def parse_args()... | [
"argparse.ArgumentParser",
"spinup.utils.run_utils.setup_logger_kwargs",
"spinup.utils.mpi_tools.num_procs",
"os.path.join",
"torch.set_num_threads",
"numpy.random.seed",
"composition.make",
"spinup.utils.mpi_tools.proc_id",
"json.dump"
] | [((335, 360), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (358, 360), False, 'import argparse\n'), ((1997, 2022), 'numpy.random.seed', 'np.random.seed', (['args.seed'], {}), '(args.seed)\n', (2011, 2022), True, 'import numpy as np\n'), ((2998, 3022), 'torch.set_num_threads', 'torch.set_num_t... |
"""igvm - The command line interface
Copyright (c) 2017 InnoGames GmbH
"""
from __future__ import print_function
from argparse import ArgumentParser, _SubParsersAction
from logging import StreamHandler, root as root_logger
import time
from fabric.network import disconnect_all
from igvm.commands import (
change_... | [
"igvm.libvirt.close_virtconns",
"fabric.network.disconnect_all",
"time.sleep",
"logging.root.getChild",
"logging.root.setLevel"
] | [((15035, 15062), 'logging.root.setLevel', 'root_logger.setLevel', (['level'], {}), '(level)\n', (15055, 15062), True, 'from logging import StreamHandler, root as root_logger\n'), ((14255, 14271), 'fabric.network.disconnect_all', 'disconnect_all', ([], {}), '()\n', (14269, 14271), False, 'from fabric.network import dis... |
import torch
import torchvision.transforms as transforms
from torch.utils.data import Dataset
import glob
from PIL import Image
import random
class SUN397EncodableDataset(Dataset):
"""SUN397 encodable dataset class"""
def __init__(self, train=True):
super().__init__()
path = 'data/SUN397/trai... | [
"PIL.Image.open",
"random.shuffle",
"torch.is_tensor",
"torch.cuda.is_available",
"torchvision.transforms.Normalize",
"torchvision.transforms.Resize",
"torchvision.transforms.ToTensor",
"glob.glob"
] | [((422, 447), 'random.shuffle', 'random.shuffle', (['self.data'], {}), '(self.data)\n', (436, 447), False, 'import random\n'), ((987, 1007), 'torch.is_tensor', 'torch.is_tensor', (['idx'], {}), '(idx)\n', (1002, 1007), False, 'import torch\n'), ((397, 412), 'glob.glob', 'glob.glob', (['path'], {}), '(path)\n', (406, 41... |
import tensorflow
from tensorflow import keras
Model = keras.models.Model
Dense = keras.layers.Dense
Activation = keras.layers.Activation
Flatten = keras.layers.Flatten
BatchNormalization= keras.layers.BatchNormalization
Conv2D = tensorflow.keras.layers.Conv2D
AveragePooling2D = keras.layers.AveragePooling2D
I... | [
"tensorflow.keras.layers.add"
] | [((4292, 4327), 'tensorflow.keras.layers.add', 'tensorflow.keras.layers.add', (['[x, y]'], {}), '([x, y])\n', (4319, 4327), False, 'import tensorflow\n')] |
#!/usr/bin/env python3
import sys
import json
import rdflib
import rdflib.plugins.sparql as sparql
RELS_TO_DRAW = ['isWifeOf', 'isMotherOf', 'isFatherOf', 'isHusbandOf', 'isSpouseOf']
RELS_TO_INFER = ['hasGrandParent', 'isGrandParentOf', 'hasGreatGrandParent',
'isGreatGrandParentOf', 'isUncleOf', 'ha... | [
"sys.setrecursionlimit",
"json.dumps",
"rdflib.Graph",
"rdflib.plugins.sparql.prepareQuery",
"sys.exit",
"rdflib.Namespace"
] | [((884, 898), 'rdflib.Graph', 'rdflib.Graph', ([], {}), '()\n', (896, 898), False, 'import rdflib\n'), ((1029, 1055), 'rdflib.Namespace', 'rdflib.Namespace', (['fhkb_str'], {}), '(fhkb_str)\n', (1045, 1055), False, 'import rdflib\n'), ((1069, 1097), 'rdflib.Namespace', 'rdflib.Namespace', (['schema_str'], {}), '(schema... |
from os import environ
import psycopg2
from datetime import timedelta
from dotenv import load_dotenv
load_dotenv()
class Config(object):
""" app configuration class """
TESTING = False
CSRF_ENABLED = True
SECRET_KEY = environ.get('SECRET_KEY')
USER = environ.get('DB_USER')
PASSWORD = environ.g... | [
"datetime.timedelta",
"os.environ.get",
"dotenv.load_dotenv"
] | [((101, 114), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (112, 114), False, 'from dotenv import load_dotenv\n'), ((235, 260), 'os.environ.get', 'environ.get', (['"""SECRET_KEY"""'], {}), "('SECRET_KEY')\n", (246, 260), False, 'from os import environ\n'), ((273, 295), 'os.environ.get', 'environ.get', (['"""D... |
from __future__ import division
from math import sqrt as sqrt
from itertools import product as product
import torch
import numpy as np
import cv2
from lib.utils.visualize_utils import TBWriter
def vis(func):
"""tensorboard visualization if has writer as input"""
def wrapper(*args, **kw):
return func... | [
"cv2.rectangle",
"numpy.ones",
"numpy.hstack",
"torch.Tensor",
"math.sqrt",
"numpy.array",
"cv2.circle",
"copy.deepcopy",
"cv2.resize",
"cv2.imread"
] | [((5886, 5904), 'copy.deepcopy', 'copy.deepcopy', (['cfg'], {}), '(cfg)\n', (5899, 5904), False, 'import copy\n'), ((6247, 6265), 'copy.deepcopy', 'copy.deepcopy', (['cfg'], {}), '(cfg)\n', (6260, 6265), False, 'import copy\n'), ((6845, 6863), 'copy.deepcopy', 'copy.deepcopy', (['cfg'], {}), '(cfg)\n', (6858, 6863), Fa... |
##########################################################################
#
# Copyright (c) 2012-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redi... | [
"IECore.IgnoredExceptions",
"Gaffer.Path.info",
"Gaffer.FileSystemPath",
"IECore.FileSequence.fileNameValidator",
"IECore.findSequences"
] | [((2497, 2519), 'Gaffer.Path.info', 'Gaffer.Path.info', (['self'], {}), '(self)\n', (2513, 2519), False, 'import Gaffer\n'), ((3958, 4019), 'IECore.findSequences', 'IECore.findSequences', (['leafPathStrings', 'self.__minSequenceSize'], {}), '(leafPathStrings, self.__minSequenceSize)\n', (3978, 4019), False, 'import IEC... |
from PhysicsTools.Heppy.analyzers.core.Analyzer import Analyzer
from PhysicsTools.Heppy.analyzers.core.AutoHandle import AutoHandle
from PhysicsTools.Heppy.physicsobjects.Tau import Tau
from PhysicsTools.HeppyCore.utils.deltar import deltaR, matchObjectCollection3
import PhysicsTools.HeppyCore.framework.config as cfg... | [
"PhysicsTools.HeppyCore.framework.config.Analyzer",
"PhysicsTools.HeppyCore.utils.deltar.matchObjectCollection3",
"PhysicsTools.Heppy.analyzers.core.AutoHandle.AutoHandle"
] | [((6681, 7442), 'PhysicsTools.HeppyCore.framework.config.Analyzer', 'cfg.Analyzer', ([], {'class_object': 'TauAnalyzer', 'inclusive_ptMin': '(18)', 'inclusive_etaMax': '(9999)', 'inclusive_dxyMax': '(1000.0)', 'inclusive_dzMax': '(0.4)', 'inclusive_vetoLeptons': '(False)', 'inclusive_leptonVetoDR': '(0.4)', 'inclusive_... |
# <NAME> 170401038
import math
import random
r = 3271
def egcd(a,b):
if(a == 0):
return(b,0,1)
else:
c,d,e = egcd(b % a, a)
return(c, e - (b // a) * d, d)
def modInvert(a,b):
c,d,e = egcd(a,b)
if c != 1:
        raise Exception('modular inverse not found')
... | [
"math.gcd",
"random.randrange"
] | [((388, 426), 'random.randrange', 'random.randrange', (['(2 ** (n - 1))', '(2 ** n)'], {}), '(2 ** (n - 1), 2 ** n)\n', (404, 426), False, 'import random\n'), ((897, 923), 'random.randrange', 'random.randrange', (['(2)', '(p - 2)'], {}), '(2, p - 2)\n', (913, 923), False, 'import random\n'), ((1369, 1391), 'random.rand... |
import pytest
import time
import subprocess
from subprocess import run,Popen
from seldon_utils import *
from k8s_utils import *
def wait_for_shutdown(deploymentName):
ret = run("kubectl get deploy/"+deploymentName, shell=True)
while ret.returncode == 0:
time.sleep(1)
ret = run("kubectl get depl... | [
"subprocess.run",
"pytest.mark.usefixtures",
"time.sleep"
] | [((1033, 1078), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""seldon_java_images"""'], {}), "('seldon_java_images')\n", (1056, 1078), False, 'import pytest\n'), ((1080, 1138), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""single_namespace_seldon_ksonnet"""'], {}), "('single_namespace_seldon_... |
from django.db import models
from .query import BookQuerySet
class Book(models.Model):
objects = BookQuerySet.as_manager()
title = models.CharField(max_length=50)
publication_date = models.DateTimeField()
author = models.ForeignKey('Author')
genres = models.ManyToManyField('Genre')
class Autho... | [
"django.db.models.DateTimeField",
"django.db.models.ManyToManyField",
"django.db.models.CharField",
"django.db.models.ForeignKey"
] | [((143, 174), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (159, 174), False, 'from django.db import models\n'), ((198, 220), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (218, 220), False, 'from django.db import models\n'), ((234, 2... |
from matplotlib.pyplot import title
import streamlit as st
import pandas as pd
import altair as alt
import pydeck as pdk
import os
import glob
from wordcloud import WordCloud
import streamlit_analytics
path = os.path.dirname(__file__)
streamlit_analytics.start_tracking()
@st.cache
def load_gnd_top_daten(typ):
gn... | [
"pydeck.Layer",
"pandas.read_csv",
"altair.Chart",
"pydeck.Deck",
"pydeck.ViewState",
"altair.X",
"altair.Y",
"altair.Legend",
"streamlit.header",
"streamlit.title",
"streamlit.sidebar.info",
"streamlit_analytics.stop_tracking",
"streamlit.sidebar.header",
"pandas.DataFrame.from_dict",
"... | [((210, 235), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (225, 235), False, 'import os\n'), ((237, 273), 'streamlit_analytics.start_tracking', 'streamlit_analytics.start_tracking', ([], {}), '()\n', (271, 273), False, 'import streamlit_analytics\n'), ((16582, 16607), 'streamlit.title', 's... |
#<NAME> 150401052
import os
import sys
import time
from socket import *
from os import system, name
ip = '127.0.0.1'
port = 42
s_soket = socket(AF_INET, SOCK_DGRAM)
s_soket.bind((ip, port))
print("\nSunucu Hazir\n")
kontrol, istemciAdres = s_soket.recv... | [
"os.listdir",
"sys.exit"
] | [((702, 712), 'sys.exit', 'sys.exit', ([], {}), '()\n', (710, 712), False, 'import sys\n'), ((561, 573), 'os.listdir', 'os.listdir', ([], {}), '()\n', (571, 573), False, 'import os\n'), ((2831, 2841), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2839, 2841), False, 'import sys\n'), ((3005, 3015), 'sys.exit', 'sys.exit', ... |
"""
Scatter plot with panning and zooming
Shows a scatter plot of a set of random points,
with basic Chaco panning and zooming.
Interacting with the plot:
- Left-mouse-drag pans the plot.
- Mouse wheel up and down zooms the plot in and out.
- Pressing "z" brings up the Zoom Box, and you can click-drag a recta... | [
"traits.api.Instance",
"chaco.api.ArrayPlotData",
"chaco.tools.api.PanTool",
"chaco.tools.api.ZoomTool",
"numpy.random.random",
"chaco.api.Plot",
"enable.api.ComponentEditor"
] | [((1150, 1164), 'numpy.random.random', 'random', (['numpts'], {}), '(numpts)\n', (1156, 1164), False, 'from numpy.random import random\n'), ((1228, 1243), 'chaco.api.ArrayPlotData', 'ArrayPlotData', ([], {}), '()\n', (1241, 1243), False, 'from chaco.api import ArrayPlotData, Plot\n'), ((1334, 1342), 'chaco.api.Plot', '... |
import pandas as pd
import argparse
import json
try:
from graphviz import Digraph
except:
print("Note: Optional graphviz not installed")
def generate_graph(df, graph_format='pdf'):
g = Digraph('ModelFlow', filename='modelflow.gv', engine='neato', format=graph_format)
g.attr(overlap='false')
g.attr... | [
"argparse.ArgumentParser",
"pandas.DataFrame",
"pandas.read_csv",
"json.load",
"graphviz.Digraph"
] | [((199, 286), 'graphviz.Digraph', 'Digraph', (['"""ModelFlow"""'], {'filename': '"""modelflow.gv"""', 'engine': '"""neato"""', 'format': 'graph_format'}), "('ModelFlow', filename='modelflow.gv', engine='neato', format=\n graph_format)\n", (206, 286), False, 'from graphviz import Digraph\n'), ((1681, 1695), 'pandas.D... |
import discord
import os
import json
import datetime
import pandas as pd
from dateutil.relativedelta import relativedelta
from pprint import pprint
import base.ColorPrint as CPrint
import command.voice_log.Config_Main as CSetting
def most_old_Month() :
old_month = 1
labels = []
fileNameList = []
while True :
... | [
"os.path.exists",
"datetime.now",
"dateutil.relativedelta.relativedelta",
"datetime.datetime.strptime",
"base.ColorPrint.error_print",
"pandas.merge",
"json.load",
"datetime.datetime.today",
"traceback.print_exc",
"pandas.DataFrame",
"datetime.datetime.strftime",
"pprint.pprint"
] | [((4326, 4369), 'pandas.DataFrame', 'pd.DataFrame', (['df_dict'], {'index': 'members_IDlist'}), '(df_dict, index=members_IDlist)\n', (4338, 4369), True, 'import pandas as pd\n'), ((404, 446), 'datetime.datetime.strftime', 'datetime.datetime.strftime', (['filetime', '"""%m"""'], {}), "(filetime, '%m')\n", (430, 446), Fa... |
from abc import ABCMeta, abstractmethod
import os
from vmaf.tools.misc import make_absolute_path, run_process
from vmaf.tools.stats import ListStats
__copyright__ = "Copyright 2016-2018, Netflix, Inc."
__license__ = "Apache, Version 2.0"
import re
import numpy as np
import ast
from vmaf import ExternalProgramCaller,... | [
"vmaf.ExternalProgramCaller.call_vmaf_feature",
"vmaf.core.result.Result",
"vmaf.ExternalProgramCaller.call_ssim",
"numpy.hstack",
"re.match",
"ast.literal_eval",
"numpy.array",
"vmaf.ExternalProgramCaller.call_psnr",
"numpy.isnan",
"numpy.vstack",
"vmaf.ExternalProgramCaller.call_ms_ssim",
"n... | [((1573, 1607), 'vmaf.core.result.Result', 'Result', (['asset', 'executor_id', 'result'], {}), '(asset, executor_id, result)\n', (1579, 1607), False, 'from vmaf.core.result import Result\n'), ((6044, 6146), 'vmaf.ExternalProgramCaller.call_vmaf_feature', 'ExternalProgramCaller.call_vmaf_feature', (['yuv_type', 'ref_pat... |
# -*- coding: utf-8 -*-
"""
Created by susy at 2019/11/8
"""
from dao.dao import DataDao
import pytz
from dao.models import PanAccounts
from cfg import PAN_SERVICE, MASTER_ACCOUNT_ID
class BaseService:
def __init__(self):
self.default_tz = pytz.timezone('Asia/Chongqing')
# self.pan_acc: PanAccoun... | [
"pytz.timezone"
] | [((255, 286), 'pytz.timezone', 'pytz.timezone', (['"""Asia/Chongqing"""'], {}), "('Asia/Chongqing')\n", (268, 286), False, 'import pytz\n')] |
import os, time, argparse
from datetime import datetime
from pm4py.objects.log.importer.csv import factory as csv_importer
from pm4py.objects.log.exporter.xes import factory as xes_exporter
from pm4py.objects.log.importer.xes import factory as xes_importer
from pm4py.objects.petri.importer import pnml as pnml_importer... | [
"datetime.datetime.fromtimestamp",
"argparse.ArgumentParser",
"pm4py.objects.log.importer.xes.factory.import_log",
"pm4py.evaluation.precision.factory.apply",
"os.path.join",
"pm4py.objects.log.exporter.xes.factory.export_log",
"time.sleep",
"pm4py.objects.log.importer.csv.factory.import_event_log",
... | [((526, 537), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (535, 537), False, 'import os, time, argparse\n'), ((2537, 2562), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2560, 2562), False, 'import os, time, argparse\n'), ((4595, 4608), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (460... |
# $ python embed.py
from ctypes import cdll
lib = cdll.LoadLibrary("../target/release/libembed.dylib") #=> for Mac
#lib = cdll.LoadLibrary("../target/release/libembed.so") #=> for Linux
lib.process()
print("done!")
| [
"ctypes.cdll.LoadLibrary"
] | [((52, 104), 'ctypes.cdll.LoadLibrary', 'cdll.LoadLibrary', (['"""../target/release/libembed.dylib"""'], {}), "('../target/release/libembed.dylib')\n", (68, 104), False, 'from ctypes import cdll\n')] |
#!/usr/bin/env python
import unittest
from rdflib.graph import ConjunctiveGraph
from rdflib.term import URIRef, Literal
from rdflib.graph import Graph
class TestTrixSerialize(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testSerialize(self):
s1 = URIRef... | [
"StringIO.StringIO",
"rdflib.term.URIRef",
"rdflib.term.Literal",
"rdflib.graph.Graph",
"rdflib.graph.ConjunctiveGraph",
"unittest.main"
] | [((1684, 1699), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1697, 1699), False, 'import unittest\n'), ((314, 331), 'rdflib.term.URIRef', 'URIRef', (['"""store:1"""'], {}), "('store:1')\n", (320, 331), False, 'from rdflib.term import URIRef, Literal\n'), ((343, 363), 'rdflib.term.URIRef', 'URIRef', (['"""resour... |
from sklearn.linear_model import LogisticRegression
from fightchurn.listings.chap8.listing_8_2_logistic_regression import prepare_data, save_regression_model
from fightchurn.listings.chap8.listing_8_2_logistic_regression import save_regression_summary, save_dataset_predictions
def regression_cparam(data_set_path, C_pa... | [
"fightchurn.listings.chap8.listing_8_2_logistic_regression.prepare_data",
"sklearn.linear_model.LogisticRegression",
"fightchurn.listings.chap8.listing_8_2_logistic_regression.save_regression_summary",
"fightchurn.listings.chap8.listing_8_2_logistic_regression.save_dataset_predictions",
"fightchurn.listings... | [((336, 363), 'fightchurn.listings.chap8.listing_8_2_logistic_regression.prepare_data', 'prepare_data', (['data_set_path'], {}), '(data_set_path)\n', (348, 363), False, 'from fightchurn.listings.chap8.listing_8_2_logistic_regression import prepare_data, save_regression_model\n'), ((381, 468), 'sklearn.linear_model.Logi... |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# -----------------------------------------------------------
# created 02.02.2021, tkaulke
# <NAME>, <EMAIL>
# https://github.com/kaulketh
# -----------------------------------------------------------
__author__ = "<NAME>"
__email__ = "<EMAIL>"
import errno
import logging
... | [
"logging.getLogger",
"os.makedirs",
"logging.Formatter",
"os.path.join",
"os.path.isdir",
"logging.config.fileConfig",
"os.path.abspath"
] | [((498, 534), 'os.path.join', 'os.path.join', (['this_folder', '"""../logs"""'], {}), "(this_folder, '../logs')\n", (510, 534), False, 'import os\n'), ((1027, 1062), 'os.path.join', 'os.path.join', (['this_folder', 'ini_file'], {}), '(this_folder, ini_file)\n', (1039, 1062), False, 'import os\n'), ((1063, 1117), 'loggi... |
"""
"""
import unittest
from example_module import COLORS, increment
class ExampleTest(unittest.TestCase):
"""
#TODO
"""
def test_increment(self):
x0 = 0
y0 = increment(x0) #y0 == 1
self.assertEqual(y0, 1)
x1 = 100
y1 = increment(x1) #y1 == 101
se... | [
"example_module.increment"
] | [((195, 208), 'example_module.increment', 'increment', (['x0'], {}), '(x0)\n', (204, 208), False, 'from example_module import COLORS, increment\n'), ((285, 298), 'example_module.increment', 'increment', (['x1'], {}), '(x1)\n', (294, 298), False, 'from example_module import COLORS, increment\n')] |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import json
from pathlib import Path
import numpy as np
import torch
from PIL import Image
from panopticapi.utils import rgb2id
# from util.box_ops import masks_to_boxes
from .construction import make_construction_transforms
import logging
def... | [
"torch.as_tensor",
"PIL.Image.open",
"pathlib.Path",
"torch.stack",
"panopticapi.utils.rgb2id",
"torch.tensor",
"numpy.array",
"json.load",
"logging.error"
] | [((422, 444), 'torch.stack', 'torch.stack', (['b'], {'dim': '(-1)'}), '(b, dim=-1)\n', (433, 444), False, 'import torch\n'), ((1007, 1045), 'torch.tensor', 'torch.tensor', (['boxes'], {'dtype': 'torch.int64'}), '(boxes, dtype=torch.int64)\n', (1019, 1045), False, 'import torch\n'), ((1059, 1098), 'torch.tensor', 'torch... |
#!/usr/bin/python
'''
memory class
stored in sqlite data base
holds raw input and memories in parse taged columns
'''
import sys
import re
import sqlite3
import os
from datetime import date, datetime
from pattern.en import parse
from pattern.en import pprint
from pattern.en import parsetree
from pattern.en import ... | [
"datetime.datetime.now",
"sqlite3.connect"
] | [((537, 582), 'sqlite3.connect', 'sqlite3.connect', (["(dir + 'robbie_memory.sqlite')"], {}), "(dir + 'robbie_memory.sqlite')\n", (552, 582), False, 'import sqlite3\n'), ((2301, 2315), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2313, 2315), False, 'from datetime import date, datetime\n')] |
from datetime import datetime
from difflib import unified_diff
from logging import basicConfig, getLogger, INFO
import os
from pathlib import Path
import shutil
import subprocess
import sys
import yaml
from urllib.parse import urlparse
from notebook import notebookapp
from IPython.core.display import HTML
WORKDIR = ... | [
"logging.getLogger",
"subprocess.check_output",
"logging.basicConfig",
"yaml.safe_dump",
"urllib.parse.urlparse",
"IPython.core.display.HTML",
"shutil.copy2",
"subprocess.check_call",
"pathlib.Path",
"subprocess.run",
"notebook.notebookapp.list_running_servers",
"difflib.unified_diff",
"yaml... | [((487, 506), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (496, 506), False, 'from logging import basicConfig, getLogger, INFO\n'), ((507, 552), 'logging.basicConfig', 'basicConfig', ([], {'level': 'INFO', 'format': '"""%(message)s"""'}), "(level=INFO, format='%(message)s')\n", (518, 552), Fal... |
from operator import attrgetter
import logging
import os
import shutil
import subprocess
import pyfastaq
import pymummer
from cluster_vcf_records import vcf_record
from varifier import utils
# We only want the .snps file from the dnadiff script from MUMmer. From reading
# the docs inspecting that script, we need to ... | [
"subprocess.check_output",
"operator.attrgetter",
"pyfastaq.sequences.Fasta",
"shutil.copyfileobj",
"varifier.utils.file_to_dict_of_seqs",
"pymummer.snp_file.get_all_variants",
"os.unlink",
"pyfastaq.sequences.file_reader",
"logging.info"
] | [((768, 831), 'subprocess.check_output', 'subprocess.check_output', (['f"""rm -f {delta} {delta_1}"""'], {'shell': '(True)'}), "(f'rm -f {delta} {delta_1}', shell=True)\n", (791, 831), False, 'import subprocess\n'), ((1299, 1315), 'os.unlink', 'os.unlink', (['delta'], {}), '(delta)\n', (1308, 1315), False, 'import os\n... |
from __future__ import absolute_import
import datetime
from dateutil import parser
import pytz
from .base import FieldFilter, DictFilterMixin, DjangoQueryFilterMixin
from .queryfilter import QueryFilter
WHOLE_DAY = datetime.timedelta(days=1)
ONE_SECOND = datetime.timedelta(seconds=1)
@QueryFilter.register_type_c... | [
"datetime.datetime.max.replace",
"dateutil.parser.parse",
"datetime.timedelta",
"datetime.datetime.min.replace"
] | [((220, 246), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (238, 246), False, 'import datetime\n'), ((260, 289), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(1)'}), '(seconds=1)\n', (278, 289), False, 'import datetime\n'), ((2032, 2078), 'datetime.datetime.min.repla... |
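A hedged sketch of how a WHOLE_DAY constant like the one above is typically combined with dateutil parsing to turn a single date into a half-open [start, end) range; the helper name is an assumption.

import datetime
from dateutil import parser

WHOLE_DAY = datetime.timedelta(days=1)

def day_range(date_string):
    # Parse the date, drop the time-of-day, and span exactly one day.
    start = parser.parse(date_string).replace(hour=0, minute=0, second=0, microsecond=0)
    return start, start + WHOLE_DAY

start, end = day_range('2021-06-01')
print(start, end)  # 2021-06-01 00:00:00  2021-06-02 00:00:00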
"""
This module implements the Request class which is used to represent HTTP
requests in Scrapy.
See documentation in docs/topics/request-response.rst
"""
from w3lib.url import safe_url_string
from scrapy.http.headers import Headers
from scrapy.utils.python import to_bytes
from scrapy.utils.trackref import object_ref... | [
"scrapy.utils.python.to_bytes",
"scrapy.utils.url.escape_ajax",
"scrapy.http.headers.Headers",
"scrapy.utils.curl.curl_to_request_kwargs",
"scrapy.http.common.obsolete_setter",
"w3lib.url.safe_url_string"
] | [((1442, 1483), 'scrapy.http.headers.Headers', 'Headers', (['(headers or {})'], {'encoding': 'encoding'}), '(headers or {}, encoding=encoding)\n', (1449, 1483), False, 'from scrapy.http.headers import Headers\n'), ((2183, 2218), 'w3lib.url.safe_url_string', 'safe_url_string', (['url', 'self.encoding'], {}), '(url, self... |
# HomeAssistant Status Output
# Publishes the provided sensor key and value pair to a HomeAssistant instance
import logging
import time
from ww import f
logger = logging.getLogger(__name__.rsplit(".")[-1])
class HASSStatus:
import threading
import requests
apiKey = None
config = None
configCo... | [
"ww.f",
"threading.Lock",
"time.time",
"time.sleep"
] | [((599, 615), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (613, 615), False, 'import threading\n'), ((2385, 2418), 'time.sleep', 'time.sleep', (['self.msgRateInSeconds'], {}), '(self.msgRateInSeconds)\n', (2395, 2418), False, 'import time\n'), ((6495, 6506), 'time.time', 'time.time', ([], {}), '()\n', (6504, ... |
from flask import Blueprint, jsonify, request, render_template
home_routes = Blueprint("home_routes", __name__)
@home_routes.route("/")
def index():
users = User.query.all()
return render_template('base.html', title='Home',
users=users)
@home_routes.route("/about")
def a... | [
"flask.render_template",
"flask.Blueprint"
] | [((79, 113), 'flask.Blueprint', 'Blueprint', (['"""home_routes"""', '__name__'], {}), "('home_routes', __name__)\n", (88, 113), False, 'from flask import Blueprint, jsonify, request, render_template\n'), ((197, 252), 'flask.render_template', 'render_template', (['"""base.html"""'], {'title': '"""Home"""', 'users': 'use... |
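A minimal, self-contained example of registering a Flask blueprint like home_routes on an application; the blueprint and route names below are stand-ins, not the original project's.

from flask import Blueprint, Flask

pages = Blueprint('pages', __name__)

@pages.route('/about')
def about():
    return 'About page'

app = Flask(__name__)
app.register_blueprint(pages)  # the blueprint's routes now belong to the app

if __name__ == '__main__':
    app.run(debug=True)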
import copy
import functools
import itertools
import numbers
import warnings
from collections import defaultdict
from datetime import timedelta
from distutils.version import LooseVersion
from typing import (
Any,
Dict,
Hashable,
Mapping,
Optional,
Sequence,
Tuple,
TypeVar,
Union,
)
... | [
"numpy.prod",
"numpy.ma.getmaskarray",
"numpy.logical_not",
"numpy.asanyarray",
"copy.deepcopy",
"copy.copy",
"numpy.asarray",
"functools.wraps",
"numpy.concatenate",
"numpy.datetime64",
"warnings.warn",
"dask.array.from_array",
"numpy.isnan",
"numpy.nonzero",
"numpy.timedelta64",
"war... | [((1294, 1335), 'typing.TypeVar', 'TypeVar', (['"""VariableType"""'], {'bound': '"""Variable"""'}), "('VariableType', bound='Variable')\n", (1301, 1335), False, 'from typing import Any, Dict, Hashable, Mapping, Optional, Sequence, Tuple, TypeVar, Union\n'), ((7749, 7765), 'numpy.asarray', 'np.asarray', (['data'], {}), ... |
# -*- coding: utf-8 -*-
#
# Copyright (c) The PyAMF Project.
# See LICENSE.txt for details.
"""
Tests for AMF utilities.
@since: 0.1.0
"""
import unittest
from datetime import datetime
from io import BytesIO
import pyamf
from pyamf import util
from pyamf.tests.util import replace_dict
PosInf = 1e300000
NegInf = -... | [
"datetime.datetime",
"pyamf.util.get_timestamp",
"pyamf.util.BufferedByteStream",
"pyamf.util.is_class_sealed",
"pyamf.ALIAS_TYPES.copy",
"pyamf.register_alias_type",
"pyamf.util.get_class_alias",
"pyamf.tests.util.replace_dict",
"pyamf.util.get_class_meta",
"pyamf.util.get_datetime"
] | [((1062, 1102), 'datetime.datetime', 'datetime', (['(2009)', '(3)', '(8)', '(23)', '(30)', '(47)', '(770122)'], {}), '(2009, 3, 8, 23, 30, 47, 770122)\n', (1070, 1102), False, 'from datetime import datetime\n'), ((1116, 1138), 'pyamf.util.get_timestamp', 'util.get_timestamp', (['dt'], {}), '(dt)\n', (1134, 1138), False... |
# Copyright (c) 2013 Intel, Inc.
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/li... | [
"nova.exception.PciDevicePoolEmpty",
"nova.objects.InstancePCIRequest",
"nova.exception.PciDeviceRequestFailed",
"nova.objects.pci_device_pool.from_pci_stats",
"nova.pci.whitelist.Whitelist",
"copy.deepcopy",
"nova.pci.utils.pci_device_prop_match",
"oslo_log.log.getLogger"
] | [((1061, 1088), 'oslo_log.log.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1078, 1088), True, 'from oslo_log import log as logging\n'), ((30746, 30783), 'nova.objects.pci_device_pool.from_pci_stats', 'pci_device_pool.from_pci_stats', (['stats'], {}), '(stats)\n', (30776, 30783), False, 'from no... |
import multiple
multiple.rename("C:/Users/Username/Desktop",'new_name',33,'.exe')
"""this above lines renames all the files of the folder Desktop to 'new_name' and
count starts from 33 to further (we can also provide 1 to start it from 1) and
extension is given '.exe'
hence the files will be renamed like :
1. new_n... | [
"multiple.rename"
] | [((17, 85), 'multiple.rename', 'multiple.rename', (['"""C:/Users/Username/Desktop"""', '"""new_name"""', '(33)', '""".exe"""'], {}), "('C:/Users/Username/Desktop', 'new_name', 33, '.exe')\n", (32, 85), False, 'import multiple\n')] |
import os
def readlinkabs(l):
"""
Return an absolute path for the destination
of a symlink
"""
if not (os.path.islink(l)):
return None
p = os.readlink(l)
if os.path.isabs(p):
return p
return os.path.join(os.path.dirname(l), p)
| [
"os.path.dirname",
"os.path.islink",
"os.readlink",
"os.path.isabs"
] | [((173, 187), 'os.readlink', 'os.readlink', (['l'], {}), '(l)\n', (184, 187), False, 'import os\n'), ((195, 211), 'os.path.isabs', 'os.path.isabs', (['p'], {}), '(p)\n', (208, 211), False, 'import os\n'), ((125, 142), 'os.path.islink', 'os.path.islink', (['l'], {}), '(l)\n', (139, 142), False, 'import os\n'), ((254, 27... |
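Since readlinkabs above is shown in full, a short usage sketch run in the same module as that function; it assumes a platform where unprivileged symlinks are allowed.

import os
import tempfile

with tempfile.TemporaryDirectory() as d:
    target = os.path.join(d, 'target.txt')
    link = os.path.join(d, 'link.txt')
    open(target, 'w').close()
    os.symlink('target.txt', link)   # relative link target
    print(readlinkabs(link))          # absolute path to target.txt
    print(readlinkabs(target))        # None, because target.txt is not a symlink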
"""Generic functionality useful for all gene representations.
This module contains classes which can be used for all the different
types of patterns available for representing gene information (ie. motifs,
signatures and schemas). These are the general classes which should be
handle any of the different specific patte... | [
"Bio.Seq.Seq",
"random.choice",
"Bio.utils.verify_alphabet"
] | [((5776, 5809), 'random.choice', 'random.choice', (['self._pattern_list'], {}), '(self._pattern_list)\n', (5789, 5809), False, 'import random\n'), ((3497, 3530), 'Bio.Seq.Seq', 'Seq', (['pattern_item', 'self._alphabet'], {}), '(pattern_item, self._alphabet)\n', (3500, 3530), False, 'from Bio.Seq import Seq, MutableSeq\... |
# -*- test-case-name: epsilon.test.test_juice -*-
# Copyright 2005 Divmod, Inc. See LICENSE file for details
import warnings, pprint
import keyword
import io
import six
from twisted.internet.main import CONNECTION_LOST
from twisted.internet.defer import Deferred, maybeDeferred, fail
from twisted.internet.protocol im... | [
"epsilon.extime.Time.fromISO8601TimeAndDate",
"six.viewitems",
"epsilon.compat.long",
"twisted.internet.ssl.Certificate.peerFromTransport",
"twisted.python.log.err",
"twisted.python.log.msg",
"twisted.internet.defer.maybeDeferred",
"six.add_metaclass",
"io.BytesIO",
"six.ensure_binary",
"six.ens... | [((18926, 18957), 'six.add_metaclass', 'six.add_metaclass', (['_CommandMeta'], {}), '(_CommandMeta)\n', (18943, 18957), False, 'import six\n'), ((8656, 8679), 'keyword.iskeyword', 'keyword.iskeyword', (['lkey'], {}), '(lkey)\n', (8673, 8679), False, 'import keyword\n'), ((9859, 9866), 'epsilon.compat.long', 'long', (['... |
'''
<NAME>
set up :2020-1-9
integrate img and label into one file
-- fiducial1024_v1
'''
import argparse
import sys, os
import pickle
import random
import collections
import json
import numpy as np
import scipy.io as io
import scipy.misc as m
import matplotlib.pyplot as plt
import glob
import math
import time
impo... | [
"pickle.dumps",
"numpy.array",
"numpy.linalg.norm",
"os.path.exists",
"os.listdir",
"numpy.full_like",
"argparse.ArgumentParser",
"numpy.linspace",
"numpy.dot",
"numpy.concatenate",
"numpy.meshgrid",
"random.randint",
"numpy.random.normal",
"numpy.abs",
"random.choice",
"numpy.ones",
... | [((595, 610), 'os.listdir', 'os.listdir', (['dir'], {}), '(dir)\n', (605, 610), False, 'import sys, os\n'), ((44019, 44132), 'threading.Thread', 'threading.Thread', ([], {'target': 'saveFold.save_img', 'args': "(m, n, 'fold', repeat_time, 'relativeShift_v2')", 'name': '"""fold"""'}), "(target=saveFold.save_img, args=(m... |
#!/usr/bin/env python
import argparse
import logging
try:
import ujson as json
except ImportError:
import json
import sys
import datetime
import os
import importlib
from gnip_tweet_evaluation import analysis,output
"""
Perform audience and/or conversation analysis on a set of Tweets.
"""
logger = logging.g... | [
"logging.getLogger",
"logging.StreamHandler",
"argparse.ArgumentParser",
"gnip_tweet_evaluation.output.dump_results",
"gnip_tweet_evaluation.analysis.setup_analysis",
"gnip_tweet_evaluation.analysis.deserialize_tweets",
"datetime.datetime.now",
"gnip_tweet_evaluation.analysis.compare_results",
"gnip... | [((311, 340), 'logging.getLogger', 'logging.getLogger', (['"""analysis"""'], {}), "('analysis')\n", (328, 340), False, 'import logging\n'), ((390, 413), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (411, 413), False, 'import logging\n'), ((461, 486), 'argparse.ArgumentParser', 'argparse.ArgumentP... |
from __future__ import print_function
import numpy as np
import matplotlib.pyplot as plt
class TwoLayerNet(object):
"""
A two-layer fully-connected neural network. The net has an input dimension
of N, a hidden layer dimension of H, and performs classification over C
classes.
We train the network... | [
"numpy.random.choice",
"numpy.max",
"numpy.exp",
"numpy.sum",
"numpy.zeros",
"numpy.maximum",
"numpy.random.randn",
"numpy.arange"
] | [((1558, 1579), 'numpy.zeros', 'np.zeros', (['hidden_size'], {}), '(hidden_size)\n', (1566, 1579), True, 'import numpy as np\n'), ((1684, 1705), 'numpy.zeros', 'np.zeros', (['output_size'], {}), '(output_size)\n', (1692, 1705), True, 'import numpy as np\n'), ((3464, 3486), 'numpy.maximum', 'np.maximum', (['(0)', 'score... |
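The np.maximum call in this row's extract is the ReLU hidden layer of the two-layer net; a compact stand-alone forward pass in the same style, where the sizes and the 1e-4 weight scale are assumptions:

import numpy as np

rng = np.random.RandomState(0)
N, D, H, C = 5, 4, 10, 3                     # samples, input dim, hidden dim, classes

X = rng.randn(N, D)
W1, b1 = 1e-4 * rng.randn(D, H), np.zeros(H)
W2, b2 = 1e-4 * rng.randn(H, C), np.zeros(C)

hidden = np.maximum(0, X.dot(W1) + b1)       # ReLU activation
scores = hidden.dot(W2) + b2                # class scores, shape (N, C)

# Softmax with the usual max subtraction for numerical stability.
exp_scores = np.exp(scores - np.max(scores, axis=1, keepdims=True))
probs = exp_scores / np.sum(exp_scores, axis=1, keepdims=True)
print(probs.shape)                           # (5, 3)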
from django.test import TestCase
from dynamic_setting.models import Setting
class SettingTestCase(TestCase):
def _create_setting(self, name, **kwargs):
return Setting.objects.create(name=name, **kwargs)
def test_create_setting(self):
""" Test Creating a new Setting. """
name = 'T... | [
"dynamic_setting.models.Setting.objects.create",
"dynamic_setting.models.Setting.objects.get"
] | [((178, 221), 'dynamic_setting.models.Setting.objects.create', 'Setting.objects.create', ([], {'name': 'name'}), '(name=name, **kwargs)\n', (200, 221), False, 'from dynamic_setting.models import Setting\n'), ((2228, 2262), 'dynamic_setting.models.Setting.objects.get', 'Setting.objects.get', ([], {'pk': 'setting.pk'}), ... |
"""Sensor for data from Austrian Zentralanstalt für Meteorologie."""
from __future__ import annotations
import logging
import voluptuous as vol
from homeassistant.components.weather import (
ATTR_WEATHER_HUMIDITY,
ATTR_WEATHER_PRESSURE,
ATTR_WEATHER_TEMPERATURE,
ATTR_WEATHER_WIND_BEARING,
ATTR_WE... | [
"logging.getLogger",
"voluptuous.Optional",
"voluptuous.Inclusive"
] | [((897, 924), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (914, 924), False, 'import logging\n'), ((982, 1005), 'voluptuous.Optional', 'vol.Optional', (['CONF_NAME'], {}), '(CONF_NAME)\n', (994, 1005), True, 'import voluptuous as vol\n'), ((1026, 1055), 'voluptuous.Optional', 'vol.Opti... |
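A small stand-alone illustration of the vol.Optional / vol.Inclusive validators this row lists; the key names and default are assumptions, and vol.Inclusive simply requires that keys sharing a group appear together.

import voluptuous as vol

SCHEMA = vol.Schema({
    vol.Optional('name', default='zamg'): str,
    vol.Inclusive('latitude', 'coordinates'): float,
    vol.Inclusive('longitude', 'coordinates'): float,
})

# Valid: both coordinate keys are present, so the Inclusive group is satisfied.
print(SCHEMA({'name': 'vienna', 'latitude': 48.2, 'longitude': 16.4}))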
import copy
import glob
import hashlib
import logging
import os
import shutil
from subprocess import CalledProcessError, DEVNULL, check_output # skipcq:BAN-B404
import tempfile
import typing
from pathlib import Path
from typing import Any, Text, Tuple, Union, Optional, List, Dict, NamedTuple
from packaging import ver... | [
"logging.getLogger",
"tarfile.open",
"logging.debug",
"rasa.utils.common.TempDirectoryPath",
"copy.deepcopy",
"copy.copy",
"rasa.exceptions.ModelNotFound",
"os.path.exists",
"shutil.move",
"pathlib.Path",
"rasa.cli.utils.create_output_path",
"os.path.isdir",
"packaging.version.parse",
"os.... | [((937, 964), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (954, 964), False, 'import logging\n'), ((4264, 4289), 'os.path.isdir', 'os.path.isdir', (['model_path'], {}), '(model_path)\n', (4277, 4289), False, 'import os\n'), ((6295, 6331), 'rasa.utils.common.TempDirectoryPath', 'TempDir... |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Jul 16 11:20:01 2021
@author: q
GOAL : develop a backtester from a .py framework / library
# installation :
pip install backtesting
# Documentation
Index :
- Manuals
- Tutorials
- Example Strategies
- FAQ
... | [
"backtesting.set_bokeh_output",
"backtesting.Backtest"
] | [((1079, 1123), 'backtesting.set_bokeh_output', 'backtesting.set_bokeh_output', ([], {'notebook': '(False)'}), '(notebook=False)\n', (1107, 1123), False, 'import backtesting\n'), ((2231, 2318), 'backtesting.Backtest', 'Backtest', (['GOOG', 'PriceAboveSMA'], {'commission': '(0.002)', 'exclusive_orders': '(True)', 'cash'... |
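A hedged, self-contained strategy for the same backtesting library, showing the pieces the extract references (Backtest with commission and exclusive_orders); the 20-period SMA rule is an assumption, not the original strategy.

from backtesting import Backtest, Strategy
from backtesting.test import GOOG, SMA   # sample data and indicator shipped with the package

class PriceAboveSMA(Strategy):
    n = 20  # assumed moving-average window

    def init(self):
        self.sma = self.I(SMA, self.data.Close, self.n)

    def next(self):
        # Long while price closes above the SMA, flat otherwise.
        if self.data.Close[-1] > self.sma[-1]:
            if not self.position:
                self.buy()
        elif self.position:
            self.position.close()

bt = Backtest(GOOG, PriceAboveSMA, cash=10_000, commission=0.002, exclusive_orders=True)
print(bt.run())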
# Copyright (c) 2012 <NAME> y otros.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the
# Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish,
# ... | [
"pygments.lexers.get_lexer_for_filename",
"os.path.splitext",
"os.path.join",
"nikola.utils.slugify",
"nikola.utils.config_changed",
"os.path.basename",
"pygments.lexers.TextLexer",
"os.walk",
"os.path.relpath"
] | [((3391, 3421), 'os.walk', 'os.walk', (["kw['listings_folder']"], {}), "(kw['listings_folder'])\n", (3398, 3421), False, 'import os\n'), ((3502, 3559), 'os.path.join', 'os.path.join', (["kw['output_folder']", 'root', "kw['index_file']"], {}), "(kw['output_folder'], root, kw['index_file'])\n", (3514, 3559), False, 'impo... |
import numpy as np
from scipy import ndimage
def erode_value_blobs(array, steps=1, values_to_ignore=tuple(), new_value=0):
unique_values = list(np.unique(array))
all_entries_to_keep = np.zeros(shape=array.shape, dtype=np.bool)
for unique_value in unique_values:
entries_of_this_value = array == uni... | [
"numpy.unique",
"scipy.ndimage.binary_erosion",
"numpy.logical_not",
"numpy.logical_or",
"numpy.zeros"
] | [((194, 236), 'numpy.zeros', 'np.zeros', ([], {'shape': 'array.shape', 'dtype': 'np.bool'}), '(shape=array.shape, dtype=np.bool)\n', (202, 236), True, 'import numpy as np\n'), ((150, 166), 'numpy.unique', 'np.unique', (['array'], {}), '(array)\n', (159, 166), True, 'import numpy as np\n'), ((766, 801), 'numpy.logical_n... |
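A usage sketch for erode_value_blobs as declared above; the expected behaviour (shrinking each labelled blob while leaving ignored values alone) is inferred from the function name and the ndimage.binary_erosion call in the extract, so treat the comment as an assumption.

import numpy as np

# A tiny 1-D label image with two blobs (values 1 and 2) on a background of 0.
labels = np.array([0, 1, 1, 1, 0, 2, 2, 2, 2, 0])

eroded = erode_value_blobs(labels, steps=1, values_to_ignore=(0,), new_value=0)
print(eroded)  # boundary entries of each blob are expected to become 0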
# coding: utf-8
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Test the Logarithmic Units and Quantities
"""
from __future__ import (absolute_import, unicode_literals, division,
print_function)
from ...extern import six
from ...extern.six.moves import zip
import pickle... | [
"numpy.abs",
"numpy.power",
"pickle.dumps",
"itertools.product",
"numpy.square",
"pytest.mark.parametrize",
"numpy.linspace",
"numpy.array",
"pytest.raises",
"numpy.testing.utils.assert_allclose",
"pickle.loads",
"numpy.all",
"numpy.arange"
] | [((1241, 1285), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""lu_unit"""', 'lu_units'], {}), "('lu_unit', lu_units)\n", (1264, 1285), False, 'import pytest\n'), ((6355, 6399), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""lu_unit"""', 'lu_units'], {}), "('lu_unit', lu_units)\n", (6378, 6399)... |
import os
import numpy as np
import tensorflow as tf
def get_train_data(train_dir, batch_size):
train_images = np.load(os.path.join(train_dir, 'train_images.npy'))
train_labels = np.load(os.path.join(train_dir, 'train_labels.npy'))
print('train_images', train_images.shape, 'train_labels', train_labels.sha... | [
"os.path.join",
"tensorflow.data.Dataset.from_tensor_slices"
] | [((345, 409), 'tensorflow.data.Dataset.from_tensor_slices', 'tf.data.Dataset.from_tensor_slices', (['(train_images, train_labels)'], {}), '((train_images, train_labels))\n', (379, 409), True, 'import tensorflow as tf\n'), ((799, 861), 'tensorflow.data.Dataset.from_tensor_slices', 'tf.data.Dataset.from_tensor_slices', (... |
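A self-contained version of this input pipeline with tiny in-memory arrays standing in for the .npy files; the shapes, shuffle buffer and batch size are assumptions.

import numpy as np
import tensorflow as tf

# Stand-ins for train_images.npy / train_labels.npy.
train_images = np.random.rand(100, 28, 28, 1).astype('float32')
train_labels = np.random.randint(0, 10, size=(100,))

dataset = (
    tf.data.Dataset.from_tensor_slices((train_images, train_labels))
    .shuffle(buffer_size=100)
    .batch(32)
)

for images, labels in dataset.take(1):
    print(images.shape, labels.shape)  # (32, 28, 28, 1) (32,)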
"""
Collection of tests asserting things that should be true for
any index subclass. Makes use of the `indices` fixture defined
in pandas/tests/indexes/conftest.py.
"""
import re
import numpy as np
import pytest
from pandas._libs.tslibs import iNaT
from pandas.core.dtypes.common import is_period_dtype, needs_i8_conv... | [
"pandas.Series",
"pytest.mark.filterwarnings",
"numpy.sort",
"pandas._testing.round_trip_pickle",
"pandas._testing.assert_index_equal",
"pytest.mark.parametrize",
"pandas._testing.assert_equal",
"pandas.core.dtypes.common.needs_i8_conversion",
"pytest.raises",
"numpy.concatenate",
"pandas._testi... | [((18137, 18193), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""na_position"""', "[None, 'middle']"], {}), "('na_position', [None, 'middle'])\n", (18160, 18193), False, 'import pytest\n'), ((19004, 19061), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""na_position"""', "['first', 'last']"], {... |
# Copyright 2020, <NAME>, mailto:<EMAIL>
#
# Part of "Nuitka", an optimizing Python compiler that is compatible and
# integrates with CPython, but also works on its own.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the Lice... | [
"math.copysign",
"math.isnan"
] | [((1947, 1968), 'math.copysign', 'math.copysign', (['(1.0)', 'a'], {}), '(1.0, a)\n', (1960, 1968), False, 'import math\n'), ((1972, 1993), 'math.copysign', 'math.copysign', (['(1.0)', 'b'], {}), '(1.0, b)\n', (1985, 1993), False, 'import math\n'), ((2032, 2045), 'math.isnan', 'math.isnan', (['a'], {}), '(a)\n', (2042,... |
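The copysign/isnan pairing in this row is the usual way to compare signs while treating signed zeros correctly and guarding against NaN; a tiny stand-alone illustration (the helper name is mine):

import math

def same_sign(a, b):
    # NaN has no meaningful sign, so treat it separately.
    if math.isnan(a) or math.isnan(b):
        return False
    # copysign(1.0, x) distinguishes -0.0 from +0.0, unlike a plain x > 0 test.
    return math.copysign(1.0, a) == math.copysign(1.0, b)

print(same_sign(0.0, -0.0))           # False: the zeros carry different signs
print(same_sign(-3.5, -0.0))          # True
print(same_sign(float('nan'), 1.0))   # False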
# Copyright (c) 2012-2013 <NAME> http://garnaat.org/
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.... | [
"logging.getLogger",
"botocore.utils.get_environ_proxies",
"botocore.utils.is_valid_endpoint_url",
"botocore.awsrequest.create_request_object",
"threading.Lock",
"botocore.history.get_global_history_recorder",
"os.environ.get",
"time.sleep",
"botocore.hooks.first_non_none_response",
"botocore.resp... | [((1122, 1149), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1139, 1149), False, 'import logging\n'), ((1169, 1198), 'botocore.history.get_global_history_recorder', 'get_global_history_recorder', ([], {}), '()\n', (1196, 1198), False, 'from botocore.history import get_global_history_re... |
from numpy import genfromtxt
import matplotlib.pyplot as plt
import mpl_finance
import numpy as np
import uuid
import matplotlib
# Input your csv file here with historical data
ad = genfromtxt(f"../financial_data/SM.csv", delimiter=",", dtype=str)
def convolve_sma(array, period):
return np.convolve(array, np.on... | [
"mpl_finance.candlestick2_ochl",
"numpy.ones",
"matplotlib.pyplot.clf",
"uuid.uuid4",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.autoscale",
"matplotlib.pyplot.axis",
"matplotlib.pyplot.cla",
"numpy.genfromtxt"
] | [((184, 249), 'numpy.genfromtxt', 'genfromtxt', (['f"""../financial_data/SM.csv"""'], {'delimiter': '""","""', 'dtype': 'str'}), "(f'../financial_data/SM.csv', delimiter=',', dtype=str)\n", (194, 249), False, 'from numpy import genfromtxt\n'), ((1909, 1980), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'num': '(1)',... |
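The convolve_sma helper in this row is cut off mid-line; below is a plain-NumPy version of the simple-moving-average convolution it appears to implement, where the 'valid' mode is an assumption.

import numpy as np

def convolve_sma(array, period):
    # Average each window of `period` samples; 'valid' drops the ramp-up edges.
    return np.convolve(array, np.ones(period) / period, mode='valid')

closes = np.array([10.0, 11.0, 12.0, 13.0, 14.0, 15.0])
print(convolve_sma(closes, 3))  # [11. 12. 13. 14.]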
"""
The Tornado Framework
By <NAME>
University of Ottawa, Ontario, Canada
E-mail: apesaran -at- uottawa -dot- ca / alipsgh -at- gmail -dot- com
"""
import re
from data_structures.attribute import Attribute
from dictionary.tornado_dictionary import TornadoDic
class ARFFReader:
"""This class is used... | [
"re.sub",
"data_structures.attribute.Attribute"
] | [((970, 981), 'data_structures.attribute.Attribute', 'Attribute', ([], {}), '()\n', (979, 981), False, 'from data_structures.attribute import Attribute\n'), ((2108, 2132), 're.sub', 're.sub', (['"""\\\\s+"""', '""""""', 'line'], {}), "('\\\\s+', '', line)\n", (2114, 2132), False, 'import re\n')] |
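The re.sub call in this row's extract collapses whitespace before an @attribute line is parsed; a stand-alone illustration of that normalisation step (the ARFF-style line below is an assumption, not data from the original reader):

import re

line = '@attribute  outlook   {sunny, overcast, rainy}'

# Remove every run of whitespace so the later name/domain split is predictable.
compact = re.sub(r'\s+', '', line)
print(compact)  # @attributeoutlook{sunny,overcast,rainy}

name, domain = compact[len('@attribute'):].split('{', 1)
print(name, '->', '{' + domain)  # outlook -> {sunny,overcast,rainy}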