_id
stringlengths
98
184
text
stringlengths
91
10.9k
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/optimizer/linesearch/hager_zhang.py#L420-L542
def _line_search_after_bracketing( value_and_gradients_function, search_interval, val_0, f_lim, max_iterations, sufficient_decrease_param, curvature_param, shrinkage_param): """ """ def _loop_cond(curr_interval): """Loop condition.""" active = ~(curr_interval.converged | c...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/ftp_hook.py#L152-L223
def retrieve_file( self, remote_full_path, local_full_path_or_buffer, callback=None): """ """ conn = self.get_conn() is_path = isinstance(local_full_path_or_buffer, basestring) # without a callback, default to writing to ...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/layers/conv_variational.py#L249-L284
def compute_output_shape(self, input_shape): """ """ input_shape = tf.TensorShape(input_shape).as_list() if self.data_format == 'channels_last': space = input_shape[1:-1] new_space = [] for i in range(len(space)): new_dim = tf_layers_util.conv_output_length( space[i...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/sts/internal/missing_values_util.py#L137-L162
def initial_value_of_masked_time_series(time_series_tensor, broadcast_mask): """ """ num_timesteps = tf.shape(input=time_series_tensor)[-1] # Compute the index of the first unmasked entry for each series in the batch. unmasked_negindices = ( tf.cast(~broadcast_mask, tf.int32) * tf.range(num_time...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/static_config.py#L29-L68
def configure_manifest_files(app): """ """ def parse_manifest_json(): # noinspection PyBroadException try: global manifest manifest_file = os.path.join(os.path.dirname(__file__), 'static/dist/manifest.json') w...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/S3_hook.py#L185-L202
def check_for_key(self, key, bucket_name=None): """ """ if not bucket_name: (bucket_name, key) = self.parse_s3_url(key) try: self.get_conn().head_object(Bucket=bucket_name, Key=key) return True except ClientError as e: sel...
https://github.com/pytorch/vision/blob/3afcf3cd49661c466c75ea536b0b2a7ff57f9a05/torchvision/datasets/utils.py#L115-L136
def list_files(root, suffix, prefix=False): """ """ root = os.path.expanduser(root) files = list( filter( lambda p: os.path.isfile(os.path.join(root, p)) and p.endswith(suffix), os.listdir(root) ) ) if prefix is True: files = [os.path.join(root, d...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/druid_hook.py#L127-L139
def get_conn(self): """ """ conn = self.get_connection(self.druid_broker_conn_id) druid_broker_conn = connect( host=conn.host, port=conn.port, path=conn.extra_dejson.get('endpoint', '/druid/v2/sql'), scheme=conn.extra_dejson.get('s...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/sts/semilocal_linear_trend.py#L241-L263
def semilocal_linear_trend_transition_matrix(autoregressive_coef): """""" # We want to write the following 2 x 2 matrix: # [[1., 1., ], # level(t+1) = level(t) + slope(t) # [0., ar_coef], # slope(t+1) = ar_coef * slope(t) # but it's slightly tricky to properly incorporate the batch shape of # autoregr...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/log/s3_task_handler.py#L127-L144
def s3_read(self, remote_log_location, return_error=False): """ """ try: return self.hook.read_key(remote_log_location) except Exception: msg = 'Could not read logs from {}'.format(remote_log_location) self.log.exception(msg) # ret...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sqoop_hook.py#L92-L116
def Popen(self, cmd, **kwargs): """ """ masked_cmd = ' '.join(self.cmd_mask_password(cmd)) self.log.info("Executing command: {}".format(masked_cmd)) self.sp = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, ...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/mlengine_operator.py#L29-L62
def _normalize_mlengine_job_id(job_id): """ """ # Add a prefix when a job_id starts with a digit or a template match = re.search(r'\d|\{{2}', job_id) if match and match.start() == 0: job = 'z_{}'.format(job_id) else: job = job_id # Clean up 'bad' characters except temp...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/wasb_hook.py#L153-L191
def delete_file(self, container_name, blob_name, is_prefix=False, ignore_if_missing=False, **kwargs): """ """ if is_prefix: blobs_to_delete = [ blob.name for blob in self.connection.list_blobs( container_name, prefix=b...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/dbapi_hook.py#L132-L166
def run(self, sql, autocommit=False, parameters=None): """ """ if isinstance(sql, basestring): sql = [sql] with closing(self.get_conn()) as conn: if self.supports_autocommit: self.set_autocommit(conn, autocommit) with closing...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/salesforce_hook.py#L186-L293
def write_object_to_file(self, query_results, filename, fmt="csv", coerce_to_timestamp=False, record_time_added=False): """ """ fmt = fmt.lowe...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_natural_language_hook.py#L44-L53
def get_conn(self): """ """ if not self._conn: self._conn = LanguageServiceClient(credentials=self._get_credentials()) return self._conn
https://github.com/pytorch/vision/blob/3afcf3cd49661c466c75ea536b0b2a7ff57f9a05/torchvision/transforms/functional.py#L644-L677
def adjust_gamma(img, gamma, gain=1): """ if not _is_pil_image(img): raise TypeError('img should be PIL Image. Got {}'.format(type(img))) if gamma < 0: raise ValueError('Gamma should be a non-negative real number') input_mode = img.mode img = img.convert('RGB') gamma_map ...
https://github.com/pytorch/vision/blob/3afcf3cd49661c466c75ea536b0b2a7ff57f9a05/torchvision/datasets/mnist.py#L132-L164
def download(self): """""" if self._check_exists(): return makedir_exist_ok(self.raw_folder) makedir_exist_ok(self.processed_folder) # download files for url in self.urls: filename = url.rpartition('/')[2] file_path = os.path.join(se...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/bin/cli.py#L763-L868
def restart_workers(gunicorn_master_proc, num_workers_expected, master_timeout): """ """ def wait_until_true(fn, timeout=0): """ Sleeps until fn is true """ t = time.time() while not fn(): if 0 < timeout <= time.time() - t: raise Airf...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcs_hook.py#L515-L550
def compose(self, bucket_name, source_objects, destination_object): """ """ if not source_objects or not len(source_objects): raise ValueError('source_objects cannot be empty.') if not bucket_name or not destination_object: raise ValueError('bucket_name...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/gcp_transfer_operator.py#L106-L110
def _convert_date_to_dict(field_date): """ """ return {DAY: field_date.day, MONTH: field_date.month, YEAR: field_date.year}
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/dag.py#L1494-L1526
def create_dagrun(self, run_id, state, execution_date, start_date=None, external_trigger=False, conf=None, session=None): """ """ re...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/distributions/joint_distribution_sequential.py#L475-L487
def _get_required_args(fn): """""" argspec = tf_inspect.getfullargspec(fn) args = argspec.args if tf_inspect.isclass(fn): args = args[1:] # Remove the `self` arg. if argspec.defaults: # Remove the args which have defaults. By convention we only feed # *required args*. This means some distribution...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/dbapi_hook.py#L55-L63
def get_conn(self): """ """ db = self.get_connection(getattr(self, self.conn_name_attr)) return self.connector.connect( host=db.host, port=db.port, username=db.login, schema=db.schema)
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/distributions/vector_diffeomixture.py#L958-L972
def softmax(x, axis, name=None): """""" with tf.name_scope(name or "softmax"): x = tf.convert_to_tensor(value=x, name="x") ndims = ( tensorshape_util.rank(x.shape) if tensorshape_util.rank(x.shape) is not None else tf.rank( x, name="ndims")) axis = tf.convert_to_tensor(value=...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/azure_fileshare_hook.py#L83-L99
def list_directories_and_files(self, share_name, directory_name=None, **kwargs): """ """ return self.connection.list_directories_and_files(share_name, directory_name, **kw...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sftp_hook.py#L174-L187
def retrieve_file(self, remote_full_path, local_full_path): """ """ conn = self.get_conn() self.log.info('Retrieving file from FTP: %s', remote_full_path) conn.get(remote_full_path, local_full_path) self.log.info('Finished retrieving file from FTP: %s', remote_fu...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/dataflow_operator.py#L363-L380
def execute(self, context): """""" bucket_helper = GoogleCloudBucketHelper( self.gcp_conn_id, self.delegate_to) self.py_file = bucket_helper.google_cloud_to_local(self.py_file) hook = DataFlowHook(gcp_conn_id=self.gcp_conn_id, delegate_to=self.dele...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/jobs.py#L473-L504
def done(self): """ """ if self._process is None: raise AirflowException("Tried to see if it's done before starting!") if self._done: return True # In case result queue is corrupted. if self._result_queue and not self._result_queue.empty...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/distributions/hidden_markov_model.py#L926-L933
def _extract_log_probs(num_states, dist): """""" states = tf.reshape(tf.range(num_states), tf.concat([[num_states], tf.ones_like(dist.batch_shape_tensor())], axis=0)) return distribution_util.move_dimension(dist.log_prob(state...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/bigquery_hook.py#L1832-L1843
def executemany(self, operation, seq_of_parameters): """ """ for parameters in seq_of_parameters: self.execute(operation, parameters)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/macros/hive.py#L58-L80
def _closest_date(target_dt, date_list, before_target=None): """ """ fb = lambda d: target_dt - d if d <= target_dt else datetime.timedelta.max fa = lambda d: d - target_dt if d >= target_dt else datetime.timedelta.max fnone = lambda d: target_dt - d if d < target_dt else d - target_dt if b...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_spanner_hook.py#L247-L288
def update_database(self, instance_id, database_id, ddl_statements, project_id=None, operation_id=None): """ """ instance = self._get_client(project_id=project_id).instance( instance_id=instance_id) if not instance.exi...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/layers/distribution_layer.py#L1912-L1930
def _get_convert_to_tensor_fn(identifier): """""" if identifier is None: return None if isinstance(identifier, six.string_types): identifier = str(identifier) return _deserialize(identifier) if isinstance(identifier, dict): return _deserialize(identifier) if isinstance(identifier, property)...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L1271-L1301
def xcom_push( self, key, value, execution_date=None): """ """ if execution_date and execution_date < self.execution_date: raise ValueError( 'execution_date can not be in the past (current ' 'ex...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/views.py#L2481-L2490
def get_query(self): """ """ return ( super().get_query() .filter(or_(models.DagModel.is_active, models.DagModel.is_paused)) .filter(~models.DagModel.is_subdag) )
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/snowflake_hook.py#L115-L129
def _get_aws_credentials(self): """ """ if self.snowflake_conn_id: connection_object = self.get_connection(self.snowflake_conn_id) if 'aws_secret_access_key' in connection_object.extra_dejson: aws_access_key_id = connection_object.extra_dejson.get...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/bijectors/fill_triangular.py#L135-L153
def vector_size_to_square_matrix_size(d, validate_args, name=None): """""" if isinstance(d, (float, int, np.generic, np.ndarray)): n = (-1 + np.sqrt(1 + 8 * d)) / 2. if float(int(n)) != n: raise ValueError("Vector length is not a triangular number.") return int(n) else: with tf.name_scope(na...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/optimizer/linesearch/hager_zhang.py#L545-L576
def _line_search_inner_bisection( value_and_gradients_function, search_interval, active, f_lim): """""" midpoint = (search_interval.left.x + search_interval.right.x) / 2 val_mid = value_and_gradients_function(midpoint) is_valid_mid = hzl.is_finite(val_mid) still_active = active & is_valid_mid...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_spanner_hook.py#L291-L325
def delete_database(self, instance_id, database_id, project_id=None): """ """ instance = self._get_client(project_id=project_id).\ instance(instance_id=instance_id) if not instance.exists(): raise AirflowException("The instance {} does not exist in proje...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/azure_data_lake_hook.py#L41-L53
def get_conn(self): """""" conn = self.get_connection(self.conn_id) service_options = conn.extra_dejson self.account_name = service_options.get('account_name') adlCreds = lib.auth(tenant_id=service_options.get('tenant'), client_secret=conn.password, ...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sagemaker_hook.py#L627-L686
def check_status(self, job_name, key, describe_function, check_interval, max_ingestion_time, non_terminal_states=None): """ """ if not non_terminal_states: non_terminal_states = self.non_terminal_states ...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/jobs.py#L897-L954
def _process_task_instances(self, dag, queue, session=None): """ """ # update the state of the previously active dag runs dag_runs = DagRun.find(dag_id=dag.dag_id, state=State.RUNNING, session=session) active_dag_runs = [] for run in dag_runs: self.l...
https://github.com/pytorch/vision/blob/3afcf3cd49661c466c75ea536b0b2a7ff57f9a05/torchvision/datasets/utils.py#L41-L51
def makedir_exist_ok(dirpath): """ """ try: os.makedirs(dirpath) except OSError as e: if e.errno == errno.EEXIST: pass else: raise
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/dagbag.py#L274-L303
def kill_zombies(self, zombies, session=None): """ """ from airflow.models.taskinstance import TaskInstance # Avoid circular import for zombie in zombies: if zombie.dag_id in self.dags: dag = self.dags[zombie.dag_id] if zombie.task_i...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dates.py#L36-L111
def date_range(start_date, end_date=None, num=None, delta=None): """ """ if not delta: return [] if end_date and start_date > end_date: raise Exception("Wait. start_date needs to be before end_date") if end_date and num: raise Exception("Wait. Either specify end_date OR ...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/stats/sample_stats.py#L284-L462
def covariance(x, y=None, sample_axis=0, event_axis=-1, keepdims=False, name=None): """ """ with tf.compat.v1.name_scope( name, 'covariance', values=[x, y, event_axis, sample_axis]): x = tf.convert_to_tensor(value=x, name='x') ...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/api/experimental/endpoints.py#L114-L131
def dag_runs(dag_id): """ """ try: state = request.args.get('state') dagruns = get_dag_runs(dag_id, state) except AirflowException as err: _log.info(err) response = jsonify(error="{}".format(err)) response.status_code = 400 return response return...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/azure_cosmos_hook.py#L101-L122
def create_collection(self, collection_name, database_name=None): """ """ if collection_name is None: raise AirflowBadRequest("Collection name cannot be None.") # We need to check to see if this container already exists so we don't try # to create it twice ...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/distributions/uniform.py#L214-L242
def _kl_uniform_uniform(a, b, name=None): """ """ with tf.name_scope(name or "kl_uniform_uniform"): # Consistent with # http://www.mast.queensu.ca/~communications/Papers/gil-msc11.pdf, page 60 # Watch out for the change in conventions--they use 'a' and 'b' to refer to # lower and upper bounds resp...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/bigquery_hook.py#L2071-L2076
def _validate_value(key, value, expected_type): """ """ if not isinstance(value, expected_type): raise TypeError("{} argument must have a type {} not {}".format( key, expected_type, type(value)))
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/decorators.py#L61-L94
def gzipped(f): """ """ @functools.wraps(f) def view_func(*args, **kwargs): @after_this_request def zipper(response): accept_encoding = request.headers.get('Accept-Encoding', '') if 'gzip' not in accept_encoding.lower(): return response ...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/hive_hooks.py#L300-L372
def load_df( self, df, table, field_dict=None, delimiter=',', encoding='utf8', pandas_kwargs=None, **kwargs): """ """ def _infer_field_types_from_df(df): DTYPE_KIND_HIVE_TYPE = { ...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/examples/logistic_regression.py#L134-L152
def build_input_pipeline(x, y, batch_size): """ """ training_dataset = tf.data.Dataset.from_tensor_slices((x, y)) training_batches = training_dataset.repeat().batch(batch_size) training_iterator = tf.compat.v1.data.make_one_shot_iterator(training_batches) batch_features, batch_labels = training_iterator.get...
https://github.com/asciimoo/searx/blob/a84caa22cf947e973c10aa968d35fb2bdda6d048/searx/webapp.py#L668-L725
def preferences(): """""" # save preferences if request.method == 'POST': resp = make_response(redirect(urljoin(settings['server']['base_url'], url_for('index')))) try: request.preferences.parse_form(request.form) except ValidationException: request.errors.ap...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/distributions/finite_discrete.py#L248-L297
def _maybe_validate_args(outcomes, logits, probs, validate_args): """""" assertions = [] def validate_equal_last_dim(tensor_a, tensor_b, message): if tensor_a.shape.is_fully_defined() and tensor_b.shape.is_fully_defined(): if tensor_a.shape[-1] != tensor_b.shape[-1]: raise ValueError(message) ...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/experimental/no_u_turn_sampler/logistic_regression.py#L58-L64
def logistic_regression(features): """""" coeffs = ed.MultivariateNormalDiag( loc=tf.zeros(features.shape[1]), name="coeffs") labels = ed.Bernoulli( logits=tf.tensordot(features, coeffs, [[1], [0]]), name="labels") return labels
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/internal/special_math.py#L186-L288
def _ndtri(p): """""" # Constants used in piece-wise rational approximations. Taken from the cephes # library: # https://root.cern.ch/doc/v608/SpecFuncCephesInv_8cxx_source.html p0 = list(reversed([-5.99633501014107895267E1, 9.80010754185999661536E1, -5.66762857469...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_kms_hook.py#L58-L83
def encrypt(self, key_name, plaintext, authenticated_data=None): """ """ keys = self.get_conn().projects().locations().keyRings().cryptoKeys() body = {'plaintext': _b64encode(plaintext)} if authenticated_data: body['additionalAuthenticatedData'] = _b64encode(...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/edward2/interceptor.py#L175-L195
def interceptable(func): """ """ @functools.wraps(func) def func_wrapped(*args, **kwargs): with get_next_interceptor() as interceptor: return interceptor(func, *args, **kwargs) return func_wrapped
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/file.py#L42-L59
def mkdirs(path, mode): """ """ try: o_umask = os.umask(0) os.makedirs(path, mode) except OSError: if not os.path.isdir(path): raise finally: os.umask(o_umask)
https://github.com/pytorch/vision/blob/3afcf3cd49661c466c75ea536b0b2a7ff57f9a05/torchvision/datasets/utils.py#L139-L171
def download_file_from_google_drive(file_id, root, filename=None, md5=None): """ """ # Based on https://stackoverflow.com/questions/38511444/python-download-files-from-google-drive-using-url import requests url = "https://docs.google.com/uc?export=download" root = os.path.expanduser(root) i...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/mssql_to_gcs.py#L201-L211
def _upload_to_gcs(self, files_to_upload): """ """ hook = GoogleCloudStorageHook( google_cloud_storage_conn_id=self.google_cloud_storage_conn_id, delegate_to=self.delegate_to) for object_name, tmp_file_handle in files_to_upload.items(): hook.u...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/pool.py#L60-L69
def open_slots(self, session): """ """ from airflow.models.taskinstance import \ TaskInstance as TI # Avoid circular import used_slots = session.query(func.count()).filter(TI.pool == self.pool).filter( TI.state.in_([State.RUNNING, State.QUEUED])).scalar...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/internal/backend/numpy/math.py#L191-L202
def _reduce_logsumexp(input_tensor, axis=None, keepdims=False, name=None): # pylint: disable=unused-argument """""" try: return scipy_special.logsumexp( input_tensor, axis=_astuple(axis), keepdims=keepdims) except NotImplementedError: # We offer a non SP version just in case SP isn't installed an...
https://github.com/soimort/you-get/blob/b746ac01c9f39de94cac2d56f665285b0523b974/src/you_get/extractors/wanmen.py#L69-L84
def wanmen_download_by_course_topic_part(json_api_content, tIndex, pIndex, output_dir='.', merge=True, info_only=False, **kwargs): """""" html = json_api_content title = _wanmen_get_title_by_json_topic_part(html, tIndex, ...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sagemaker_hook.py#L134-L155
def tar_and_s3_upload(self, path, key, bucket): """ """ with tempfile.TemporaryFile() as temp_file: if os.path.isdir(path): files = [os.path.join(path, name) for name in os.listdir(path)] else: files = [path] with tarfi...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/wasb_hook.py#L84-L100
def load_file(self, file_path, container_name, blob_name, **kwargs): """ """ # Reorder the argument order from airflow.hooks.S3_hook.load_file. self.connection.create_blob_from_path(container_name, blob_name, file_path, **kwargs)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/slack_webhook_operator.py#L84-L99
def execute(self, context): """ """ self.hook = SlackWebhookHook( self.http_conn_id, self.webhook_token, self.message, self.attachments, self.channel, self.username, self.icon_emoji, self.lin...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/sensors/aws_glue_catalog_partition_sensor.py#L83-L93
def get_hook(self): """ """ if not hasattr(self, 'hook'): from airflow.contrib.hooks.aws_glue_catalog_hook import AwsGlueCatalogHook self.hook = AwsGlueCatalogHook( aws_conn_id=self.aws_conn_id, region_name=self.region_name) ...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/executors/kubernetes_executor.py#L493-L511
def _make_safe_label_value(string): """ """ MAX_LABEL_LEN = 63 safe_label = re.sub(r'^[^a-z0-9A-Z]*|[^a-zA-Z0-9_\-\.]|[^a-z0-9A-Z]*$', '', string) if len(safe_label) > MAX_LABEL_LEN or string != safe_label: safe_hash = hashlib.md5(string.encode()).hexdigest...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/examples/disentangled_vae.py#L887-L927
def sample_dynamic_prior(self, samples, batch_size, length, fixed=False): """ """ if fixed: sample_batch_size = 1 else: sample_batch_size = batch_size sample, state = self.dynamic_prior.zero_state([samples, sample_batch_size]) locs = [] scale_diags = [] sample_list = [] ...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/sts/fitting.py#L267-L282
def _minimize_in_graph(build_loss_fn, num_steps=200, optimizer=None): """""" optimizer = tf.compat.v1.train.AdamOptimizer( 0.1) if optimizer is None else optimizer def train_loop_body(step): train_op = optimizer.minimize( build_loss_fn if tf.executing_eagerly() else build_loss_fn()) return ...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcs_hook.py#L477-L513
def insert_object_acl(self, bucket_name, object_name, entity, role, user_project=None): """ """ self.log.info('Creating a new ACL entry for object: %s in bucket: %s', object_name, bucket_name) client = self.get_conn() bucket = client.bucket(bucket_n...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/distributions/vector_exponential_linear_operator.py#L278-L287
def _mode_mean_shape(self): """""" shape = tensorshape_util.concatenate(self.batch_shape, self.event_shape) has_static_shape = tensorshape_util.is_fully_defined(shape) if not has_static_shape: shape = tf.concat([ self.batch_shape_tensor(), self.event_shape_tensor(), ], 0)...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L690-L822
def _check_and_change_state_before_execution( self, verbose=True, ignore_all_deps=False, ignore_depends_on_past=False, ignore_task_deps=False, ignore_ti_state=False, mark_success=False, test_mode=False, job_id=No...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/distributions/hidden_markov_model.py#L908-L911
def _log_vector_matrix(vs, ms): """""" return tf.reduce_logsumexp(input_tensor=vs[..., tf.newaxis] + ms, axis=-2)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/configuration.py#L309-L344
def getsection(self, section): """ """ if (section not in self._sections and section not in self.airflow_defaults._sections): return None _section = copy.deepcopy(self.airflow_defaults._sections[section]) if section in self._sections: ...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/layers/distribution_layer.py#L1309-L1349
def _make_kl_divergence_fn( distribution_b, use_exact_kl=False, test_points_reduce_axis=(), # `None` == "all"; () == "none". test_points_fn=tf.convert_to_tensor, weight=None): """""" if use_exact_kl is None: kl_divergence_fn = tfd.kl_divergence else: # Closure over: test_points_fn, t...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/mcmc/slice_sampler_kernel.py#L381-L537
def _sample_next(target_log_prob_fn, current_state_parts, step_sizes, max_doublings, current_target_log_prob, batch_rank, seed=None, name=None): """ """ with tf.compat.v1.name_scope(name, 'sample...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/examples/disentangled_vae.py#L1055-L1078
def summarize_mean_in_nats_and_bits(inputs, units, name, nats_name_scope="nats", bits_name_scope="bits_per_dim"): """ """ mean = tf.reduce_mean(input_tensor=inputs) with tf.compat.v1.name_scope(nats_name_scope): tf.compat.v2.summary.sca...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/webhdfs_hook.py#L56-L79
def get_conn(self): """ """ connections = self.get_connections(self.webhdfs_conn_id) for connection in connections: try: self.log.debug('Trying namenode %s', connection.host) client = self._get_client(connection) clien...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_sql_hook.py#L205-L226
def create_database(self, instance, body, project_id=None): """ """ response = self.get_conn().databases().insert( project=project_id, instance=instance, body=body ).execute(num_retries=self.num_retries) operation_name = response["name...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/slack_webhook_hook.py#L121-L134
def execute(self): """ """ proxies = {} if self.proxy: # we only need https proxy for Slack, as the endpoint is https proxies = {'https': self.proxy} slack_message = self._build_slack_message() self.run(endpoint=self.webhook_token, ...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/gcp_transfer_operator.py#L113-L117
def _convert_time_to_dict(time): """ """ return {HOURS: time.hour, MINUTES: time.minute, SECONDS: time.second}
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_vision_hook.py#L182-L207
def update_product_set( self, product_set, location=None, product_set_id=None, update_mask=None, project_id=None, retry=None, timeout=None, metadata=None, ): """ """ client = self.get_conn() product_set ...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sagemaker_hook.py#L325-L366
def create_training_job(self, config, wait_for_completion=True, print_log=True, check_interval=30, max_ingestion_time=None): """ """ self.check_training_config(config) response = self.get_conn().create_training_job(**config) if print_log: ...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/bigquery_hook.py#L1341-L1353
def get_schema(self, dataset_id, table_id):
    """Fetch the schema of a BigQuery table.

    Issues a ``tables().get`` request through the hook's API client and
    extracts the ``schema`` field from the returned tables resource.

    :param dataset_id: ID of the dataset that contains the table.
    :param table_id: ID of the table whose schema is requested.
    :return: the ``schema`` portion of the tables resource (a dict).
    """
    request = self.service.tables().get(
        projectId=self.project_id,
        datasetId=dataset_id,
        tableId=table_id)
    # num_retries makes the HTTP call resilient to transient failures.
    table_resource = request.execute(num_retries=self.num_retries)
    return table_resource['schema']
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_athena_hook.py#L74-L89
def check_query_status(self, query_execution_id): """ """ response = self.conn.get_query_execution(QueryExecutionId=query_execution_id) state = None try: state = response['QueryExecution']['Status']['State'] except Exception as ex: self.lo...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/util/docstring.py#L30-L51
def expand_docstring(**kwargs): """ """ def _fn_wrapped(fn): """Original function with modified `__doc__` attribute.""" doc = inspect.cleandoc(fn.__doc__) for k, v in six.iteritems(kwargs): # Capture each ${k} reference to replace with v. # We wrap the replacement in a function so no backs...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/experimental/fun_mcmc/fun_mcmc_lib.py#L181-L239
def transform_log_prob_fn(log_prob_fn: PotentialFn, bijector: BijectorNest, init_state: State = None ) -> Union[PotentialFn, Tuple[PotentialFn, State]]: """ """ def wrapper(*args): """Transformed wrapper.""" bijector_ = bijector...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/executors/kubernetes_executor.py#L458-L470
def _strip_unsafe_kubernetes_special_chars(string): """ """ return ''.join(ch.lower() for ind, ch in enumerate(string) if ch.isalnum())
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/examples/disentangled_vae.py#L216-L229
def call(self, inputs):
    """Return the layer's distribution; ``inputs`` is ignored.

    The distribution is built purely from ``self.loc`` and
    ``self.scale_diag``, so the incoming tensor is discarded.
    """
    del inputs  # output does not depend on the layer input
    with tf.compat.v1.name_scope(self._name):
        dist = tfd.MultivariateNormalDiag(self.loc, self.scale_diag)
        return dist
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/distributions/lkj.py#L371-L410
def _log_unnorm_prob(self, x, name=None): """ """ with tf.name_scope(name or 'log_unnorm_prob_lkj'): x = tf.convert_to_tensor(value=x, name='x') # The density is det(matrix) ** (concentration - 1). # Computing the determinant with `logdet` is usually fine, since # correlation matrice...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcs_hook.py#L340-L358
def get_crc32c(self, bucket_name, object_name): """ """ self.log.info('Retrieving the crc32c checksum of ' 'object_name: %s in bucket_name: %s', object_name, bucket_name) client = self.get_conn() bucket = client.get_bucket(bucket_name=bucket_name) ...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/dagbag.py#L341-L396
def collect_dags( self, dag_folder=None, only_if_updated=True, include_examples=configuration.conf.getboolean('core', 'LOAD_EXAMPLES'), safe_mode=configuration.conf.getboolean('core', 'DAG_DISCOVERY_SAFE_MODE')): """ """ start_...
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/distributions/gumbel.py#L201-L224
def _kl_gumbel_gumbel(a, b, name=None): """ """ with tf.name_scope(name or "kl_gumbel_gumbel"): # Consistent with # http://www.mast.queensu.ca/~communications/Papers/gil-msc11.pdf, page 64 # The paper uses beta to refer to scale and mu to refer to loc. # There is actually an error in the solution ...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_sql_hook.py#L601-L615
def get_proxy_version(self): """ """ self._download_sql_proxy_if_needed() command_to_run = [self.sql_proxy_path] command_to_run.extend(['--version']) command_to_run.extend(self._get_credential_parameters()) result = subprocess.check_output(command_to_run)...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dates.py#L114-L188
def round_time(dt, delta, start_date=timezone.make_aware(datetime.min)): """ """ if isinstance(delta, six.string_types): # It's cron based, so it's easy tz = start_date.tzinfo start_date = timezone.make_naive(start_date, tz) cron = croniter(delta, start_date) pr...
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/jobs.py#L2118-L2366
def _process_backfill_task_instances(self, ti_status, executor, pickle_id, start_date=None, session=None): """ """ execute...