__author__ = 'James'
from monopyly import *
from .property_probs_calcs import PropertyProbCalcs
from .property_sim import *
from .board_utils import *
class DecisionUtils():
def __init__(self, property_probs):
self.property_probs = property_probs
self.board_utils = BoardUtils(self.property_probs)
self.multiplier_max_price = 2
self.multiplier_50_perc_owned = 2
self.multiplier_50_perc_owned_by_one_other = 1.5
self.multiplier_own_one_other_available = 1.5
self.multiplier_station = 1.5
self.multiplier_first_property_in_set = 1.25
#How worried I am that others own properties and I don't
self.multiplier_worry = 1
self.turns_ive_had_in_game=0
def percentage_of_sets_player_owns_to_average_num_others_own(self, game_state, player):
#Work out the number of sets I own
#The number of sets others own on average
#What percentage above the average do I have?
owned_sets = game_state.board.get_owned_sets(True)
all_players = game_state.players
total_owned_by_me = 0
total_owned_by_all_others = 0
average_owned_by_others=0
for k in owned_sets.keys():
if player.is_same_player(k) :
total_owned_by_me = len(owned_sets[k])
else:
total_owned_by_all_others = total_owned_by_all_others + len(owned_sets[k])
average_owned_by_others = total_owned_by_all_others / (len(all_players) - 1)
if average_owned_by_others > 0:
percentage_greater_i_own = 100 * ((total_owned_by_me - average_owned_by_others) / average_owned_by_others)
else:
percentage_greater_i_own = 100
return percentage_greater_i_own
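#Worked example (illustrative numbers only, not taken from a real game): if I own 2 sets
#and the 3 other players own 3 sets between them, average_owned_by_others = 3 / 3 = 1.0
#and the method returns 100 * ((2 - 1.0) / 1.0) = 100.0, i.e. I own 100% more sets than
#the average opponent.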
def number_of_monopolies_player_would_get(self,property_list_they_would_get,player):
#Returns the NUMBER of monopolies a player would get IF they got all these proposed properties
total_monopolies_player_would_get=0
sets_already_checked=[]
for prop in property_list_they_would_get:
#Check if it would give them a monopoly
gives_a_monopoly_flag=True
if (not prop.property_set in sets_already_checked and not(prop.property_set.set_enum == PropertySet.UTILITY
or prop.property_set.set_enum ==PropertySet.STATION)):
for other_prop_in_set in prop.property_set.properties:
#Loop through all the properties in the set
#If, apart from the other properties in this proposal, they already own the rest of the set,
#then say that this would give them a monopoly
if other_prop_in_set in property_list_they_would_get:
#The property in this set is in the list of the proposed - still a chance for them to
#get a monopoly
pass
else:
if player.is_same_player(other_prop_in_set.owner):
#they already own this property in the set - still on course for a monopoly
pass
else:
#A different owner (or unowned) - this proposal alone won't complete the set
gives_a_monopoly_flag=False
break
if gives_a_monopoly_flag == True:
total_monopolies_player_would_get=total_monopolies_player_would_get + 1
sets_already_checked.append(prop.property_set)
return total_monopolies_player_would_get
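#Illustrative example (standard UK board names, assumed here rather than taken from the
#monopyly engine): a proposal containing both Park Lane and Mayfair counts the dark-blue
#set as one new monopoly, whereas a proposal containing only Park Lane while Mayfair is
#owned by an opponent contributes nothing to the count.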
def amount_property_is_worth_to_a_player(self, game_state, property, player):
#Decide how much a property is worth
#TODO: How to split the amounts to offer AND how much to cap them by?
#TODO: Could we look at other player's cash amounts to judge how much to offer?
#property_price=property.price
#property_price=0
#percentage_greater_player_owns_to_others=self.percentage_of_sets_player_owns_to_average_num_others_own(game_state,player)
worry_multiplier=1
if isinstance(property,Street):
avg_expected_return_1_turn=self.property_probs.income_per_turn(property.name,num_houses=3, set_owned=True,
number_of_stations_owned=None)
elif isinstance(property,Station):
avg_expected_return_1_turn=self.property_probs.income_per_turn(property.name,num_houses=None, set_owned=True,
number_of_stations_owned=2)
else:
#For a utility
avg_expected_return_1_turn=1
#Assume 40 turns per game - after this we won't have long enough to collect any rent - so the property is only worth its
#face value
if self.turns_ive_had_in_game <40:
avg_expected_return_remaining_turns=avg_expected_return_1_turn * (40-self.turns_ive_had_in_game)
else:
avg_expected_return_remaining_turns=0
amount_property_is_worth=avg_expected_return_remaining_turns
'''
if percentage_greater_player_owns_to_others < 0:
worry_multiplier = - percentage_greater_player_owns_to_others * self.multiplier_worry
percentage_of_set_player_owns = self.board_utils.percentage_of_property_set_owned_by_player(game_state.board, player, property.property_set)
percentage_of_set_still_available = self.board_utils.percentage_of_property_set_owned_by_player(game_state.board, None, property.property_set)
if percentage_of_set_player_owns >= 50:
#If we have 50% or MORE of properties in this set - then we can offer a lot
amount_property_is_worth = avg_expected_return_remaining_turns + (worry_multiplier *
self.multiplier_50_perc_owned
* property_price)
elif self.board_utils.max_percentage_of_property_owned_by_single_player(game_state, property.property_set) >= 50:
#If someone else has 50% or more of properties in a set then we can offer a lot
#to block their progress
amount_property_is_worth = avg_expected_return_remaining_turns+(worry_multiplier
* self.multiplier_50_perc_owned_by_one_other
* property_price)
elif percentage_of_set_player_owns > 0 and percentage_of_set_still_available > 0:
#If we have some properties in the set and others are still available we can offer a reasonable amount
#TODO: What amount to offer for "reasonable"?
#Offer 3/4 of our maximum price for this property
amount_property_is_worth = avg_expected_return_remaining_turns+(worry_multiplier *
self.multiplier_own_one_other_available
* property_price)
elif self.board_utils.is_station(property.name):
#If it is a station then we can offer a reasonable amount
amount_property_is_worth = avg_expected_return_remaining_turns+(worry_multiplier
* self.multiplier_station * property_price)
elif percentage_of_set_still_available == 100:
#First property available in a set
amount_property_is_worth = avg_expected_return_remaining_turns+(worry_multiplier
* self.multiplier_first_property_in_set
* property_price)
else:
#If it is a utility or anything else then we offer face value
amount_property_is_worth = property_price+ avg_expected_return_remaining_turns
'''
#Make sure we return an integer amount
return int(max(amount_property_is_worth,property.price))
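#Worked example (illustrative figures, not real rent probabilities): with an average
#expected return of 30 per turn and 10 of the assumed 40 turns already played, the
#valuation is 30 * (40 - 10) = 900; the max() above then stops us ever valuing the
#property below its face value.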
def best_house_to_sell(self, sim_property_list):
# returns the best house to sell as a property name
# expects a list of properties that ARE streets with houses
prob_calc=PropertyProbCalcs()
#TODO: check this a good strategy
#Sell the house that gives the LEAST lost income
#Could also be least lost income PER cost of house
#Could also consider the amount of money we're trying to raise
#That gets difficult though, and needs to be considered as part
#of a larger group of decisions about which combination of
#player actions is the best route
#Current problem: when we test each property, the house removed for the test STAYS removed -
#if it's NOT the best property we need to put the house back
best_property = None
min_income_found=9999
for sim_property in sim_property_list:
current_test_income_loss = 99999
if sim_property.sim_number_of_houses > 0:
if self.adding_house_leaves_set_balanced(sim_property , sim_property_list, -1):
current_test_income_loss=prob_calc.income_per_turn(sim_property.name,sim_property.sim_number_of_houses, True )
if current_test_income_loss< min_income_found:
min_income_found = current_test_income_loss
best_property=sim_property
return best_property
def adding_house_leaves_set_balanced (self, sim_property_with_house_to_add,sim_properties, number_of_houses_to_add):
#Assume the list passed in contains ALL the properties in a set PLUS some extra ones
#The aim is to add/subtract one house on a property - and see if the set still balances
#The house change is always undone before returning; only the result of the balance check is kept
#Create a list of properties in this set
property_sim_set=[]
for prop in sim_properties:
if prop.property_set == sim_property_with_house_to_add.property_set:
if prop.name == sim_property_with_house_to_add.name:
prop.sim_number_of_houses = prop.sim_number_of_houses + number_of_houses_to_add
property_sim_set.append(prop)
#Add the specified number of houses to the correct property
houses_for_each_property = [p.sim_number_of_houses for p in property_sim_set]
if max(houses_for_each_property) <= 5 and (max(houses_for_each_property) - min(houses_for_each_property)) <= 1:
#Always replace the house
sim_property_with_house_to_add.sim_number_of_houses = sim_property_with_house_to_add.sim_number_of_houses - number_of_houses_to_add
return True
else:
#Always replace the house
sim_property_with_house_to_add.sim_number_of_houses = sim_property_with_house_to_add.sim_number_of_houses - number_of_houses_to_add
return False
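#Balance rule example: after a test change, house counts of [2, 3, 3] across a set are
#balanced (max 3 <= 5 and max - min = 1 <= 1), while [2, 4, 3] are not (max - min = 2),
#so that build/sale would be rejected.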
def improve_properties(self, owned_sim_properties , spare_cash):
#Loop through all properties I own where I can build
#and add a house on the one with maximum expected income
#TODO: Could also build on the place that is quickest
#to recoup the accumulated cost
#TODO: Could also build according to where
#players are on the board AND who we want to victimise
prob_calc=PropertyProbCalcs()
remaining_spare_cash = spare_cash
max_income_found=-9999
found_prop_to_improve = True
while found_prop_to_improve:
found_prop_to_improve = False
for sim_property in owned_sim_properties :
current_test_income_gain = -99999
if sim_property.sim_number_of_houses < 5 and sim_property.property_set.house_price <= remaining_spare_cash:
if self.adding_house_leaves_set_balanced(sim_property , owned_sim_properties, 1):
current_test_income_gain=prob_calc.income_per_turn(sim_property.name,sim_property.sim_number_of_houses, True )
if current_test_income_gain > max_income_found:
found_prop_to_improve = True
max_income_found = current_test_income_gain
best_property = sim_property
if found_prop_to_improve:
remaining_spare_cash -= best_property.property_set.house_price
best_property.sim_number_of_houses = best_property.sim_number_of_houses + 1
return owned_sim_properties
def best_property_to_mortgage(self, game_state, my_own_player, sim_property_set_for_mortgage_calcs):
#Find the best property to mortgage. This is the one with the lowest loss of expected income
#TODO: Could also be those with highest number of rolls before mortgage value is lost
min_income_loss=9999
best_property=None
for prop in sim_property_set_for_mortgage_calcs:
set_owned = (prop.property_set.owner == my_own_player)
if prop.is_mortgaged == False:
if set_owned and prop.property_set in [p.property_set for p in self.board_utils.get_properties_i_own_with_houses(game_state.board, my_own_player)]:
#This property or another in its set has houses - it can't be mortgaged
pass
else:
if self.board_utils.is_utility(prop.name):
#Always mortgage a utility
return prop
if self.board_utils.is_station(prop.name):
current_income_loss = self.property_probs.income_per_turn(prop.name, None, None, self.board_utils.number_of_stations_owned(
game_state.board, my_own_player))
else:
current_income_loss = self.property_probs.income_per_turn(prop.name, 0, set_owned)
if min_income_loss > current_income_loss:
min_income_loss = current_income_loss
best_property = prop
return best_property
def best_property_to_unmortgage(self, game_state, my_own_player, sim_property_set_for_mortgage_calcs):
#Find the best property to unmortgage. This is the one with the highest gain of expected income
#TODO: Could also be the one with the lowest number of rolls before the unmortgage cost is recouped
max_income_loss=-9999
best_property=None
for prop in sim_property_set_for_mortgage_calcs:
set_owned = (prop.property_set.owner == my_own_player)
if prop.is_mortgaged == True:
if self.board_utils.is_utility(prop.name):
#Never unmortgage a utility
#TODO: Unless it's the ONLY mortgaged property
continue
if self.board_utils.is_station(prop.name):
current_income_gain = self.property_probs.income_per_turn(prop.name, None, None, self.board_utils.number_of_stations_owned(
game_state.board, my_own_player))
else:
current_income_gain = self.property_probs.income_per_turn(prop.name, 0, set_owned)
if max_income_loss < current_income_gain:
max_income_loss = current_income_gain
best_property = prop
return best_property
# ----------------------------------------------------------------------------
# Copyright (c) 2014-2016, 'prx' developers (see AUTHORS file)
# All rights reserved.
#
# Distributed under the terms of the MIT license.
#
# The full license is in the LICENSE file, distributed with this software.
# ----------------------------------------------------------------------------
"""Function classes for polynomial functions and related indicators."""
from __future__ import division
import numpy as np
from . import base as _base
from ..fun.norms import l2normsqhalf
from ..operator_class import DiagLinop
__all__ = (
'Quadratic', 'Affine', 'Const', 'PointInd',
)
class Quadratic(_base.BaseFunction):
"""Function class for a quadratic function.
This function is defined as::
f(x) = s*l2normsqhalf(A(x)) + Re(<b, x>) + c,
where `A` is a linear operator or matrix, `b` is an array specifying
the linear term, `c` is a scalar constant, and `s` is a scaling
constant.
{function_summary}
Attributes
----------
{quadratic_params}
{function_attributes}
See Also
--------
Affine : Subtype with a zero quadratic term.
Const : Subtype with zero quadratic and linear terms.
.BaseFunction : Parent class.
Notes
-----
{function_notes}
"""
_doc_quadratic_params = """
A : :class:`.LinearOperator`
Linear operator defining the quadratic term.
b : array
Linear term.
c : float | int
Constant term.
s : float | int
Scaling of quadratic term.
"""
def __new__(
cls, A=None, b=None, c=None, s=1, scale=None, stretch=None, shift=None,
linear=None, const=None,
):
# if quadratic term is None, want to define an Affine or Const
if A is None:
# if linear terms are None, want to define a Const function
if b is None and linear is None:
obj = super(Quadratic, cls).__new__(
Const, scale=scale, stretch=stretch, shift=shift,
linear=linear, const=const,
)
return obj
# otherwise we do want an Affine function
# must specify explicitly since this is also called from Const
else:
obj = super(Quadratic, cls).__new__(
Affine, scale=scale, stretch=stretch, shift=shift,
linear=linear, const=const,
)
return obj
# otherwise we do want a Quadratic function
# must specify explicitly since this is also called from subtypes
else:
obj = super(Quadratic, cls).__new__(
Quadratic, scale=scale, stretch=stretch, shift=shift,
linear=linear, const=const,
)
return obj
def __init__(
self, A=None, b=None, c=None, s=1, scale=None, stretch=None,
shift=None, linear=None, const=None,
):
"""Create Function object for a quadratic function.
{init_summary}
Parameters
----------
{quadratic_params}
{init_params}
"""
# change 'None's to identities
if A is None:
A = DiagLinop(0)
if b is None:
b = 0
if c is None:
c = 0
if scale is None:
scale = 1
if stretch is None:
stretch = 1
if shift is None:
shift = 0
if linear is None:
linear = 0
if const is None:
const = 0
# NOTE: can't do anything to simplify or combine shift term
# since we don't know what size input the LinearOperator accepts
# combine constant terms
c = scale*c + const
const = None
# combine linear terms
b = scale*np.conj(stretch)*b + linear
linear = None
# combine scale and stretch into quadratic coefficient
s = s*scale*np.abs(stretch)**2
scale = None
stretch = None
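# Derivation sketch (assuming the composition convention
# scale*f(stretch*x + shift) + Re(<linear, x>) + const handled by BaseFunction):
# with f(y) = s*l2normsqhalf(A(y)) + Re(<b, y>) + c and y = stretch*x, the
# quadratic coefficient becomes s*scale*|stretch|**2, the linear term becomes
# scale*conj(stretch)*b + linear, and the constant becomes scale*c + const,
# which matches the three combinations above.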
self._A = A
self._b = b
self._bconj = np.conj(b)
self._c = c
self._s = s
super(Quadratic, self).__init__(
scale=scale, stretch=stretch, shift=shift,
linear=linear, const=const,
)
# @property
# def _conjugate_class(self):
# return Quadratic
#
# @property
# def _conjugate_args(self):
# # The conjugate of the point indicator is the affine function with
# # the point as the linear term.
# kwargs = super(PointInd, self)._conjugate_args
# kwargs.update(b=self._p)
# return kwargs
@property
def A(self):
"""Linear operator defining the quadratic term."""
return self._A
@property
def b(self):
"""Vector defining the linear term: Re(<b, x>)."""
return self._b
@property
def c(self):
"""Constant term."""
return self._c
@property
def s(self):
"""Scaling of quadratic term."""
return self._s
def fun(self, x):
"""Quadratic function."""
quad = self._s*l2normsqhalf(self._A(x))
# TODO replace multiply/sum with inner1d when it is available in numpy
lin = np.multiply(self._bconj, x).sum().real
return quad + lin + self._c
def grad(self, x):
"""Gradient of quadratic function."""
return self._s*self._A.H(self._A(x)) + self._b.real
# def prox(self, x, lmbda=1):
# """Prox of an quadratic function."""
# return self._A.ideninv(x - lmbda*self._b, lmbda*self._s)
class Affine(Quadratic):
# @property
# def _conjugate_class(self):
# return PointInd
#
# @property
# def _conjugate_args(self):
# # The conjugate of the point indicator is the affine function with
# # the point as the linear term.
# kwargs = super(PointInd, self)._conjugate_args
# kwargs.update(b=self._p)
# return kwargs
def fun(self, x):
"""Affine function."""
# TODO replace multiply/sum with inner1d when it is available in numpy
return np.multiply(self._bconj, x).sum().real + self._c
def grad(self, x):
"""Gradient of affine function."""
return self._b.real
def prox(self, x, lmbda=1):
"""Prox of an affine function."""
return x - lmbda*self._b
class Const(Affine):
# @property
# def _conjugate_class(self):
# return Infinity
def fun(self, x):
"""Constant function."""
return self._c
@staticmethod
def grad(x):
"""Zero vector, the gradient of a constant."""
return np.zeros_like(x)
@staticmethod
def prox(x, lmbda=1):
"""Identity function, the prox operator of a constant function."""
return x
class PointInd(_base.IndicatorFunction):
"""Function class for the point indicator function.
{function_summary}
Attributes
----------
{point_params}
{function_attributes}
See Also
--------
.IndicatorFunction : Parent class.
Notes
-----
The indicator function is zero at the given point p and infinity
everywhere else. Its gradient is undefined.
{function_notes}
The prox operator returns the defining point.
"""
_doc_point_params = """
p : array
The point at which this function is defined.
"""
def __init__(
self, p, scale=None, stretch=None, shift=None, linear=None, const=None,
):
"""Create Function that defines an indicator function.
{init_summary}
Parameters
----------
{point_params}
{init_params}
"""
# linear can be eliminated by evaluating at point and bringing into
# const
if linear is not None:
linconst = np.vdot(linear, p)
if const is not None:
const = const + linconst
else:
const = linconst
linear = None
# stretch and shift can be eliminated by absorbing into point
if shift is not None:
p = p - shift
shift = None
if stretch is not None:
p = p/stretch
stretch = None
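# Worked example (assuming the same composition convention as above): for a
# 1-D point p = 6 with shift = 4 and stretch = 2, the composed indicator is
# zero exactly when 2*x + 4 == 6, i.e. x == (6 - 4)/2 == 1, which is the
# point stored below.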
self._p = p
# we can also eliminate scaling,
# but this is taken care of by parent class
super(PointInd, self).__init__(
scale=scale, stretch=stretch, shift=shift,
linear=linear, const=const,
)
@property
def _conjugate_class(self):
return Affine
@property
def _conjugate_args(self):
# The conjugate of the point indicator is the affine function with
# the point as the linear term.
kwargs = super(PointInd, self)._conjugate_args
kwargs.update(b=self._p)
return kwargs
@property
def p(self):
"""The point at which this function is defined."""
return self._p
def fun(self, x):
"""Indicator function for the point p."""
if np.allclose(x, self._p):
return 0
else:
return np.inf
def prox(self, x, lmbda=1):
"""Projection onto the point p (always returns p)."""
return self._p
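# Hedged usage sketch (not part of the original module). It assumes that
# DiagLinop(d) applies elementwise scaling by d and that
# l2normsqhalf(y) == 0.5 * sum(|y|**2), as the names above suggest.
if __name__ == '__main__':
    x = np.array([1.0, 2.0, 3.0])
    q = Quadratic(A=DiagLinop(2.0), b=np.ones(3), c=1.0)
    # f(x) = 0.5*||2*x||**2 + Re(<b, x>) + c = 28 + 6 + 1 = 35
    print(q.fun(x))
    p = PointInd(np.zeros(3))
    # the prox of a point indicator simply returns the defining point
    print(p.prox(x))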
# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from oslo_config import cfg
import six
from heat.common import exception
from heat.common import template_format
from heat.engine.clients.os import glance
from heat.engine.clients.os import neutron
from heat.engine.clients.os import sahara
from heat.engine.resources.openstack.sahara import cluster as sc
from heat.engine import scheduler
from heat.tests import common
from heat.tests import utils
cluster_stack_template = """
heat_template_version: 2013-05-23
description: Hadoop Cluster by Sahara
resources:
super-cluster:
type: OS::Sahara::Cluster
properties:
name: super-cluster
plugin_name: vanilla
hadoop_version: 2.3.0
cluster_template_id: some_cluster_template_id
default_image_id: some_image
key_name: admin
neutron_management_network: some_network
shares:
- id: some_share_id
access_level: ro
"""
class FakeCluster(object):
def __init__(self, status='Active'):
self.status = status
self.id = "some_id"
self.name = "super-cluster"
self.info = {"HDFS": {"NameNode": "hdfs://hostname:port",
"Web UI": "http://host_ip:port"}}
self.to_dict = lambda: {"cluster": "info"}
class SaharaClusterTest(common.HeatTestCase):
def setUp(self):
super(SaharaClusterTest, self).setUp()
self.patchobject(sc.constraints.CustomConstraint, '_is_valid'
).return_value = True
self.patchobject(glance.GlanceClientPlugin,
'find_image_by_name_or_id'
).return_value = 'some_image_id'
self.patchobject(neutron.NeutronClientPlugin, '_create')
self.patchobject(neutron.NeutronClientPlugin,
'find_resourceid_by_name_or_id',
return_value='some_network_id')
self.sahara_mock = mock.MagicMock()
self.patchobject(sahara.SaharaClientPlugin, '_create'
).return_value = self.sahara_mock
self.patchobject(sahara.SaharaClientPlugin, 'validate_hadoop_version'
).return_value = None
self.cl_mgr = self.sahara_mock.clusters
self.fake_cl = FakeCluster()
self.t = template_format.parse(cluster_stack_template)
def _init_cluster(self, template):
self.stack = utils.parse_stack(template)
cluster = self.stack['super-cluster']
return cluster
def _create_cluster(self, template):
cluster = self._init_cluster(template)
self.cl_mgr.create.return_value = self.fake_cl
self.cl_mgr.get.return_value = self.fake_cl
scheduler.TaskRunner(cluster.create)()
self.assertEqual((cluster.CREATE, cluster.COMPLETE),
cluster.state)
self.assertEqual(self.fake_cl.id, cluster.resource_id)
return cluster
def test_cluster_create(self):
self._create_cluster(self.t)
expected_args = ('super-cluster', 'vanilla', '2.3.0')
expected_kwargs = {'cluster_template_id': 'some_cluster_template_id',
'user_keypair_id': 'admin',
'default_image_id': 'some_image_id',
'net_id': 'some_network_id',
'use_autoconfig': None,
'shares': [{'id': 'some_share_id',
'access_level': 'ro',
'path': None}]}
self.cl_mgr.create.assert_called_once_with(*expected_args,
**expected_kwargs)
self.cl_mgr.get.assert_called_once_with(self.fake_cl.id)
def test_cluster_create_fails(self):
cfg.CONF.set_override('action_retry_limit', 0)
cluster = self._init_cluster(self.t)
self.cl_mgr.create.return_value = self.fake_cl
self.cl_mgr.get.return_value = FakeCluster(status='Error')
create_task = scheduler.TaskRunner(cluster.create)
ex = self.assertRaises(exception.ResourceFailure, create_task)
expected = ('ResourceInError: resources.super-cluster: '
'Went to status Error due to "Unknown"')
self.assertEqual(expected, six.text_type(ex))
def test_cluster_check_delete_complete_error(self):
cluster = self._create_cluster(self.t)
self.cl_mgr.get.side_effect = [
self.fake_cl,
sahara.sahara_base.APIException()]
self.cl_mgr.get.reset_mock()
delete_task = scheduler.TaskRunner(cluster.delete)
ex = self.assertRaises(exception.ResourceFailure, delete_task)
expected = "APIException: resources.super-cluster: None"
self.assertEqual(expected, six.text_type(ex))
self.cl_mgr.delete.assert_called_once_with(self.fake_cl.id)
self.assertEqual(2, self.cl_mgr.get.call_count)
def test_cluster_delete_cluster_in_error(self):
cluster = self._create_cluster(self.t)
self.cl_mgr.get.side_effect = [
self.fake_cl,
FakeCluster(status='Error')]
self.cl_mgr.get.reset_mock()
delete_task = scheduler.TaskRunner(cluster.delete)
ex = self.assertRaises(exception.ResourceFailure, delete_task)
expected = ('ResourceInError: resources.super-cluster: '
'Went to status Error due to "Unknown"')
self.assertEqual(expected, six.text_type(ex))
self.cl_mgr.delete.assert_called_once_with(self.fake_cl.id)
self.assertEqual(2, self.cl_mgr.get.call_count)
def test_cluster_resolve_attribute(self):
cluster = self._create_cluster(self.t)
self.cl_mgr.get.reset_mock()
self.assertEqual(self.fake_cl.info,
cluster._resolve_attribute('info'))
self.assertEqual(self.fake_cl.status,
cluster._resolve_attribute('status'))
self.assertEqual({"cluster": "info"}, cluster.FnGetAtt('show'))
self.assertEqual(3, self.cl_mgr.get.call_count)
def test_cluster_create_no_image_anywhere_fails(self):
self.t['resources']['super-cluster']['properties'].pop(
'default_image_id')
self.sahara_mock.cluster_templates.get.return_value = mock.Mock(
default_image_id=None)
cluster = self._init_cluster(self.t)
ex = self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(cluster.create))
self.assertIsInstance(ex.exc, exception.StackValidationFailed)
self.assertIn("default_image_id must be provided: "
"Referenced cluster template some_cluster_template_id "
"has no default_image_id defined.",
six.text_type(ex.message))
def test_cluster_validate_no_network_on_neutron_fails(self):
self.t['resources']['super-cluster']['properties'].pop(
'neutron_management_network')
cluster = self._init_cluster(self.t)
self.patchobject(cluster, 'is_using_neutron', return_value=True)
ex = self.assertRaises(exception.StackValidationFailed,
cluster.validate)
self.assertEqual("neutron_management_network must be provided",
six.text_type(ex))
def test_deprecated_properties_correctly_translates(self):
tmpl = '''
heat_template_version: 2013-05-23
description: Hadoop Cluster by Sahara
resources:
super-cluster:
type: OS::Sahara::Cluster
properties:
name: super-cluster
plugin_name: vanilla
hadoop_version: 2.3.0
cluster_template_id: some_cluster_template_id
image: some_image
key_name: admin
neutron_management_network: some_network
'''
ct = self._create_cluster(template_format.parse(tmpl))
self.assertEqual('some_image', ct.properties.get('default_image_id'))
self.assertIsNone(ct.properties.get('image'))
import os.path
from django.core.management import call_command
from django_nose.tools import (
assert_equal,
assert_false,
assert_is_none,
assert_raises,
assert_true,
)
from django.db.models import Q
from django.test.utils import override_settings
from mock import call, Mock, patch
from pontoon.base.models import (
Entity,
ProjectLocale,
TranslationMemoryEntry,
User
)
from pontoon.base.tests import (
assert_attributes_equal,
ChangedEntityLocaleFactory,
EntityFactory,
IdenticalTranslationFactory,
LocaleFactory,
PluralEntityFactory,
ProjectFactory,
ProjectLocaleFactory,
RepositoryFactory,
ResourceFactory,
TranslatedResourceFactory,
SubpageFactory,
TestCase,
TranslationFactory,
UserFactory
)
from pontoon.base.utils import aware_datetime
from pontoon.sync import KEY_SEPARATOR
class CreateUserTests(TestCase):
def test_create_super_user(self):
"""
Check that it's possible to create a user.
Test against possible regressions in User model.
"""
username = 'superuser@example.com'
call_command('createsuperuser', email=username, username=username, interactive=False)
assert User.objects.get(username=username)
assert User.objects.get(email=username)
class ProjectTests(TestCase):
def test_can_commit_no_repos(self):
"""can_commit should be False if there are no repos."""
project = ProjectFactory.create(repositories=[])
assert_false(project.can_commit)
def test_can_commit_false(self):
"""
can_commit should be False if there are no repos that can be
committed to.
"""
repo = RepositoryFactory.create(type='file')
project = ProjectFactory.create(repositories=[repo])
assert_false(project.can_commit)
def test_can_commit_true(self):
"""
can_commit should be True if there is a repo that can be
committed to.
"""
repo = RepositoryFactory.create(type='git')
project = ProjectFactory.create(repositories=[repo])
assert_true(project.can_commit)
# We only test type here because the other compatibility methods are
# basically the same, and they're meant to be removed in the future
# anyway.
def test_repository_type_no_repo(self):
"""If a project has no repos, repository_type should be None."""
project = ProjectFactory.create(repositories=[])
assert_equal(project.repository_type, None)
def test_repository_type_first(self):
"""
If a project has repos, return the type of the repo created
first.
"""
project = ProjectFactory.create(repositories=[])
RepositoryFactory.create(project=project, type='git')
RepositoryFactory.create(project=project, type='hg')
assert_equal(project.repository_type, 'git')
def test_repository_for_path_none(self):
"""
If the project has no matching repositories, raise a ValueError.
"""
project = ProjectFactory.create(repositories=[])
with assert_raises(ValueError):
project.repository_for_path('doesnt/exist')
def test_repository_for_path(self):
"""
Return the first repo found with a checkout path that contains
the given path.
"""
repo1, repo2, repo3 = RepositoryFactory.create_batch(3)
project = ProjectFactory.create(repositories=[repo1, repo2, repo3])
path = os.path.join(repo2.checkout_path, 'foo', 'bar')
assert_equal(project.repository_for_path(path), repo2)
def test_needs_sync(self):
"""
Project.needs_sync should be True if ChangedEntityLocale objects
exist for its entities or if Project has unsynced locales.
"""
project = ProjectFactory.create()
assert_false(project.needs_sync)
ChangedEntityLocaleFactory.create(entity__resource__project=project)
assert_true(project.needs_sync)
project = ProjectFactory.create()
assert_false(project.needs_sync)
del project.unsynced_locales
ProjectLocaleFactory.create(
project=project,
locale=LocaleFactory.create()
)
assert_true(project.needs_sync)
def test_get_latest_activity_with_latest(self):
"""
If the project has a latest_translation and no locale is given,
return it.
"""
project = ProjectFactory.create()
translation = TranslationFactory.create(entity__resource__project=project)
project.latest_translation = translation
project.save()
assert_equal(project.get_latest_activity(), translation.latest_activity)
def test_get_latest_activity_without_latest(self):
"""
If the project doesn't have a latest_translation and no locale
is given, return None.
"""
project = ProjectFactory.create(latest_translation=None)
assert_is_none(project.get_latest_activity())
def test_get_latest_activity_with_locale(self):
"""
If a locale is given, defer to
ProjectLocale.get_latest_activity.
"""
locale = LocaleFactory.create()
project = ProjectFactory.create(locales=[locale])
with patch.object(ProjectLocale, 'get_latest_activity') as mock_get_latest_activity:
mock_get_latest_activity.return_value = 'latest'
assert_equal(project.get_latest_activity(locale=locale), 'latest')
mock_get_latest_activity.assert_called_with(project, locale)
class LocalePartsTests(TestCase):
def setUp(self):
self.locale, self.locale_other = LocaleFactory.create_batch(2)
self.project = ProjectFactory.create(
locales=[self.locale, self.locale_other]
)
self.resource = ResourceFactory.create(
project=self.project,
path='/main/path.po'
)
EntityFactory.create(resource=self.resource)
TranslatedResourceFactory.create(resource=self.resource, locale=self.locale)
def test_parts_stats_no_page_one_resource(self):
"""
Return resource paths and stats if no subpage and one resource defined.
"""
details = self.locale.parts_stats(self.project)
assert_equal(len(details), 2)
assert_equal(details[0]['title'], '/main/path.po')
assert_equal(details[0]['translated_strings'], 0)
def test_parts_stats_no_page_multiple_resources(self):
"""
Return resource paths and stats only for locales for which the resources are available.
"""
resource_other = ResourceFactory.create(
project=self.project,
path='/other/path.po'
)
EntityFactory.create(resource=resource_other)
TranslatedResourceFactory.create(resource=resource_other, locale=self.locale)
TranslatedResourceFactory.create(resource=resource_other, locale=self.locale_other)
details = self.locale.parts_stats(self.project)
details_other = self.locale_other.parts_stats(self.project)
assert_equal(details[0]['title'], '/main/path.po')
assert_equal(details[0]['translated_strings'], 0)
assert_equal(details[1]['title'], '/other/path.po')
assert_equal(details[1]['translated_strings'], 0)
assert_equal(len(details_other), 2)
assert_equal(details_other[0]['title'], '/other/path.po')
assert_equal(details_other[0]['translated_strings'], 0)
def test_parts_stats_pages_not_tied_to_resources(self):
"""
Return subpage name and stats.
"""
SubpageFactory.create(project=self.project, name='Subpage')
details = self.locale.parts_stats(self.project)
assert_equal(details[0]['title'], 'Subpage')
assert_equal(details[0]['translated_strings'], 0)
def test_parts_stats_pages_tied_to_resources(self):
"""
Return subpage names and stats only for locales for which the resources are available.
"""
resource_other = ResourceFactory.create(
project=self.project,
path='/other/path.po'
)
EntityFactory.create(resource=resource_other)
TranslatedResourceFactory.create(resource=resource_other, locale=self.locale)
TranslatedResourceFactory.create(resource=resource_other, locale=self.locale_other)
SubpageFactory.create(
project=self.project,
name='Subpage',
resources=[self.resource]
)
SubpageFactory.create(
project=self.project,
name='Other Subpage',
resources=[resource_other]
)
details = self.locale.parts_stats(self.project)
details_other = self.locale_other.parts_stats(self.project)
assert_equal(details[0]['title'], 'Other Subpage')
assert_equal(details[0]['translated_strings'], 0)
assert_equal(details[1]['title'], 'Subpage')
assert_equal(details[1]['translated_strings'], 0)
assert_equal(details_other[0]['title'], 'Other Subpage')
assert_equal(details_other[0]['translated_strings'], 0)
class RepositoryTests(TestCase):
def test_checkout_path(self):
"""checkout_path should be determined by the repo URL."""
repo = RepositoryFactory.create(
url='https://example.com/path/to/locale/',
project__slug='test-project'
)
with self.settings(MEDIA_ROOT='/media/root'):
assert_equal(
repo.checkout_path,
'/media/root/projects/test-project/path/to/locale'
)
def test_checkout_path_multi_locale(self):
"""
The checkout_path for multi-locale repos should not include the
locale_code variable.
"""
repo = RepositoryFactory.create(
url='https://example.com/path/to/{locale_code}/',
project__slug='test-project',
)
with self.settings(MEDIA_ROOT='/media/root'):
assert_equal(
repo.checkout_path,
'/media/root/projects/test-project/path/to'
)
def test_checkout_path_source_repo(self):
"""
The checkout_path for a source repo should end with a templates
directory.
"""
repo = RepositoryFactory.create(
url='https://example.com/path/to/locale/',
project__slug='test-project',
source_repo=True
)
with self.settings(MEDIA_ROOT='/media/root'):
assert_equal(
repo.checkout_path,
'/media/root/projects/test-project/path/to/locale/templates'
)
def test_locale_checkout_path(self):
"""Append the locale code the the project's checkout_path."""
repo = RepositoryFactory.create(
url='https://example.com/path/{locale_code}/',
project__slug='test-project',
)
locale = LocaleFactory.create(code='test-locale')
with self.settings(MEDIA_ROOT='/media/root'):
assert_equal(
repo.locale_checkout_path(locale),
'/media/root/projects/test-project/path/test-locale'
)
def test_locale_checkout_path_non_multi_locale(self):
"""If the repo isn't multi-locale, throw a ValueError."""
repo = RepositoryFactory.create()
locale = LocaleFactory.create()
with assert_raises(ValueError):
repo.locale_checkout_path(locale)
def test_locale_url(self):
"""Fill in the {locale_code} variable in the URL."""
repo = RepositoryFactory.create(
url='https://example.com/path/to/{locale_code}/',
)
locale = LocaleFactory.create(code='test-locale')
assert_equal(repo.locale_url(locale), 'https://example.com/path/to/test-locale/')
def test_locale_url_non_multi_locale(self):
"""If the repo isn't multi-locale, throw a ValueError."""
repo = RepositoryFactory.create()
locale = LocaleFactory.create()
with assert_raises(ValueError):
repo.locale_url(locale)
def test_url_for_path(self):
"""
Return the first locale_checkout_path for locales active for the
repo's project that matches the given path.
"""
matching_locale = LocaleFactory.create(code='match')
non_matching_locale = LocaleFactory.create(code='nomatch')
repo = RepositoryFactory.create(
project__locales=[matching_locale, non_matching_locale],
project__slug='test-project',
url='https://example.com/path/to/{locale_code}/',
)
with self.settings(MEDIA_ROOT='/media/root'):
test_path = '/media/root/projects/test-project/path/to/match/foo/bar.po'
assert_equal(repo.url_for_path(test_path), 'https://example.com/path/to/match/')
def test_url_for_path_no_match(self):
"""
If no active locale matches the given path, raise a ValueError.
"""
repo = RepositoryFactory.create(
project__locales=[],
url='https://example.com/path/to/{locale_code}/',
)
with assert_raises(ValueError):
repo.url_for_path('/media/root/path/to/match/foo/bar.po')
def test_pull(self):
repo = RepositoryFactory.create(type='git', url='https://example.com')
with patch('pontoon.base.models.update_from_vcs') as update_from_vcs, \
patch('pontoon.base.models.get_revision') as mock_get_revision:
mock_get_revision.return_value = 'asdf'
assert_equal(repo.pull(), {'single_locale': 'asdf'})
update_from_vcs.assert_called_with(
'git',
'https://example.com',
repo.checkout_path,
''
)
def test_pull_multi_locale(self):
"""
If the repo is multi-locale, pull all of the repos for the
active locales.
"""
locale1 = LocaleFactory.create(code='locale1')
locale2 = LocaleFactory.create(code='locale2')
repo = RepositoryFactory.create(
type='git',
url='https://example.com/{locale_code}/',
project__locales=[locale1, locale2]
)
repo.locale_url = lambda locale: 'https://example.com/' + locale.code
repo.locale_checkout_path = lambda locale: '/media/' + locale.code
with patch('pontoon.base.models.update_from_vcs') as update_from_vcs, \
patch('pontoon.base.models.get_revision') as mock_get_revision:
# Return path as the revision so different locales return
# different values.
mock_get_revision.side_effect = lambda type, path: path
assert_equal(repo.pull(), {
'locale1': '/media/locale1',
'locale2': '/media/locale2'
})
update_from_vcs.assert_has_calls([
call('git', 'https://example.com/locale1', '/media/locale1', ''),
call('git', 'https://example.com/locale2', '/media/locale2', '')
])
def test_commit(self):
repo = RepositoryFactory.create(type='git', url='https://example.com')
with patch('pontoon.base.models.commit_to_vcs') as commit_to_vcs:
repo.commit('message', 'author', 'path')
commit_to_vcs.assert_called_with(
'git',
'path',
'message',
'author',
'',
'https://example.com',
)
def test_commit_multi_locale(self):
"""
If the repo is multi-locale, use the url from url_for_path for
committing.
"""
repo = RepositoryFactory.create(
type='git',
url='https://example.com/{locale_code}/',
)
repo.url_for_path = Mock(return_value='https://example.com/for_path')
with patch('pontoon.base.models.commit_to_vcs') as commit_to_vcs:
repo.commit('message', 'author', 'path')
commit_to_vcs.assert_called_with(
'git',
'path',
'message',
'author',
'',
'https://example.com/for_path',
)
repo.url_for_path.assert_called_with('path')
class UserTranslationManagerTests(TestCase):
@override_settings(EXCLUDE=('excluded@example.com',))
def test_excluded_contributors(self):
"""
Checks that contributors with emails in settings.EXCLUDE are excluded
from the top contributors list.
"""
included_contributor = TranslationFactory.create(user__email='included@example.com').user
excluded_contributor = TranslationFactory.create(user__email='excluded@example.com').user
top_contributors = User.translators.with_translation_counts()
assert_true(included_contributor in top_contributors)
assert_true(excluded_contributor not in top_contributors)
def test_users_without_translations(self):
"""
Checks that contributors without translations aren't returned.
"""
active_contributor = TranslationFactory.create(user__email='active@example.com').user
inactive_contributor = UserFactory.create(email='inactive@example.com')
top_contributors = User.translators.with_translation_counts()
assert_true(active_contributor in top_contributors)
assert_true(inactive_contributor not in top_contributors)
def test_unique_translations(self):
"""
Checks that contributors whose only translations are identical to existing ones aren't returned.
"""
unique_translator = TranslationFactory.create().user
identical_translator = IdenticalTranslationFactory.create().user
top_contributors = User.translators.with_translation_counts()
assert_true(unique_translator in top_contributors)
assert_true(identical_translator not in top_contributors)
def test_contributors_order(self):
"""
Checks if users are ordered by count of contributions.
"""
contributors = [
self.create_contributor_with_translation_counts(2),
self.create_contributor_with_translation_counts(4),
self.create_contributor_with_translation_counts(9),
self.create_contributor_with_translation_counts(1),
self.create_contributor_with_translation_counts(6),
]
assert_equal(list(User.translators.with_translation_counts()), [
contributors[2],
contributors[4],
contributors[1],
contributors[0],
contributors[3]])
def test_contributors_limit(self):
"""
Checks that the proper number of users is returned.
"""
TranslationFactory.create_batch(110)
top_contributors = User.translators.with_translation_counts()
assert_equal(top_contributors.count(), 100)
def create_contributor_with_translation_counts(self, approved=0, unapproved=0, needs_work=0, **kwargs):
"""
Helper method; creates a contributor with the given translation counts.
"""
contributor = UserFactory.create()
TranslationFactory.create_batch(approved, user=contributor, approved=True, **kwargs)
TranslationFactory.create_batch(unapproved, user=contributor, approved=False, fuzzy=False, **kwargs)
TranslationFactory.create_batch(needs_work, user=contributor, fuzzy=True, **kwargs)
return contributor
def test_translation_counts(self):
"""
Checks that translation counts are calculated properly.
Test creates 3 contributors with different numbers of translations and checks that their counts match.
"""
first_contributor = self.create_contributor_with_translation_counts(approved=7, unapproved=3, needs_work=2)
second_contributor = self.create_contributor_with_translation_counts(approved=5, unapproved=9, needs_work=2)
third_contributor = self.create_contributor_with_translation_counts(approved=1, unapproved=2, needs_work=5)
top_contributors = User.translators.with_translation_counts()
assert_equal(top_contributors.count(), 3)
assert_equal(top_contributors[0], second_contributor)
assert_equal(top_contributors[1], first_contributor)
assert_equal(top_contributors[2], third_contributor)
assert_attributes_equal(top_contributors[0], translations_count=16,
translations_approved_count=5, translations_unapproved_count=9,
translations_needs_work_count=2)
assert_attributes_equal(top_contributors[1], translations_count=12,
translations_approved_count=7, translations_unapproved_count=3,
translations_needs_work_count=2)
assert_attributes_equal(top_contributors[2], translations_count=8,
translations_approved_count=1, translations_unapproved_count=2,
translations_needs_work_count=5)
def test_period_filters(self):
"""
Total counts should be filtered by the given date.
Test creates 2 contributors with different activity periods and checks that they are filtered properly.
"""
first_contributor = self.create_contributor_with_translation_counts(approved=12, unapproved=1, needs_work=2,
date=aware_datetime(2015, 3, 2))
# Second contributor
self.create_contributor_with_translation_counts(approved=2, unapproved=11, needs_work=2,
date=aware_datetime(2015, 6, 1))
TranslationFactory.create_batch(5, approved=True, user=first_contributor, date=aware_datetime(2015, 7, 2))
top_contributors = User.translators.with_translation_counts(aware_datetime(2015, 6, 10))
assert_equal(top_contributors.count(), 1)
assert_attributes_equal(top_contributors[0], translations_count=5,
translations_approved_count=5, translations_unapproved_count=0,
translations_needs_work_count=0)
top_contributors = User.translators.with_translation_counts(aware_datetime(2015, 5, 10))
assert_equal(top_contributors.count(), 2)
assert_attributes_equal(top_contributors[0], translations_count=15,
translations_approved_count=2, translations_unapproved_count=11,
translations_needs_work_count=2)
assert_attributes_equal(top_contributors[1], translations_count=5,
translations_approved_count=5, translations_unapproved_count=0,
translations_needs_work_count=0)
top_contributors = User.translators.with_translation_counts(aware_datetime(2015, 1, 10))
assert_equal(top_contributors.count(), 2)
assert_attributes_equal(top_contributors[0], translations_count=20,
translations_approved_count=17, translations_unapproved_count=1,
translations_needs_work_count=2)
assert_attributes_equal(top_contributors[1], translations_count=15,
translations_approved_count=2, translations_unapproved_count=11,
translations_needs_work_count=2)
def test_query_args_filtering(self):
"""
Tests if query args are honored properly and contributors are filtered.
"""
locale_first, locale_second = LocaleFactory.create_batch(2)
first_contributor = self.create_contributor_with_translation_counts(
approved=12, unapproved=1, needs_work=2, locale=locale_first)
second_contributor = self.create_contributor_with_translation_counts(
approved=11, unapproved=1, needs_work=2, locale=locale_second)
third_contributor = self.create_contributor_with_translation_counts(
approved=10, unapproved=12, needs_work=2, locale=locale_first)
# Testing filtering for the first locale
top_contributors = User.translators.with_translation_counts(aware_datetime(2015, 1, 1), Q(translation__locale=locale_first))
assert_equal(top_contributors.count(), 2)
assert_equal(top_contributors[0], third_contributor)
assert_attributes_equal(top_contributors[0], translations_count=24,
translations_approved_count=10, translations_unapproved_count=12,
translations_needs_work_count=2)
assert_equal(top_contributors[1], first_contributor)
assert_attributes_equal(top_contributors[1], translations_count=15,
translations_approved_count=12, translations_unapproved_count=1,
translations_needs_work_count=2)
# Testing filtering for the second locale
top_contributors = User.translators.with_translation_counts(aware_datetime(2015, 1, 1), Q(translation__locale=locale_second))
assert_equal(top_contributors.count(), 1)
assert_equal(top_contributors[0], second_contributor)
assert_attributes_equal(top_contributors[0], translations_count=14,
translations_approved_count=11, translations_unapproved_count=1,
translations_needs_work_count=2)
class EntityTests(TestCase):
def setUp(self):
self.locale = LocaleFactory.create(
cldr_plurals="0,1"
)
self.project = ProjectFactory.create(
locales=[self.locale]
)
self.main_resource = ResourceFactory.create(
project=self.project,
path='main.lang'
)
self.other_resource = ResourceFactory.create(
project=self.project,
path='other.lang'
)
self.main_entity = EntityFactory.create(
resource=self.main_resource,
string='Source String',
string_plural='Plural Source String',
key='Source String'
)
self.other_entity = EntityFactory.create(
resource=self.other_resource,
string='Other Source String',
key='Key' + KEY_SEPARATOR + 'Other Source String'
)
self.main_translation = TranslationFactory.create(
entity=self.main_entity,
locale=self.locale,
plural_form=0,
string='Translated String'
)
self.main_translation_plural = TranslationFactory.create(
entity=self.main_entity,
locale=self.locale,
plural_form=1,
string='Translated Plural String'
)
self.other_translation = TranslationFactory.create(
entity=self.other_entity,
locale=self.locale,
string='Other Translated String'
)
self.subpage = SubpageFactory.create(
project=self.project,
name='Subpage',
resources=[self.main_resource]
)
def assert_serialized_entity(self, entity, path, original, translation):
assert_equal(entity['path'], path)
assert_equal(entity['original'], original)
assert_equal(entity['translation'][0]['string'], translation)
def test_for_project_locale_filter(self):
"""
Evaluate entity filtering by locale, project and obsolete status.
"""
other_locale = LocaleFactory.create()
other_project = ProjectFactory.create(
locales=[self.locale, other_locale]
)
# Obsolete_entity
EntityFactory.create(
obsolete=True,
resource=self.main_resource,
string='Obsolete String'
)
entities = Entity.for_project_locale(self.project, other_locale)
assert_equal(len(entities), 0)
entities = Entity.for_project_locale(other_project, self.locale)
assert_equal(len(entities), 0)
entities = Entity.for_project_locale(self.project, self.locale)
assert_equal(len(entities), 2)
def test_for_project_locale_no_paths(self):
"""
If paths not specified, return all project entities along with their
translations for locale.
"""
entities = Entity.map_entities(self.locale, Entity.for_project_locale(self.project, self.locale))
assert_equal(len(entities), 2)
self.assert_serialized_entity(
entities[0], 'main.lang', 'Source String', 'Translated String')
self.assert_serialized_entity(
entities[1], 'other.lang', 'Other Source String', 'Other Translated String')
# Ensure all attributes are assigned correctly
assert_equal(entities[0], {
'comment': '',
'format': 'po',
'obsolete': False,
'marked': 'Source String',
'key': '',
'path': 'main.lang',
'translation': [{
'pk': self.main_translation.pk,
'fuzzy': False,
'string': 'Translated String',
'approved': False
}, {
'pk': self.main_translation_plural.pk,
'fuzzy': False,
'string': 'Translated Plural String',
'approved': False
}],
'order': 0,
'source': [],
'original_plural': 'Plural Source String',
'marked_plural': 'Plural Source String',
'pk': self.main_entity.pk,
'original': 'Source String',
'visible': False,
})
def test_for_project_locale_paths(self):
"""
If paths specified, return project entities from these paths only along
with their translations for locale.
"""
paths = ['other.lang']
entities = Entity.map_entities(self.locale, Entity.for_project_locale(self.project, self.locale, paths))
assert_equal(len(entities), 1)
self.assert_serialized_entity(
entities[0], 'other.lang', 'Other Source String', 'Other Translated String')
def test_for_project_locale_subpages(self):
"""
If paths specified as subpages, return project entities from paths
assigned to these subpages only along with their translations for
locale.
"""
subpages = [self.subpage.name]
entities = Entity.map_entities(self.locale, Entity.for_project_locale(self.project, self.locale, subpages))
assert_equal(len(entities), 1)
self.assert_serialized_entity(
entities[0], 'main.lang', 'Source String', 'Translated String')
def test_for_project_locale_plurals(self):
"""
For pluralized strings, return all available plural forms.
"""
entities = Entity.map_entities(self.locale, Entity.for_project_locale(self.project, self.locale))
assert_equal(entities[0]['original'], 'Source String')
assert_equal(entities[0]['original_plural'], 'Plural Source String')
assert_equal(entities[0]['translation'][0]['string'], 'Translated String')
assert_equal(entities[0]['translation'][1]['string'], 'Translated Plural String')
def test_for_project_locale_order(self):
"""
Return entities in correct order.
"""
# First entity
EntityFactory.create(
order=1,
resource=self.main_resource,
string='Second String'
)
# Second entity
EntityFactory.create(
order=0,
resource=self.main_resource,
string='First String'
)
entities = Entity.map_entities(self.locale, Entity.for_project_locale(self.project, self.locale))
assert_equal(entities[1]['original'], 'First String')
assert_equal(entities[2]['original'], 'Second String')
def test_for_project_locale_cleaned_key(self):
"""
If the key contains the source string and the Translate Toolkit separator,
remove them.
"""
entities = Entity.map_entities(self.locale, Entity.for_project_locale(self.project, self.locale))
assert_equal(entities[0]['key'], '')
assert_equal(entities[1]['key'], 'Key')
class LocaleTests(TestCase):
def test_get_latest_activity_with_latest(self):
"""
If the locale has a latest_translation and no project is given,
return it.
"""
translation = TranslationFactory.create()
locale = LocaleFactory.create(latest_translation=translation)
assert_equal(locale.get_latest_activity(), translation.latest_activity)
def test_get_latest_activity_without_latest(self):
"""
If the locale doesn't have a latest_translation and no project
is given, return None.
"""
locale = LocaleFactory.create(latest_translation=None)
assert_is_none(locale.get_latest_activity())
def test_get_latest_activity_with_project(self):
"""
If a project is given, defer to
ProjectLocale.get_latest_activity.
"""
locale = LocaleFactory.create()
project = ProjectFactory.create(locales=[locale])
with patch.object(ProjectLocale, 'get_latest_activity') as mock_get_latest_activity:
mock_get_latest_activity.return_value = 'latest'
assert_equal(locale.get_latest_activity(project=project), 'latest')
mock_get_latest_activity.assert_called_with(locale, project)
def test_translators_group(self):
"""
Tests that a user has permission to translate locales after assignment.
"""
user = UserFactory.create()
[first_locale, second_locale] = LocaleFactory.create_batch(2)
assert_equal(user.has_perm('base.can_translate_locale'), False)
assert_equal(user.has_perm('base.can_translate_locale', first_locale), False)
assert_equal(user.has_perm('base.can_translate_locale', second_locale), False)
user.groups.add(second_locale.translators_group)
assert_equal(user.has_perm('base.can_translate_locale'), False)
assert_equal(user.has_perm('base.can_translate_locale', first_locale), False)
assert_equal(user.has_perm('base.can_translate_locale', second_locale), True)
user.groups.add(first_locale.translators_group)
assert_equal(user.has_perm('base.can_translate_locale'), False)
assert_equal(user.has_perm('base.can_translate_locale', first_locale), True)
assert_equal(user.has_perm('base.can_translate_locale', second_locale), True)
def test_managers_group(self):
"""
Tests that a user has permission to manage and translate locales after assignment.
"""
user = UserFactory.create()
[first_locale, second_locale] = LocaleFactory.create_batch(2)
assert_equal(user.has_perm('base.can_translate_locale'), False)
assert_equal(user.has_perm('base.can_translate_locale', first_locale), False)
assert_equal(user.has_perm('base.can_translate_locale', second_locale), False)
assert_equal(user.has_perm('base.can_manage_locale'), False)
assert_equal(user.has_perm('base.can_manage_locale', first_locale), False)
assert_equal(user.has_perm('base.can_manage_locale', second_locale), False)
user.groups.add(second_locale.managers_group)
assert_equal(user.has_perm('base.can_translate_locale'), False)
assert_equal(user.has_perm('base.can_translate_locale', first_locale), False)
assert_equal(user.has_perm('base.can_translate_locale', second_locale), True)
assert_equal(user.has_perm('base.can_manage_locale'), False)
assert_equal(user.has_perm('base.can_manage_locale', first_locale), False)
assert_equal(user.has_perm('base.can_manage_locale', second_locale), True)
user.groups.add(first_locale.managers_group)
assert_equal(user.has_perm('base.can_translate_locale'), False)
assert_equal(user.has_perm('base.can_translate_locale', first_locale), True)
assert_equal(user.has_perm('base.can_translate_locale', second_locale), True)
assert_equal(user.has_perm('base.can_manage_locale'), False)
assert_equal(user.has_perm('base.can_manage_locale', first_locale), True)
assert_equal(user.has_perm('base.can_manage_locale', second_locale), True)
class ProjectLocaleTests(TestCase):
def setUp(self):
super(ProjectLocaleTests, self).setUp()
self.locale = LocaleFactory.create()
self.project = ProjectFactory.create()
def test_get_latest_activity_doesnt_exist(self):
"""
If no ProjectLocale exists with the given project/locale,
return None.
"""
assert_false(ProjectLocale.objects
.filter(project=self.project, locale=self.locale)
.exists())
assert_is_none(ProjectLocale.get_latest_activity(self.project, self.locale))
def test_get_latest_activity_no_latest(self):
"""
If the matching ProjectLocale has no latest_translation, return
None.
"""
ProjectLocaleFactory.create(
project=self.project,
locale=self.locale,
latest_translation=None
)
assert_is_none(ProjectLocale.get_latest_activity(self.project, self.locale))
def test_get_latest_activity_success(self):
"""
If the matching ProjectLocale has a latest_translation, return
its latest_activity.
"""
translation = TranslationFactory.create(
locale=self.locale,
entity__resource__project=self.project
)
ProjectLocaleFactory.create(
project=self.project,
locale=self.locale,
latest_translation=translation
)
assert_equal(
ProjectLocale.get_latest_activity(self.project, self.locale),
translation.latest_activity
)
def test_translators_group(self):
"""
Tests if user has permission to translate a project at a specific locale after assignment.
"""
project_locale = ProjectLocaleFactory.create(
project=self.project,
locale=self.locale,
latest_translation=None,
has_custom_translators=True,
)
user = UserFactory.create()
assert_equal(user.can_translate(locale=self.locale, project=self.project), False)
user.groups.add(project_locale.translators_group)
assert_equal(user.can_translate(locale=self.locale, project=self.project), True)
project_locale.has_custom_translators = False
project_locale.save()
assert_equal(user.can_translate(locale=self.locale, project=self.project), False)
class TranslationTests(TestCase):
def assert_latest_translation(self, instance, translation):
instance.refresh_from_db()
assert_equal(instance.latest_translation, translation)
def test_save_latest_translation_update(self):
"""
When a translation is saved, update the latest_translation
attribute on the related project, locale, translatedresource,
and project_locale objects.
"""
locale = LocaleFactory.create(latest_translation=None)
project = ProjectFactory.create(locales=[locale], latest_translation=None)
resource = ResourceFactory.create(project=project)
translatedresource = TranslatedResourceFactory.create(locale=locale, resource=resource, latest_translation=None)
project_locale = ProjectLocale.objects.get(locale=locale, project=project)
assert_is_none(locale.latest_translation)
assert_is_none(project.latest_translation)
assert_is_none(translatedresource.latest_translation)
assert_is_none(project_locale.latest_translation)
translation = TranslationFactory.create(
locale=locale,
entity__resource=resource,
date=aware_datetime(1970, 1, 1)
)
self.assert_latest_translation(locale, translation)
self.assert_latest_translation(project, translation)
self.assert_latest_translation(translatedresource, translation)
self.assert_latest_translation(project_locale, translation)
# Ensure translation is replaced for newer translations
newer_translation = TranslationFactory.create(
locale=locale,
entity__resource=resource,
date=aware_datetime(1970, 2, 1)
)
self.assert_latest_translation(locale, newer_translation)
self.assert_latest_translation(project, newer_translation)
self.assert_latest_translation(translatedresource, newer_translation)
self.assert_latest_translation(project_locale, newer_translation)
# Ensure translation isn't replaced for older translations.
TranslationFactory.create(
locale=locale,
entity__resource=resource,
date=aware_datetime(1970, 1, 5)
)
self.assert_latest_translation(locale, newer_translation)
self.assert_latest_translation(project, newer_translation)
self.assert_latest_translation(translatedresource, newer_translation)
self.assert_latest_translation(project_locale, newer_translation)
# Ensure approved_date is taken into consideration as well.
newer_approved_translation = TranslationFactory.create(
locale=locale,
entity__resource=resource,
approved_date=aware_datetime(1970, 3, 1)
)
self.assert_latest_translation(locale, newer_approved_translation)
self.assert_latest_translation(project, newer_approved_translation)
self.assert_latest_translation(translatedresource, newer_approved_translation)
self.assert_latest_translation(project_locale, newer_approved_translation)
def test_save_latest_translation_missing_project_locale(self):
"""
If a translation is saved for a locale that isn't active on the
project, do not fail due to a missing ProjectLocale.
"""
locale = LocaleFactory.create(latest_translation=None)
project = ProjectFactory.create(latest_translation=None)
resource = ResourceFactory.create(project=project)
translatedresource = TranslatedResourceFactory.create(locale=locale, resource=resource, latest_translation=None)
# This calls .save(), which would fail if we're not properly
# handling the missing ProjectLocale.
translation = TranslationFactory.create(
locale=locale,
entity__resource=resource,
date=aware_datetime(1970, 1, 1)
)
self.assert_latest_translation(locale, translation)
self.assert_latest_translation(project, translation)
self.assert_latest_translation(translatedresource, translation)
def test_approved_translation_in_memory(self):
"""
Every save of approved translation should generate a new
entry in the translation memory.
"""
translation = TranslationFactory.create(approved=True)
assert TranslationMemoryEntry.objects.get(
source=translation.entity.string,
target=translation.string,
locale=translation.locale
)
def test_unapproved_translation_in_memory(self):
"""
Unapproved translation shouldn't be in the translation memory.
"""
translation = TranslationFactory.create(approved=False)
with assert_raises(TranslationMemoryEntry.DoesNotExist):
TranslationMemoryEntry.objects.get(
source=translation.entity.string,
target=translation.string,
locale=translation.locale
)
def test_removed_translation(self):
"""
Suggestions should be available even after an Entity or
Translation has been removed.
"""
translation = TranslationFactory.create(approved=True)
assert TranslationMemoryEntry.objects.get(
source=translation.entity.string,
target=translation.string,
locale=translation.locale
)
entity = translation.entity
translation.delete()
assert TranslationMemoryEntry.objects.get(
source=translation.entity.string,
target=translation.string,
locale=translation.locale
)
entity.delete()
assert TranslationMemoryEntry.objects.get(
source=translation.entity.string,
target=translation.string,
locale=translation.locale
)
class EntityFilterTests(TestCase):
"""
Tests all filters provided by the entity manager.
"""
def setUp(self):
self.locale = LocaleFactory.create()
self.plural_locale = LocaleFactory.create(cldr_plurals='1,5')
def test_translated(self):
first_entity, second_entity, third_entity = EntityFactory.create_batch(3)
TranslationFactory.create(
locale=self.locale,
entity=first_entity,
approved=True
)
TranslationFactory.create(
locale=self.locale,
entity=second_entity,
fuzzy=True
)
TranslationFactory.create(
locale=self.locale,
entity=third_entity,
approved=True
)
assert_equal(
{first_entity, third_entity},
set(Entity.objects.with_status_counts(self.locale).filter(Entity.objects.translated()))
)
def test_translated_plurals(self):
first_entity, second_entity, third_entity = PluralEntityFactory.create_batch(3)
TranslationFactory.create(
locale=self.plural_locale,
entity=first_entity,
approved=True,
plural_form=0
)
TranslationFactory.create(
locale=self.plural_locale,
entity=first_entity,
approved=True,
plural_form=1
)
TranslationFactory.create(
locale=self.plural_locale,
entity=second_entity,
approved=True,
plural_form=0
)
TranslationFactory.create(
locale=self.plural_locale,
entity=third_entity,
approved=True,
plural_form=0
)
TranslationFactory.create(
locale=self.plural_locale,
entity=third_entity,
approved=True,
plural_form=1
)
assert_equal(
{first_entity, third_entity},
set(Entity.objects.with_status_counts(self.plural_locale).filter(Entity.objects.translated()))
)
def test_fuzzy(self):
first_entity, second_entity, third_entity = EntityFactory.create_batch(3)
TranslationFactory.create(
locale=self.locale,
entity=first_entity,
fuzzy=True
)
TranslationFactory.create(
locale=self.locale,
entity=second_entity,
approved=True
)
TranslationFactory.create(
locale=self.locale,
entity=third_entity,
fuzzy=True
)
assert_equal(
{first_entity, third_entity},
set(Entity.objects.with_status_counts(self.locale).filter(Entity.objects.fuzzy()))
)
def test_fuzzy_plurals(self):
first_entity, second_entity, third_entity = PluralEntityFactory.create_batch(3)
TranslationFactory.create(
locale=self.plural_locale,
entity=first_entity,
fuzzy=True,
plural_form=0
)
TranslationFactory.create(
locale=self.plural_locale,
entity=first_entity,
fuzzy=True,
plural_form=1
)
TranslationFactory.create(
locale=self.plural_locale,
entity=second_entity,
fuzzy=True,
plural_form=0
)
TranslationFactory.create(
locale=self.plural_locale,
entity=third_entity,
fuzzy=True,
plural_form=0
)
TranslationFactory.create(
locale=self.plural_locale,
entity=third_entity,
fuzzy=True,
plural_form=1
)
assert_equal(
{first_entity, third_entity},
set(Entity.objects.with_status_counts(self.plural_locale).filter(Entity.objects.fuzzy()))
)
def test_missing(self):
first_entity, second_entity, third_entity = EntityFactory.create_batch(3)
TranslationFactory.create(
locale=self.locale,
entity=first_entity,
approved=True
)
TranslationFactory.create(
locale=self.locale,
entity=third_entity
)
TranslationFactory.create(
locale=self.locale,
entity=third_entity
)
assert_equal(
{second_entity},
set(Entity.objects.with_status_counts(self.locale).filter(Entity.objects.missing()))
)
def test_partially_translated_plurals(self):
first_entity, second_entity, third_entity = PluralEntityFactory.create_batch(3,
string='Unchanged string',
string_plural='Unchanged plural string'
)
TranslationFactory.create(
locale=self.plural_locale,
entity=first_entity,
plural_form=0
)
TranslationFactory.create(
locale=self.plural_locale,
entity=first_entity,
plural_form=1
)
TranslationFactory.create(
locale=self.plural_locale,
entity=second_entity,
plural_form=0
)
assert_equal(
{second_entity, third_entity},
set(Entity.objects.with_status_counts(self.plural_locale).filter(Entity.objects.missing()))
)
def test_suggested(self):
first_entity, second_entity, third_entity = EntityFactory.create_batch(3)
TranslationFactory.create(
locale=self.locale,
entity=second_entity,
approved=False,
fuzzy=False,
)
TranslationFactory.create(
locale=self.locale,
entity=third_entity,
approved=False,
fuzzy=False,
)
assert_equal(
{second_entity, third_entity},
set(Entity.objects.with_status_counts(self.locale).filter(Entity.objects.suggested()))
)
def test_unchanged(self):
first_entity, second_entity, third_entity = EntityFactory.create_batch(3, string='Unchanged string')
TranslationFactory.create(
locale=self.locale,
entity=first_entity,
approved=True,
string='Unchanged string'
)
TranslationFactory.create(
locale=self.locale,
entity=third_entity,
fuzzy=True,
string='Unchanged string'
)
assert_equal(
{first_entity, third_entity},
set(Entity.objects.with_status_counts(self.locale).filter(Entity.objects.unchanged()))
)
def test_missing_plural(self):
first_entity, second_entity, third_entity = PluralEntityFactory.create_batch(3)
TranslationFactory.create(
locale=self.plural_locale,
entity=first_entity,
fuzzy=True,
plural_form=0,
)
TranslationFactory.create(
locale=self.plural_locale,
entity=first_entity,
fuzzy=True,
plural_form=1,
)
TranslationFactory.create(
locale=self.plural_locale,
entity=third_entity,
approved=True,
plural_form=0,
)
TranslationFactory.create(
locale=self.plural_locale,
entity=third_entity,
approved=True,
plural_form=1,
)
assert_equal(
{second_entity},
set(Entity.objects.with_status_counts(self.plural_locale).filter(Entity.objects.missing()))
)
def test_suggested_plural(self):
first_entity, second_entity, third_entity = PluralEntityFactory.create_batch(3)
TranslationFactory.create(
locale=self.plural_locale,
entity=first_entity,
approved=False,
fuzzy=False,
plural_form=0,
)
TranslationFactory.create(
locale=self.plural_locale,
entity=first_entity,
approved=False,
fuzzy=False,
plural_form=1,
)
TranslationFactory.create(
locale=self.plural_locale,
entity=third_entity,
approved=False,
fuzzy=False,
plural_form=0,
)
TranslationFactory.create(
locale=self.plural_locale,
entity=third_entity,
approved=False,
fuzzy=False,
plural_form=1,
)
assert_equal(
{first_entity, third_entity},
set(Entity.objects.with_status_counts(self.plural_locale).filter(Entity.objects.suggested()))
)
def test_unchanged_plural(self):
first_entity, second_entity, third_entity = PluralEntityFactory.create_batch(3,
string='Unchanged string',
string_plural='Unchanged plural string'
)
TranslationFactory.create(
locale=self.plural_locale,
entity=first_entity,
approved=True,
plural_form=0,
string='Unchanged string'
)
TranslationFactory.create(
locale=self.plural_locale,
entity=first_entity,
approved=True,
plural_form=1,
string='Unchanged plural string'
)
TranslationFactory.create(
locale=self.plural_locale,
entity=third_entity,
fuzzy=True,
plural_form=0,
string='Unchanged string'
)
TranslationFactory.create(
locale=self.plural_locale,
entity=third_entity,
fuzzy=True,
plural_form=1,
string='Unchanged plural string'
)
assert_equal(
{first_entity, third_entity},
set(Entity.objects.with_status_counts(self.plural_locale).filter(Entity.objects.unchanged()))
)
def test_has_suggestions_plural(self):
first_entity, second_entity, third_entity = PluralEntityFactory.create_batch(3,
string='Unchanged string',
string_plural='Unchanged plural string'
)
TranslationFactory.create(
locale=self.plural_locale,
entity=first_entity,
approved=True,
fuzzy=False,
plural_form=0
)
TranslationFactory.create(
locale=self.plural_locale,
entity=first_entity,
approved=True,
fuzzy=False,
plural_form=1,
)
TranslationFactory.create(
locale=self.plural_locale,
entity=first_entity,
approved=False,
fuzzy=False,
plural_form=2,
)
TranslationFactory.create(
locale=self.plural_locale,
entity=third_entity,
approved=True,
fuzzy=False,
plural_form=0,
)
TranslationFactory.create(
locale=self.plural_locale,
entity=third_entity,
approved=True,
fuzzy=False,
plural_form=1
)
TranslationFactory.create(
locale=self.plural_locale,
entity=third_entity,
approved=False,
fuzzy=False,
plural_form=2
)
assert_equal(
{first_entity, third_entity},
set(Entity.objects.with_status_counts(self.plural_locale).filter(Entity.objects.has_suggestions()))
)
def test_combined_filters(self):
"""
All filters should be joined by AND instead of OR.
Tests the filters against the regression introduced in bug 1243115.
"""
contributor = UserFactory.create()
project = ProjectFactory.create()
first_entity, second_entity = EntityFactory.create_batch(2, resource__project=project)
TranslationFactory.create(
locale=self.locale,
entity=first_entity,
approved=True,
fuzzy=False,
user=contributor,
)
TranslationFactory.create(
locale=self.locale,
entity=second_entity,
approved=True,
fuzzy=False,
user=contributor
)
TranslationFactory.create(
locale=self.locale,
entity=second_entity,
approved=False,
fuzzy=False,
user=contributor,
)
assert_equal(
list(Entity.for_project_locale(
project,
self.locale,
statuses='suggested',
authors=contributor.email
)),
[]
)
assert_equal(
list(Entity.for_project_locale(
project,
self.locale,
statuses='suggested',
time='201001010100-205001010100'
)),
[]
)
|
|
#!/usr/bin/env dls-python2.7
"""Selection of Qt plots that show information about the I10 chicane.
Plots are available for use in either the beamline or accelerator GUIs.
"""
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from matplotlib.backends.backend_qt4agg import (
FigureCanvasQTAgg as FigureCanvas)
import scipy.integrate as integ
import controls
import cothread
class BaseFigureCanvas(FigureCanvas):
"""Initialise the figures for plotting."""
def __init__(self):
"""
Set up plot.
Initialise figure canvas, set plot background to be blue and
transparent (goes blue for simulation mode), get instance of PvMonitors
to receive updated PV values.
"""
self.figure = plt.figure()
FigureCanvas.__init__(self, self.figure)
self.figure.patch.set_facecolor('blue')
self.figure.patch.set_alpha(0.0)
self.pv_monitor = controls.PvMonitors.get_instance()
class Simulation(BaseFigureCanvas):
"""Plot the simulation of the I10 fast chicane."""
def __init__(self, straight):
"""Initialise the straight, axes, animation and graph shading."""
BaseFigureCanvas.__init__(self)
self.straight = straight
self.fill1 = None
self.fill2 = None
self.ax = self.fig_setup()
self.beams = self.data_setup()
self.anim = animation.FuncAnimation(self.figure, self.animate,
init_func=self.init_data, frames=1000, interval=20)
def fig_setup(self):
"""Set up axes."""
ax1 = self.figure.add_subplot(1, 1, 1)
ax1.set_xlim(self.straight.data.path[0].s,
self.straight.data.path[-1].s)
ax1.set_ylim(-0.01, 0.01)
ax1.set_xlabel('Distance along the straight/m')
ax1.set_ylabel('Deflection off axis/m')  # TODO: confirm the units (metres?)
# Plot positions of kickers and IDs.
for i in self.straight.data.kickers:
ax1.axvline(x=i.s, color='k', linestyle='dashed')
for i in self.straight.data.ids:
ax1.axvline(x=i.s, color='b', linestyle='dashed')
plt.tight_layout()
return ax1
def data_setup(self):
"""Set up data for the animation."""
beams = [
self.ax.plot([], [], 'b')[0],
self.ax.plot([], [], 'r')[0],
self.ax.plot([], [], 'r')[0]
]
return beams
def init_data(self):
for line in self.beams:
line.set_data([], [])
return self.beams
def beam_plot(self, t):
"""
Extract electron and photon beam positions from data for plotting.
Args:
t (int): time counter for the animation
Returns:
e_positions (numpy array): electron beam data without duplicated
values for plotting
p_positions (numpy array): photon position data (remove velocity
data as not needed for plotting)
"""
e_positions = np.array(self.straight.step(t)[0])[:, 0].tolist()
# Remove duplicates in data.
for i in range(len(self.straight.data.get_elements('drift'))):
if e_positions[i] == e_positions[i+1]:
e_positions.pop(i+1)
p_positions = np.array(self.straight.step(t)[1])[:, [0, 2]]
return e_positions, p_positions
def animate(self, t):
"""
Animation function.
Set data for animation frame at time t.
Args:
t (int): time counter for the animation
Returns:
beams (list): list of lines to be plotted
"""
data = self.beam_plot(t)
e_data = data[0]
p_data = data[1]
beams = self.init_data()
beams[0].set_data(self.straight.data.xaxis, e_data)
for line, x, y in zip(beams[1:],
self.straight.data.photon_coordinates, p_data):
line.set_data(x, y)
return beams
def update_colourin(self):
"""Shade in the range over which each photon beam sweeps."""
if self.fill1:
self.ax.collections.remove(self.fill1)
if self.fill2:
self.ax.collections.remove(self.fill2)
strengths = [np.array([1, 1, 1, 0, 0]), np.array([0, 0, 1, 1, 1])]
edges = [[], []]
for s in range(2):
edges[s] = np.array(
self.straight.p_beam_range(strengths[s]))[:, [0, 2]]
beam1max = edges[0][0]
beam1min = edges[1][0]
beam2max = edges[1][1]
beam2min = edges[0][1]
self.fill1 = self.ax.fill_between(
self.straight.data.photon_coordinates[0],
beam1min, beam1max, facecolor='blue', alpha=0.2)
self.fill2 = self.ax.fill_between(
self.straight.data.photon_coordinates[1],
beam2min, beam2max, facecolor='green', alpha=0.2)
def magnet_limits(self):
"""Show maximum currents that can be passed through the magnets."""
max_currents = self.pv_monitor.get_max_currents()
strengths = [np.array([max_currents[0],
-max_currents[1],
max_currents[2], 0, 0]),
np.array([0, 0, max_currents[2],
-max_currents[3],
max_currents[4]])]
edges = [[], []]
for s in range(2):
edges[s] = np.array(self.straight.p_beam_lim(strengths[s])
)[:, [0, 2]]
beam1max = edges[0][0]
beam2max = edges[1][1]
self.ax.plot(self.straight.data.photon_coordinates[0],
beam1max, 'r--')
self.ax.plot(self.straight.data.photon_coordinates[1],
beam2max, 'r--')
class OverlaidWaveforms(BaseFigureCanvas):
"""
Overlay the two intensity peaks of the x-ray beams.
'Cuts out' two peaks from X-ray intensity trace corresponding to the two
X-ray beams and displays them overlaid. Calculates areas under peaks and
displays as a legend, plots Gaussian for visual comparison of peak shapes.
"""
def __init__(self, ctrls):
BaseFigureCanvas.__init__(self)
self.ax = self.figure.add_subplot(2, 1, 1)
self.controls = ctrls
self.pv_monitor = self.controls.PvMonitors.get_instance()
self.pv_monitor.register_trace_listener(self.update_waveforms)
self.pv_monitor.register_trace_listener(self.update_overlaid_plot)
trigger = self.pv_monitor.arrays[self.controls.Arrays.WAVEFORMS][0]
trace = self.pv_monitor.arrays[self.controls.Arrays.WAVEFORMS][1]
traces_x_axis = range(len(trace))
self.trace_lines = [
self.ax.plot(traces_x_axis, trigger, 'b')[0],
self.ax.plot(traces_x_axis, trace, 'g')[0]
]
self.ax.set_xlabel('Time samples')
self.ax.set_ylabel('Voltage/V')
self.ax.set_title('Square wave trigger signal and beam intensity trace')
self.ax2 = self.figure.add_subplot(2, 1, 2)
first_peak, second_peak = self.get_windowed_data(trigger, trace)
self.overlaid_x_axis = range(len(first_peak))
self.overlaid_lines = [
self.ax2.plot(self.overlaid_x_axis, first_peak, 'b')[0],
self.ax2.plot(self.overlaid_x_axis, second_peak, 'g')[0]
]
self.ax2.set_xlabel('Time samples')
self.ax2.set_ylabel('Voltage/V')
self.ax2.set_title('Beam intensity peaks overlaid')
plt.tight_layout()
def update_waveforms(self, key, _):
"""Update plot data whenever it changes."""
if key == self.controls.Arrays.WAVEFORMS:
self.trace_lines[0].set_ydata(self.pv_monitor.arrays[key][0])
self.trace_lines[1].set_ydata(self.pv_monitor.arrays[key][1])
self.draw()
def update_overlaid_plot(self, key, _):
"""Update overlaid plot data whenever it changes, calculate areas."""
if key == self.controls.Arrays.WAVEFORMS:
trigger = self.pv_monitor.arrays[self.controls.Arrays.WAVEFORMS][0]
trace = self.pv_monitor.arrays[self.controls.Arrays.WAVEFORMS][1]
waveforms = [trigger, trace]
first_peak, second_peak = self.get_windowed_data(waveforms[0], waveforms[1])
self.overlaid_lines[0].set_ydata(first_peak)
self.overlaid_lines[0].set_xdata(range(len(first_peak)))
self.overlaid_lines[1].set_ydata(second_peak)
self.overlaid_lines[1].set_xdata(range(len(second_peak)))
areas = [integ.simps(first_peak), integ.simps(second_peak)]
labels = ['%.1f' % areas[0], '%.1f' % areas[1]]
# for area in areas:
# if area < 0.1:
# raise RangeError # calculation warning error for example
self.ax2.legend([self.overlaid_lines[0], self.overlaid_lines[1]],
labels)
self.draw()
def get_windowed_data(self, trigger, trace):
"""Overlay the two peaks."""
try:
diff = np.diff(trigger)
length = len(trace)
# TODO: this parameter probably shouldn't be hard coded
stepvalue = 0.5
if min(diff) > -1 * stepvalue or max(diff) < stepvalue:
raise RangeError
maxtrig = next(x for x in diff if x > stepvalue)
mintrig = next(x for x in diff if x < -1 * stepvalue)
edges = [np.where(diff == maxtrig)[0][0],
np.where(diff == mintrig)[0][0]]
cothread.Yield()
trigger_length = (edges[1]-edges[0])*2
if length < trigger_length:
raise RangeError
if edges[1] > edges[0]: # So that colours don't swap around
first_peak = np.roll(trace[:trigger_length], - edges[0]
- trigger_length/4)[:trigger_length/2]
second_peak = np.roll(trace[:trigger_length], - edges[1]
- trigger_length/4)[:trigger_length/2]
else:
first_peak = np.roll(trace[:trigger_length], - edges[1]
- trigger_length/4)[:trigger_length/2]
second_peak = np.roll(trace[:trigger_length], - edges[0]
- trigger_length/4)[:trigger_length/2]
return first_peak, second_peak
except RangeError:
print 'Trace is partially cut off' # status bar? callback?
first_peak = [float('nan'), float('nan')]
second_peak = [float('nan'), float('nan')]
return first_peak, second_peak
def gaussian(self, amp_step, sigma_step):
"""
Plot a theoretical Gaussian for comparison with the x-ray peaks.
Initialise the amplitude and standard deviation from a caget of the
trigger and trace. Amplitude is the maximum value of the trace plus
amp_step; sigma is 1/8th of the length of a full trigger cycle plus
sigma_step.
Args:
amp_step (int): amount by which the amplitude is increased or
decreased from the default value
sigma_step (int): as above but for sigma, the standard deviation
"""
l = len(self.overlaid_x_axis)
x = np.linspace(0, l, l) - l/2 # centre of data
# This is new code to 'guess' the size of the Gaussian from the
# existing data rather than from hard-coded numbers.
# TODO: test this! Possibly link up to the get_windowed_data function
# as it uses a lot of the same functionality
trigger = self.pv_monitor.arrays[self.controls.Arrays.WAVEFORMS][0]
trace = self.pv_monitor.arrays[self.controls.Arrays.WAVEFORMS][1]
amplitude = max(trace) + amp_step
diff = np.diff(trigger)
stepvalue = 0.5
if min(diff) > -1 * stepvalue or max(diff) < stepvalue:
raise RangeError
else:
maxtrig = next(x for x in diff if x > stepvalue)
mintrig = next(x for x in diff if x < -1 * stepvalue)
edges = [np.where(diff == maxtrig)[0][0],
np.where(diff == mintrig)[0][0]]
half_trigger_length = (edges[1]-edges[0])
sigma = half_trigger_length/4 + sigma_step
gauss = self.ax2.plot(amplitude * np.exp(-x**2 / (2 * sigma**2)), 'r')
self.overlaid_lines.append(gauss)
self.draw()
def clear_gaussian(self):
"""Remove the Gaussian."""
self.ax2.lines.pop(-1)
self.ax2.relim()
self.ax2.autoscale_view()
self.draw()
class RangeError(Exception):
"""Raised when the trace data is partially cut off."""
pass
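# A minimal embedding sketch (illustrative only; `straight` is assumed to be the
# simulation model object that Simulation expects, and `layout` is any QLayout in
# the host Qt GUI -- FigureCanvasQTAgg is itself a QWidget, so it can be added
# directly):
#
#   canvas = Simulation(straight)
#   layout.addWidget(canvas)        # embed the animated plot in the GUI
#   canvas.update_colourin()        # shade the photon beam sweep ranges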
|
|
# -*- coding: utf-8 -*-
import traceback
# turbogears imports
from tg import expose, redirect, validate, flash, request, response, override_template, config
from tg.decorators import paginate
# third party imports
from repoze.what import predicates,authorize
from repoze.what.predicates import not_anonymous,in_group,has_permission
# project specific imports
from tribal.lib.base import BaseController
from tribal.model import *
from tribal.util.common import *
from tribal.widgets.order import *
__all__ = ['OrderController']
class OrderController(BaseController):
# Require an authenticated user for all actions in this controller.
allow_only = authorize.not_anonymous()
@expose('tribal.templates.order.index')
@paginate('collections',items_per_page = 25)
@tabFocus(tab_type="main")
def index(self, **kw):
if not kw: return dict(collections = [], values = {})
cut_no = kw.get("cutNo")
if not cut_no:
flash("Please input Cut No","warn")
redirect("/order/index")
results = self._query_tribal({"CutNbr":cut_no})
if len(results) == 0 :
flash("No such PO")
redirect("/order/index")
elif len(results) > 1:
flash("There are too many POs, please confirm")
return dict(collections = results, values = kw)
override_template(self.index, 'mako:tribal.templates.order.order_form_new')
return self.placeOrder({'result': results, 'customerPO': kw.get("customerPO","")})
# else:
# flash("No such order type!")
# redirect("/order/index")
@expose("tribal.templates.order.order_form_edit")
@tabFocus(tab_type="main")
def placeOrder(self, kw):
result = kw.get('result', '')
header = result[0]
billTos = DBSession.query(TRBBillTo).order_by(TRBBillTo.company).all()
shiptos = DBSession.query(TRBShipTo).order_by(TRBShipTo.company).all()
custom_po = kw.get('customerPO', '')
return {"msgHeader": header,
"msgDetail": header.details,
"billTos": billTos,
"shipTos": shiptos,
'custom_po': custom_po
}
@expose()
def saveOrder(self, **kw):
ph = DBSession.query(TRBHeaderPO).get(int(kw["msgID"]))
billTo = DBSession.query(TRBBillTo).get(int(kw['billCompany']))
shipTo = DBSession.query(TRBShipTo).get(int(kw['shipCompany']))
DBSession.begin(subtransactions=True)
try:
pd_list = []
params = {"header" : ph,
"customerPO" : kw.get('customerPO', ''),
"billTo" : billTo,
"shipTo" : shipTo,
#"issuedBy" : request.identity["user"],
#"lastModifyBy" : request.identity["user"],
}
order = TRBOrderFormHeader(**params)
DBSession.add(order)
order_details = []
for i in range(len(ph.details)):
if kw['quantity_%d' % ph.details[i].id] != "":
order_detail = TRBOrderFormDetail(header = order, detailPO = ph.details[i], quantity = int(kw['quantity_%d' % ph.details[i].id]))
order_details.append(order_detail)
DBSession.add_all(order_details)
DBSession.commit()
sendFrom = "r-pac-Tribal-ordering-system"
sendTo = config.trb_email_cc.split(";")
ccTo = config.trb_email_cc.split(";")
subject = "Order[%s] has been confirmed successfully!" % order.customerPO
text = ["Thank you for your confirmation!","You could view the order's detail information via the link below:",
"%s/order/viewOrder?id=%d" % (config.website_url,order.id),
"\n\n************************************************************************************",
"This e-mail is sent by the r-pac Tribal ordering system automatically.",
"Please don't reply this e-mail directly!",
"************************************************************************************"]
sendEmail(sendFrom,sendTo,subject,"\n".join(text),ccTo)
except:
traceback.print_exc()
DBSession.rollback()
flash("The service is not avaiable now,please try it later or contact the system administator.","warn")
raise
else:
flash("The order has been save successfully!")
redirect("/order/index")
@expose("tribal.templates.order.order_form_view")
@tabFocus(tab_type="main")
def viewOrder(self,**kw):
ph = DBSession.query(TRBOrderFormHeader).get(int(kw["id"]))
if len(ph.formDetails) < 1 :
flash("There's no order related to this PO!","warn")
redirect("/order/index")
return {"poheader" : ph,
"podetails" : ph.formDetails,
# "image_url" : image_url,
}
def _query_tribal(self, kw):
whereClassList = []
if kw.get("CutNbr", False):
whereClassList.append(TRBHeaderPO.cutNo == kw.get("CutNbr", ""))
if len(whereClassList):
obj = DBSession.query(TRBHeaderPO)
for condition in whereClassList: obj = obj.filter(condition)
result = obj.all()
else:
result = DBSession.query(TRBHeaderPO).all()
return result
@expose('tribal.templates.order.search')
@paginate('collections',items_per_page = 25)
@tabFocus(tab_type="view")
def search(self, **kw):
try:
search_form = order_view_form
if kw:
result = self._query_result(kw)
return dict(search_form = search_form, collections = result, values = kw)
else:
return dict(search_form = search_form, collections = [], values = {})
except:
flash("The service is not avaiable now,please try it later.",status="warn")
traceback.print_exc()
redirect("/order/index")
def _query_result(self, kw):
try:
conditions = []
# status = kw.get("orderStatus", False)
if kw.get("cutNo", False):
conditions.append(TRBHeaderPO.cutNo.like("%%%s%%" % kw.get("cutNo", "")))
# if kw.get("sub", False):
# conditions.append(JCPHeaderPO.sub == kw.get("sub", ""))
# if kw.get("lot", False):
# conditions.append(JCPHeaderPO.lot == kw.get("lot", ""))
# if kw.get("orderStatus", False):
# if status == "1":
# conditions.append(not_(JCPHeaderPO.id.in_(DBSession.query(JCPOrderForm.headerId))))
# elif status == "2":
# conditions.append(JCPHeaderPO.id == JCPOrderForm.headerId)
if kw.get("customerPO", False):
conditions.append(TRBHeaderPO.id == TRBOrderFormHeader.headerId)
conditions.append(TRBOrderForm.customerPO.like("%%%s%%" % kw.get("customerPO", "")))
if kw.get("orderStartDate",False) and kw.get("orderEndDate",False):
b_date = dt.strptime(kw.get("orderStartDate",'2009-12-1200:00:00') + "00:00:00", "%Y-%m-%d%H:%M:%S")
e_date = dt.strptime(kw.get("orderEndDate",'2009-12-1200:00:00') + "23:59:59", "%Y-%m-%d%H:%M:%S")
conditions.append(TRBOrderFormHeader.orderDate >= b_date)
conditions.append(TRBOrderFormHeader.orderDate <= e_date)
elif kw.get("orderStartDate",False):
b_date = dt.strptime(kw.get("orderStartDate",'2009-12-1200:00:00') + "00:00:00", "%Y-%m-%d%H:%M:%S")
conditions.append(TRBOrderFormHeader.orderDate >= b_date)
elif kw.get("orderEndDate",False):
e_date = dt.strptime(kw.get("orderEndDate",'2009-12-1200:00:00') + "23:59:59", "%Y-%m-%d%H:%M:%S")
conditions.append(TRBOrderFormHeader.orderDate <= e_date)
if len(conditions):
obj = DBSession.query(TRBHeaderPO)
for condition in conditions: obj = obj.filter(condition)
result = obj.filter(TRBHeaderPO.id.in_(DBSession.query(TRBOrderFormHeader.headerId))) \
.filter(TRBHeaderPO.active == 0) \
.order_by(TRBHeaderPO.id) \
.all()
else:
result = DBSession.query(TRBHeaderPO) \
.filter(TRBHeaderPO.active == 0) \
.filter(TRBHeaderPO.id.in_(DBSession.query(TRBOrderFormHeader.headerId))) \
.order_by(TRBHeaderPO.id) \
.all()
return result
except:
traceback.print_exc()
|
|
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext as _
from olympia import amo
from olympia.amo.feeds import NonAtomicFeed
from olympia.amo.urlresolvers import reverse
from olympia.amo.helpers import absolutify, url, page_name
from olympia.addons.models import Addon, Category
from .views import addon_listing, SearchToolsFilter
class AddonFeedMixin(object):
"""Common pieces for add-ons in a feed."""
def item_link(self, addon):
"""Link for a particular addon (<item><link>...</)"""
return absolutify(reverse('addons.detail', args=[addon.slug]))
def item_title(self, addon):
version = ''
if addon.current_version:
version = u' %s' % addon.current_version
return u'%s%s' % (addon.name, version)
def item_description(self, addon):
"""Description for particular add-on (<item><description>)"""
return unicode(addon.description) or ''
def item_author_name(self, addon):
"""Author for a particuar add-on (<item><dc:creator>)"""
if addon.listed_authors:
return addon.listed_authors[0].name
else:
return ''
def item_pubdate(self, addon):
"""Pubdate for a particuar add-on (<item><pubDate>)"""
sort = self.request.GET.get('sort')
return addon.created if sort == 'created' else addon.last_updated
def item_guid(self, addon):
"""Guid for a particuar version (<item><guid>)"""
url_ = reverse('addons.versions',
args=[addon.slug, addon.current_version])
return absolutify(url_)
class CategoriesRss(AddonFeedMixin, NonAtomicFeed):
def get_object(self, request, category_name=None):
"""
Get the Category for which we are about to output
the RSS feed of its Addons
"""
self.request = request
if category_name is None:
return None
q = Category.objects.filter(application=request.APP.id, type=self.TYPE)
self.category = get_object_or_404(q, slug=category_name)
return self.category
def title(self, category):
"""Title for the feed as a whole"""
name = category.name if category else _('Extensions')
return u'%s :: %s' % (name, page_name(self.request.APP))
def link(self, category):
"""Link for the feed as a whole"""
return absolutify(url('home'))
def description(self, category):
"""Description for the feed as a whole"""
if category:
# L10n: %s is a category name.
return _(u'%s Add-ons') % category.name
else:
return _('Extensions')
def items(self, category):
"""Return the Addons for this Category to be output as RSS <item>'s"""
addons, _ = addon_listing(self.request, [self.TYPE], default='updated')
if category:
addons = addons.filter(categories__id=category.id)
return addons[:20]
class ExtensionCategoriesRss(CategoriesRss):
category = None
request = None
TYPE = amo.ADDON_EXTENSION
title = _('Extensions')
def description(self, category):
"""Description for the feed as a whole."""
if category:
# L10n: %s is a category name.
return _(u'%s Add-ons') % category.name
else:
return _('Extensions')
class ThemeCategoriesRss(CategoriesRss):
category = None
request = None
TYPE = amo.ADDON_THEME
title = _('Themes')
def description(self, category):
"""Description for the feed as a whole."""
if category:
# L10n: %s is a category name.
return _(u'%s Themes') % category.name
else:
return self.title
class FeaturedRss(AddonFeedMixin, NonAtomicFeed):
request = None
def get_object(self, request):
self.request = request
self.app = request.APP
self.appname = unicode(request.APP.pretty)
def title(self):
"""Title for the feed"""
return _('Featured Add-ons :: %s') % page_name(self.app)
def link(self):
"""Link for the feed"""
return absolutify(url('home'))
def description(self):
"""Description for the feed"""
# L10n: %s is an app name.
return _("Here's a few of our favorite add-ons to help you get"
" started customizing %s.") % self.appname
def items(self):
"""Return the Addons to be output as RSS <item>'s"""
return Addon.objects.featured(self.app)[:20]
class SearchToolsRss(AddonFeedMixin, NonAtomicFeed):
category = None
request = None
TYPES = None
sort = ''
def description(self):
"""Description of this feed."""
if self.category:
# L10n: %s is a category name.
return _(u'Search tools relating to %s') % self.category.name
elif self.show_featured:
return _('Search tools and search-related extensions')
else:
return _('Search tools')
def get_object(self, request, category=None):
if category:
# Note that we don't need to include extensions
# when looking up a category
qs = Category.objects.filter(application=request.APP.id,
type=amo.ADDON_SEARCH)
self.category = get_object_or_404(qs, slug=category)
else:
self.category = None
self.request = request
self.sort = self.request.GET.get('sort', 'popular')
self.show_featured = self.sort == 'featured'
self.TYPES = [amo.ADDON_SEARCH]
if not self.category and self.show_featured:
self.TYPES.append(amo.ADDON_EXTENSION)
# We don't actually need to return anything, just hijacking the hook.
return None
def items(self):
"""Return search related Add-ons to be output as RSS <item>'s
Just like on the landing page, the following rules apply:
- when viewing featured search tools, include
extensions in the search category
- when viewing categories or any other sorting, do not
include extensions.
"""
addons, filter = addon_listing(self.request, self.TYPES,
SearchToolsFilter, default='popular')
if self.category:
addons = addons.filter(categories__id=self.category.id)
return addons[:30]
def link(self, category):
"""Link for the feed as a whole"""
if self.category:
base = url('browse.search-tools.rss', self.category.slug)
else:
base = url('browse.search-tools.rss')
return absolutify(base + '?sort=' + self.sort)
def title(self):
"""Title for the feed as a whole"""
base = _('Search Tools')
if self.category:
base = u'%s :: %s' % (self.category.name, base)
return u'%s :: %s' % (base, page_name(self.request.APP))
|
|
import _dk_core as core
from collections import namedtuple
from . import resource
from .. import blendstate
from math import floor
DEFAULT_FILENAME = 'BitstreamVeraSans.ttf'
DEFAULT_OUTLINE = 1.0
DEFAULT_DPI = (72, 72)
UniformScaleAttr = namedtuple('UniformScaleAttr', 'file, point, embolden, outline, dpi, kerning, bitmap')
FontAttr = namedtuple('FontAttr', 'attr, scale')
class UIFont:
def __init__(self, textFont, outlineFont):
self.textFont = textFont
self.outlineFont = outlineFont
def lineWidth(self, text):
return self.textFont.lineWidth(text)
def lineHeight(self):
return self.textFont.lineHeight()
@property
def baseline(self):
return self.textFont.baseline
@property
def width(self):
return self.textFont.width
@property
def height(self):
return self.textFont.height
def attributes(point, embolden=0, outline=1, dpi=DEFAULT_DPI, kerning=True, bitmap=False, file=''):
return UniformScaleAttr(file, point, embolden, outline, dpi, kerning, bitmap)
def textFont(fontAttr, scale):
assert isinstance(fontAttr, UniformScaleAttr)
attr = FontAttr(fontAttr, scale)
try:
return resource.textFonts[attr]
except KeyError:
file = fontAttr.file
if file == '':
file = DEFAULT_FILENAME
fontData = resource.pool.loadResourceData(file)
if fontData:
fontClass = core.defaultClass(core.Font)
font = fontClass(fontData,
point=fontAttr.point,
embolden=fontAttr.embolden * attr.scale,
outline=0,
dpi=tuple(int(x * attr.scale) for x in fontAttr.dpi),
enableKerning=fontAttr.kerning,
forceBitmap=fontAttr.bitmap)
resource.textFonts[attr] = font
return resource.textFonts[attr]
else:
raise FileNotFoundError
def outlineFont(fontAttr, scale):
assert isinstance(fontAttr, UniformScaleAttr)
attr = FontAttr(fontAttr, scale)
try:
return resource.outlineFonts[attr]
except KeyError:
file = fontAttr.file
if file == '':
file = DEFAULT_FILENAME
fontData = resource.pool.loadResourceData(file)
if fontData:
fontClass = core.defaultClass(core.Font)
font = fontClass(fontData,
point=fontAttr.point,
embolden=fontAttr.embolden * attr.scale,
outline=fontAttr.outline * attr.scale,
dpi=tuple(int(x * attr.scale) for x in fontAttr.dpi),
enableKerning=fontAttr.kerning,
forceBitmap=fontAttr.bitmap)
resource.outlineFonts[attr] = font
return resource.outlineFonts[attr]
else:
raise FileNotFoundError
def loadUIFont(fontAttr, scale):
assert isinstance(fontAttr, UniformScaleAttr)
if fontAttr.point > 0:
tf = textFont(fontAttr, scale)
of = None
if fontAttr.outline > 0:
of = outlineFont(fontAttr, scale)
return UIFont(tf, of)
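# A minimal usage sketch of the factories above (illustrative only; it assumes a
# resource pool has already been mounted so resource.pool can load the font file,
# and that the caller knows the UI scale factor of its target surface):
#
#   attr = attributes(point=14, embolden=0, outline=2)   # uses DEFAULT_FILENAME
#   ui_font = loadUIFont(attr, scale=2.0)                 # e.g. a 2x (retina) surface
#
# Repeated calls with the same (attr, scale) key are served from the
# resource.textFonts / resource.outlineFonts caches rather than re-rasterised.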
#text-align functions
def ALIGN_TOP_LEFT(frame, width, height, baseline):
x = 0
y = frame.height - height + baseline
return x + frame.x, y + frame.y
def ALIGN_TOP(frame, width, height, baseline):
x = (frame.width - width) * 0.5
y = frame.height - height + baseline
return x + frame.x, y + frame.y
def ALIGN_TOP_RIGHT(frame, width, height, baseline):
x = frame.width - width
y = frame.height - height + baseline
return x + frame.x, y + frame.y
def ALIGN_LEFT(frame, width, height, baseline):
x = 0
y = (frame.height - height + baseline) * 0.5
return x + frame.x, y + frame.y
def ALIGN_CENTER(frame, width, height, baseline):
x = (frame.width - width) * 0.5
y = (frame.height - height + baseline) * 0.5
return x + frame.x, y + frame.y
def ALIGN_RIGHT(frame, width, height, baseline):
x = frame.width - width
y = (frame.height - height + baseline) * 0.5
return x + frame.x, y + frame.y
def ALIGN_BOTTOM_LEFT(frame, width, height, baseline):
x = 0
y = baseline
return x + frame.x, y + frame.y
def ALIGN_BOTTOM(frame, width, height, baseline):
x = (frame.width - width) * 0.5
y = baseline
return x + frame.x, y + frame.y
def ALIGN_BOTTOM_RIGHT(frame, width, height, baseline):
x = frame.width - width
y = baseline
return x + frame.x, y + frame.y
#text line-break functions
def LINE_BREAK_CLIPPING(frame, font, text):
return text
def LINE_BREAK_TRUNCATING_HEAD(frame, font, text):
width = font.lineWidth(text)
if width > frame.width:
ellipsis = '...'
if font.lineWidth(ellipsis) > frame.width:
return ''
for i in range(1, len(text)+1):
text2 = ellipsis + text[i:]
width = font.lineWidth(text2)
if width <= frame.width:
return text2
return ''
return text
def LINE_BREAK_TRUNCATING_TAIL(frame, font, text):
width = font.lineWidth(text)
if width > frame.width:
ellipsis = '...'
if font.lineWidth(ellipsis) > frame.width:
return ''
for i in range(len(text)-1, -1, -1):
text2 = text[:i] + ellipsis
width = font.lineWidth(text2)
if width <= frame.width:
return text2
return ''
return text
def LINE_BREAK_TRUNCATING_MIDDLE(frame, font, text):
width = font.lineWidth(text)
if width > frame.width:
ellipsis = '...'
if font.lineWidth(ellipsis) > frame.width:
return ''
half = len(text)//2
head = text[:half]
tail = text[half:]
while len(head) > 0 or len(tail) > 0:
tail = tail[1:]
text = head + ellipsis + tail
width = font.lineWidth(text)
if width <= frame.width:
return text
head = head[:-1]
text = head + ellipsis + tail
width = font.lineWidth(text)
if width <= frame.width:
return text
return ''
return text
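# Indicative behaviour of the truncation modes above (the exact cut depends on
# the frame width and the font's metrics, so these outputs are only examples):
#   LINE_BREAK_TRUNCATING_TAIL:   'A fairly long label' -> 'A fairly lo...'
#   LINE_BREAK_TRUNCATING_HEAD:   'A fairly long label' -> '...y long label'
#   LINE_BREAK_TRUNCATING_MIDDLE: 'A fairly long label' -> 'A fai...label'
#   LINE_BREAK_CLIPPING:          text is returned unchanged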
def drawText(renderer,
frame,
text,
font,
textColor = core.Color(1.0, 1.0, 1.0, 1.0),
outlineColor = core.Color(0, 0, 0, 0.5),
scaleToFit=False,
align=ALIGN_CENTER,
alignToPixel=True,
linebreak=LINE_BREAK_TRUNCATING_TAIL,
blend=blendstate.defaultAlpha):
textFont = None
outlineFont = None
if font:
if isinstance(font, UIFont):
textFont = font.textFont
outlineFont = font.outlineFont
elif isinstance(font, core.Font):
textFont = font
elif isinstance(font, (tuple, list)):
c = len(font)
if c > 0:
textFont = font[0]
if c > 1:
outlineFont = font[1]
else:
raise TypeError('font argument must be a UIFont, a Font, or a tuple of (text font, outline font).')
layoutFont = textFont if textFont else outlineFont
if len(text) > 0 and frame.width > 0 and frame.height > 0 and layoutFont:
viewport = renderer.viewport[2:]
scale = renderer.bounds[2:]
scaleFactor = (viewport[0] / scale[0], viewport[1] / scale[1])
localToPixel = lambda x, y: (x * scaleFactor[0], y * scaleFactor[1])
pixelToLocal = lambda x, y: (x / scaleFactor[0], y / scaleFactor[1])
pixelFrame = core.Rect(frame)
pixelFrame.origin = localToPixel(*frame.origin)
pixelFrame.size = localToPixel(*frame.size)
text = linebreak(pixelFrame, layoutFont, text)
width = layoutFont.lineWidth(text)
height = layoutFont.lineHeight()
baseline = layoutFont.baseline
if scaleToFit:
scaleX = pixelFrame.width / width
scaleY = pixelFrame.height / height
scale = min(scaleX, scaleY)
width = width * scale
height = height * scale
baseline = baseline * scale
begin = align(pixelFrame, width, height, baseline)
if alignToPixel:
x = floor(begin[0] + 0.5)
y = floor(begin[1] + 0.5)
begin = (x, y)
end = (begin[0] + width, begin[1])
begin = core.Point(pixelToLocal(*begin))
end = core.Point(pixelToLocal(*end))
if outlineFont and outlineColor:
renderer.renderTextBaseline(begin, end, text, outlineFont, outlineColor, blend)
if textFont and textColor:
renderer.renderTextBaseline(begin, end, text, textFont, textColor, blend)
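# A minimal drawText usage sketch (illustrative only; `renderer` is assumed to be
# a core renderer exposing viewport/bounds/renderTextBaseline, and `frame` a
# core.Rect in the renderer's local coordinates):
#
#   font = loadUIFont(attributes(point=12, outline=1), scale=1.0)
#   drawText(renderer, frame, 'Score: 42', font,
#            align=ALIGN_TOP_LEFT,
#            linebreak=LINE_BREAK_TRUNCATING_MIDDLE)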
|
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import copy
import itertools
import six
from conveyor.conveyorheat.common import exception
from conveyor.conveyorheat.common import grouputils
from conveyor.conveyorheat.common import timeutils
from conveyor.conveyorheat.engine import attributes
from conveyor.conveyorheat.engine import constraints
from conveyor.conveyorheat.engine import function
from conveyor.conveyorheat.engine.hot import template
from conveyor.conveyorheat.engine import properties
from conveyor.conveyorheat.engine.resources import stack_resource
from conveyor.conveyorheat.engine import rsrc_defn
from conveyor.conveyorheat.engine import scheduler
from conveyor.conveyorheat.engine import support
from conveyor.conveyorheat.scaling import rolling_update
from conveyor.conveyorheat.scaling import template as scl_template
from conveyor.i18n import _
class ResourceGroup(stack_resource.StackResource):
"""Creates one or more identically configured nested resources.
In addition to the `refs` attribute, this resource implements synthetic
attributes that mirror those of the resources in the group. When
getting an attribute from this resource, however, a list of attribute
values for each resource in the group is returned. To get attribute values
for a single resource in the group, synthetic attributes of the form
`resource.{resource index}.{attribute name}` can be used. The resource ID
of a particular resource in the group can be obtained via the synthetic
attribute `resource.{resource index}`. Note that if you get an attribute
without `{resource index}`, e.g. `[resource, {attribute_name}]`, you'll get
a list of this attribute's values for all resources in the group.
While each resource in the group will be identically configured, this
resource does allow for some index-based customization of the properties
of the resources in the group. For example::
resources:
my_indexed_group:
type: OS::Heat::ResourceGroup
properties:
count: 3
resource_def:
type: OS::Nova::Server
properties:
# create a unique name for each server
# using its index in the group
name: my_server_%index%
image: CentOS 6.5
flavor: 4GB Performance
would result in a group of three servers having the same image and flavor,
but names of `my_server_0`, `my_server_1`, and `my_server_2`. The variable
used for substitution can be customized by using the `index_var` property.
"""
support_status = support.SupportStatus(version='2014.1')
PROPERTIES = (
COUNT, INDEX_VAR, RESOURCE_DEF, REMOVAL_POLICIES,
) = (
'count', 'index_var', 'resource_def', 'removal_policies',
)
_RESOURCE_DEF_KEYS = (
RESOURCE_DEF_TYPE, RESOURCE_DEF_PROPERTIES, RESOURCE_DEF_METADATA,
) = (
'type', 'properties', 'metadata',
)
_REMOVAL_POLICIES_KEYS = (
REMOVAL_RSRC_LIST,
) = (
'resource_list',
)
_ROLLING_UPDATES_SCHEMA_KEYS = (
MIN_IN_SERVICE, MAX_BATCH_SIZE, PAUSE_TIME,
) = (
'min_in_service', 'max_batch_size', 'pause_time',
)
_BATCH_CREATE_SCHEMA_KEYS = (
MAX_BATCH_SIZE, PAUSE_TIME,
) = (
'max_batch_size', 'pause_time',
)
_UPDATE_POLICY_SCHEMA_KEYS = (
ROLLING_UPDATE, BATCH_CREATE,
) = (
'rolling_update', 'batch_create',
)
ATTRIBUTES = (
REFS, ATTR_ATTRIBUTES,
) = (
'refs', 'attributes',
)
properties_schema = {
COUNT: properties.Schema(
properties.Schema.INTEGER,
_('The number of resources to create.'),
default=1,
constraints=[
constraints.Range(min=0),
],
update_allowed=True
),
INDEX_VAR: properties.Schema(
properties.Schema.STRING,
_('A variable that this resource will use to replace with the '
'current index of a given resource in the group. Can be used, '
'for example, to customize the name property of grouped '
'servers in order to differentiate them when listed with '
'nova client.'),
default="%index%",
constraints=[
constraints.Length(min=3)
],
support_status=support.SupportStatus(version='2014.2')
),
RESOURCE_DEF: properties.Schema(
properties.Schema.MAP,
_('Resource definition for the resources in the group. The value '
'of this property is the definition of a resource just as if '
'it had been declared in the template itself.'),
schema={
RESOURCE_DEF_TYPE: properties.Schema(
properties.Schema.STRING,
_('The type of the resources in the group.'),
required=True
),
RESOURCE_DEF_PROPERTIES: properties.Schema(
properties.Schema.MAP,
_('Property values for the resources in the group.')
),
RESOURCE_DEF_METADATA: properties.Schema(
properties.Schema.MAP,
_('Supplied metadata for the resources in the group.'),
support_status=support.SupportStatus(version='5.0.0')
),
},
required=True,
update_allowed=True
),
REMOVAL_POLICIES: properties.Schema(
properties.Schema.LIST,
_('Policies for removal of resources on update.'),
schema=properties.Schema(
properties.Schema.MAP,
_('Policy to be processed when doing an update which '
'requires removal of specific resources.'),
schema={
REMOVAL_RSRC_LIST: properties.Schema(
properties.Schema.LIST,
_("List of resources to be removed "
"when doing an update which requires removal of "
"specific resources. "
"The resource may be specified several ways: "
"(1) The resource name, as in the nested stack, "
"(2) The resource reference returned from "
"get_resource in a template, as available via "
"the 'refs' attribute. "
"Note this is destructive on update when specified; "
"even if the count is not being reduced, and once "
"a resource name is removed, it's name is never "
"reused in subsequent updates."
),
default=[]
),
},
),
update_allowed=True,
default=[],
support_status=support.SupportStatus(version='2015.1')
),
}
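# Illustrative removal_policies usage (assumed values): a template update that
# sets
#   removal_policies: [{resource_list: ['0']}]
# blacklists the nested resource named '0' (or the resource whose reference ID
# matches), so it is removed and its name is never reused.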
attributes_schema = {
REFS: attributes.Schema(
_("A list of resource IDs for the resources in the group."),
type=attributes.Schema.LIST
),
ATTR_ATTRIBUTES: attributes.Schema(
_("A map of resource names to the specified attribute of each "
"individual resource. "
"Requires heat_template_version: 2014-10-16."),
support_status=support.SupportStatus(version='2014.2'),
type=attributes.Schema.MAP
),
}
rolling_update_schema = {
MIN_IN_SERVICE: properties.Schema(
properties.Schema.INTEGER,
_('The minimum number of resources in service while '
'rolling updates are being executed.'),
constraints=[constraints.Range(min=0)],
default=0),
MAX_BATCH_SIZE: properties.Schema(
properties.Schema.INTEGER,
_('The maximum number of resources to replace at once.'),
constraints=[constraints.Range(min=1)],
default=1),
PAUSE_TIME: properties.Schema(
properties.Schema.NUMBER,
_('The number of seconds to wait between batches of '
'updates.'),
constraints=[constraints.Range(min=0)],
default=0),
}
batch_create_schema = {
MAX_BATCH_SIZE: properties.Schema(
properties.Schema.INTEGER,
_('The maximum number of resources to create at once.'),
constraints=[constraints.Range(min=1)],
default=1
),
PAUSE_TIME: properties.Schema(
properties.Schema.NUMBER,
_('The number of seconds to wait between batches.'),
constraints=[constraints.Range(min=0)],
default=0
),
}
update_policy_schema = {
ROLLING_UPDATE: properties.Schema(
properties.Schema.MAP,
schema=rolling_update_schema,
support_status=support.SupportStatus(version='5.0.0')
),
BATCH_CREATE: properties.Schema(
properties.Schema.MAP,
schema=batch_create_schema,
support_status=support.SupportStatus(version='5.0.0')
)
}
def get_size(self):
return self.properties.get(self.COUNT)
def validate_nested_stack(self):
# Only validate the resource definition (which may be a
# nested template) if count is non-zero, to enable folks
# to disable features via a zero count if they wish
if not self.get_size():
return
test_tmpl = self._assemble_nested(["0"], include_all=True)
res_def = next(six.itervalues(
test_tmpl.resource_definitions(self.stack)))
# make sure we can resolve the nested resource type
self.stack.env.get_class_to_instantiate(res_def.resource_type)
try:
name = "%s-%s" % (self.stack.name, self.name)
nested_stack = self._parse_nested_stack(
name,
test_tmpl,
self.child_params())
nested_stack.strict_validate = False
nested_stack.validate()
except Exception as ex:
msg = _("Failed to validate: %s") % six.text_type(ex)
raise exception.StackValidationFailed(message=msg)
def _name_blacklist(self):
"""Resolve the remove_policies to names for removal."""
nested = self.nested()
# To avoid reusing names after removal, we store a comma-separated
# blacklist in the resource data
db_rsrc_names = self.data().get('name_blacklist')
if db_rsrc_names:
current_blacklist = db_rsrc_names.split(',')
else:
current_blacklist = []
# Now we iterate over the removal policies, and update the blacklist
# with any additional names
rsrc_names = set(current_blacklist)
if nested:
for r in self.properties[self.REMOVAL_POLICIES]:
if self.REMOVAL_RSRC_LIST in r:
# Tolerate string or int list values
for n in r[self.REMOVAL_RSRC_LIST]:
str_n = six.text_type(n)
if str_n in nested:
rsrc_names.add(str_n)
continue
rsrc = nested.resource_by_refid(str_n)
if rsrc:
rsrc_names.add(rsrc.name)
# If the blacklist has changed, update the resource data
if rsrc_names != set(current_blacklist):
self.data_set('name_blacklist', ','.join(rsrc_names))
return rsrc_names
def _resource_names(self, size=None):
name_blacklist = self._name_blacklist()
if size is None:
size = self.get_size()
def is_blacklisted(name):
return name in name_blacklist
candidates = six.moves.map(six.text_type, itertools.count())
return itertools.islice(six.moves.filterfalse(is_blacklisted,
candidates),
size)
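# Illustrative example (assumed values): with get_size() == 3 and a name
# blacklist of {'1'}, _resource_names() yields '0', '2', '3' -- blacklisted
# names are skipped and never reused.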
def _count_black_listed(self):
"""Return the number of current resource names that are blacklisted."""
existing_members = grouputils.get_member_names(self)
return len(self._name_blacklist() & set(existing_members))
def handle_create(self):
if self.update_policy.get(self.BATCH_CREATE):
batch_create = self.update_policy[self.BATCH_CREATE]
max_batch_size = batch_create[self.MAX_BATCH_SIZE]
pause_sec = batch_create[self.PAUSE_TIME]
checkers = self._replace(0, max_batch_size, pause_sec)
checkers[0].start()
return checkers
else:
names = self._resource_names()
self.create_with_template(self._assemble_nested(names),
self.child_params(),
self.stack.timeout_secs())
def check_create_complete(self, checkers=None):
if checkers is None:
return super(ResourceGroup, self).check_create_complete()
for checker in checkers:
if not checker.started():
checker.start()
if not checker.step():
return False
return True
def _run_to_completion(self, template, timeout):
updater = self.update_with_template(template, {},
timeout)
while not super(ResourceGroup,
self).check_update_complete(updater):
yield
def _run_update(self, total_capacity, max_updates, timeout):
template = self._assemble_for_rolling_update(total_capacity,
max_updates)
return self._run_to_completion(template, timeout)
def check_update_complete(self, checkers):
for checker in checkers:
if not checker.started():
checker.start()
if not checker.step():
return False
return True
def handle_update(self, json_snippet, tmpl_diff, prop_diff):
if tmpl_diff:
# parse update policy
if rsrc_defn.UPDATE_POLICY in tmpl_diff:
up = json_snippet.update_policy(self.update_policy_schema,
self.context)
self.update_policy = up
checkers = []
self.properties = json_snippet.properties(self.properties_schema,
self.context)
if prop_diff and self.RESOURCE_DEF in prop_diff:
updaters = self._try_rolling_update()
if updaters:
checkers.extend(updaters)
if not checkers:
resizer = scheduler.TaskRunner(
self._run_to_completion,
self._assemble_nested(self._resource_names()),
self.stack.timeout_mins)
checkers.append(resizer)
checkers[0].start()
return checkers
def get_attribute(self, key, *path):
if key.startswith("resource."):
return grouputils.get_nested_attrs(self, key, False, *path)
names = self._resource_names()
if key == self.REFS:
vals = [grouputils.get_rsrc_id(self, key, False, n) for n in names]
return attributes.select_from_attribute(vals, path)
if key == self.ATTR_ATTRIBUTES:
if not path:
raise exception.InvalidTemplateAttribute(
resource=self.name, key=key)
return dict((n, grouputils.get_rsrc_attr(
self, key, False, n, *path)) for n in names)
path = [key] + list(path)
return [grouputils.get_rsrc_attr(self, key, False, n, *path)
for n in names]
def build_resource_definition(self, res_name, res_defn):
res_def = copy.deepcopy(res_defn)
props = res_def.get(self.RESOURCE_DEF_PROPERTIES)
if props:
repl_props = self._handle_repl_val(res_name, props)
res_def[self.RESOURCE_DEF_PROPERTIES] = repl_props
return template.HOTemplate20130523.rsrc_defn_from_snippet(res_name,
res_def)
def get_resource_def(self, include_all=False):
"""Returns the resource definition portion of the group.
:param include_all: if False, only properties for the resource
definition that are not empty will be included
:type include_all: bool
:return: resource definition for the group
:rtype: dict
"""
# At this stage, we don't require all of the parameters to have values
# assigned. Pass in a custom resolver to the properties so that we do
# not error out when a parameter does not have a user-entered value.
def ignore_param_resolve(snippet):
while isinstance(snippet, function.Function):
try:
snippet = snippet.result()
except exception.UserParameterMissing:
return None
if isinstance(snippet, collections.Mapping):
return dict((k, ignore_param_resolve(v))
for k, v in snippet.items())
elif (not isinstance(snippet, six.string_types) and
isinstance(snippet, collections.Iterable)):
return [ignore_param_resolve(v) for v in snippet]
return snippet
self.properties.resolve = ignore_param_resolve
res_def = self.properties[self.RESOURCE_DEF]
if not include_all:
return self._clean_props(res_def)
return res_def
def _clean_props(self, res_defn):
res_def = copy.deepcopy(res_defn)
props = res_def.get(self.RESOURCE_DEF_PROPERTIES)
if props:
clean = dict((k, v) for k, v in props.items() if v is not None)
props = clean
res_def[self.RESOURCE_DEF_PROPERTIES] = props
return res_def
def _handle_repl_val(self, res_name, val):
repl_var = self.properties[self.INDEX_VAR]
def recurse(x):
return self._handle_repl_val(res_name, x)
if isinstance(val, six.string_types):
return val.replace(repl_var, res_name)
elif isinstance(val, collections.Mapping):
return {k: recurse(v) for k, v in val.items()}
elif isinstance(val, collections.Sequence):
return [recurse(v) for v in val]
return val
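# Illustrative sketch (not from the original source): if the group's index
# variable property were, say, '%index%', then for the member named '2' a
# snippet like {'name': 'server_%index%'} would be rewritten by
# _handle_repl_val to {'name': 'server_2'}, recursing through nested
# mappings and sequences as above.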
def _assemble_nested(self, names, include_all=False,
template_version=('heat_template_version',
'2015-04-30')):
def_dict = self.get_resource_def(include_all)
definitions = [(k, self.build_resource_definition(k, def_dict))
for k in names]
return scl_template.make_template(definitions,
version=template_version)
def _assemble_for_rolling_update(self, total_capacity, max_updates,
include_all=False,
template_version=('heat_template_version',
'2015-04-30')):
names = list(self._resource_names(total_capacity))
name_blacklist = self._name_blacklist()
valid_resources = [(n, d) for n, d in
grouputils.get_member_definitions(self)
if n not in name_blacklist]
targ_cap = self.get_size()
def replace_priority(res_item):
name, defn = res_item
try:
index = names.index(name)
except ValueError:
# High priority - delete immediately
return 0
else:
if index < targ_cap:
# Update higher indices first
return targ_cap - index
else:
# Low priority - don't update
return total_capacity
old_resources = sorted(valid_resources, key=replace_priority)
existing_names = set(n for n, d in valid_resources)
new_names = six.moves.filterfalse(lambda n: n in existing_names,
names)
res_def = self.get_resource_def(include_all)
definitions = scl_template.member_definitions(
old_resources, res_def,
total_capacity,
max_updates,
lambda: next(new_names),
self.build_resource_definition)
return scl_template.make_template(definitions,
version=template_version)
def _try_rolling_update(self):
if self.update_policy[self.ROLLING_UPDATE]:
policy = self.update_policy[self.ROLLING_UPDATE]
return self._replace(policy[self.MIN_IN_SERVICE],
policy[self.MAX_BATCH_SIZE],
policy[self.PAUSE_TIME])
def _update_timeout(self, batch_cnt, pause_sec):
total_pause_time = pause_sec * max(batch_cnt - 1, 0)
if total_pause_time >= self.stack.timeout_secs():
msg = _('The current %s will result in stack update '
'timeout.') % rsrc_defn.UPDATE_POLICY
raise ValueError(msg)
return self.stack.timeout_secs() - total_pause_time
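# Worked example (illustrative only): with 3 batches and pause_sec=60,
# total_pause_time is 60 * (3 - 1) = 120 seconds, so the value returned
# above is stack.timeout_secs() - 120 (assuming that is still positive;
# otherwise the ValueError above is raised).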
@staticmethod
def _get_batches(targ_cap, curr_cap, batch_size, min_in_service):
updated = 0
while rolling_update.needs_update(targ_cap, curr_cap, updated):
new_cap, total_new = rolling_update.next_batch(targ_cap,
curr_cap,
updated,
batch_size,
min_in_service)
yield new_cap, total_new
updated += total_new - max(new_cap - max(curr_cap, targ_cap), 0)
curr_cap = new_cap
def _replace(self, min_in_service, batch_size, pause_sec):
def pause_between_batch(pause_sec):
duration = timeutils.Duration(pause_sec)
while not duration.expired():
yield
# blacklist count existing
num_blacklist = self._count_black_listed()
# current capacity not including existing blacklisted
curr_cap = len(self.nested()) - num_blacklist if self.nested() else 0
batches = list(self._get_batches(self.get_size(), curr_cap, batch_size,
min_in_service))
update_timeout = self._update_timeout(len(batches), pause_sec)
def tasks():
for index, (curr_cap, max_upd) in enumerate(batches):
yield scheduler.TaskRunner(self._run_update,
curr_cap, max_upd,
update_timeout)
if index < (len(batches) - 1) and pause_sec > 0:
yield scheduler.TaskRunner(pause_between_batch, pause_sec)
return list(tasks())
def child_template(self):
names = self._resource_names()
return self._assemble_nested(names)
def child_params(self):
return {}
def handle_adopt(self, resource_data):
names = self._resource_names()
if names:
return self.create_with_template(self._assemble_nested(names),
{},
adopt_data=resource_data)
def resource_mapping():
return {
'OS::Heat::ResourceGroup': ResourceGroup,
}
|
|
# csaudio.py
# import csaudio ; reload(csaudio) ; from csaudio import *
import wave
wave.big_endian = 0
def printParams(params):
print 'Parameters:'
print ' nchannels:', params[0]
print ' sampwidth:', params[1]
print ' framerate:', params[2]
print ' nframes :', params[3]
print ' comptype :', params[4]
print ' compname :', params[5]
def tr(params,rf):
""" tr transforms raw frames to floating-point samples """
samps = [ord(x) for x in rf] # convert to numeric bytes
# give parameters nicer names
nchannels = params[0]
sampwidth = params[1]
nsamples = params[3]
if sampwidth == 1:
for i in range(nsamples):
if samps[i] < 128:
samps[i] *= 256.0 # Convert to 16-bit range, floating
else:
samps[i] = (samps[i] - 256) * 256.0
elif sampwidth == 2:
newsamps = nsamples * nchannels * [0]
for i in range(nsamples * nchannels):
# The wav package gives us the data in native
# "endian-ness". The clever indexing with wave.big_endian
# makes sure we unpack in the proper byte order.
sampval = samps[2*i + 1 - wave.big_endian] * 256 \
+ samps[2*i + wave.big_endian]
if sampval >= 32768:
sampval -= 65536
newsamps[i] = float(sampval)
samps = newsamps
else:
print 'A sample width of', params[1], 'is not supported.'
print 'Returning silence.'
samps = nsamples * [0.0]
if nchannels == 2:
# Mix to mono
newsamps = nsamples * [0]
for i in range(nsamples):
newsamps[i] = (samps[2 * i] + samps[2 * i + 1]) / 2.0
samps = newsamps
return samps
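# Worked example of the 16-bit branch above (illustrative only): on a
# little-endian machine wave.big_endian is 0, so the byte pair
# (0x34, 0x12) unpacks as 0x12 * 256 + 0x34 = 4660.0, while
# (0xFF, 0xFF) unpacks as 65535, which is >= 32768 and wraps to -1.0.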
def tri(params,samps):
""" tri is tr inverse, i.e. from samples to rawframes """
if params[1] == 1: # one byte per sample
samps = [int(x+127.5) for x in samps]
#print 'max, min are', max(samps), min(samps)
rf = [chr(x) for x in samps]
elif params[1] == 2: # two bytes per sample
bytesamps = (2*params[3])*[0] # start at all zeros
for i in range(params[3]):
# maybe another rounding strategy in the future?
intval = int(samps[i])
if intval > 32767: intval = 32767
if intval < -32767: intval = -32767 # maybe could be -32768
if intval < 0: intval += 65536 # Handle negative values
# The wav package wants its data in native "endian-ness".
# The clever indexing with wave.big_endian makes sure we
# pack in the proper byte order.
bytesamps[2*i + 1 - wave.big_endian] = intval / 256
bytesamps[2*i + wave.big_endian] = intval % 256
samps = bytesamps
#print 'max, min are', max(samps), min(samps)
rf = [chr(x) for x in samps]
return ''.join(rf)
def get_data(filename):
""" the file needs to be in .wav format
there are lots of conversion programs online, however,
to create .wav from .mp3 and other formats
"""
# this will complain if the file isn't there!
fin = wave.open(filename, 'rb')
params = fin.getparams()
#printParams(params)
rawFrames = fin.readframes(params[3])
# need to extract just one channel of sound data at the right width...
fin.close()
return params, rawFrames
def readwav(filename):
""" readwav returns the audio data from the file
named filename, which must be a .wav file.
Call this function as follows:
samps, sr = readwav(filename)
samps will be a list of the raw sound samples (floats)
sr will be the sampling rate for that list (integer)
"""
sound_data = [0,0]
read_wav(filename,sound_data)
samps = sound_data[0]
sr = sound_data[1]
if type(samps) != type([]): samps = [42] # default value
return samps, sr
def read_wav(filename,sound_data):
""" read_wav returns the audio data from the file
named filename (the first input) in the list
named sound_data (the second input)
If the file exists and is the correct .wav format,
then after this call sound_data will be a list of two
elements:
sound_data[0] will be a list of the raw sound samples
sound_data[1] will be the sampling rate for that list
That is, sound_data will be the following:
[ [d0, d1, d2, ...], samplingrate ]
where each d0, d1, d2, ... is a floating-point value
and sampling rate is an integer, representing the
frequency with which audio samples were taken.
No value is returned from this function!
"""
if type(sound_data) != type([]):
print """
read_wav was called with a second input,
sound_data, that was _not_ of type list.
That input needs to be a list, e.g., []
"""
return # nothing
# sound_data is a list: we create/clear its first two elements
if len(sound_data) < 1:
sound_data.append(0)
if len(sound_data) < 2:
sound_data.append(0)
# now it has at least two elements, and we reset them
sound_data[0] = 42
sound_data[1] = 42
try:
params, rf = get_data(filename)
samps = tr(params,rf)
except:
print "There was a problem with the file", filename
print "You might check if it's here and of"
print "the correct format (.wav) ... "
return # nothing
numchannels = params[0]
datawidth = params[1]
framerate = params[2]
numsamples = params[3]
print
print 'You opened', filename, 'which has'
print ' ', numsamples, 'audio samples, taken at'
print ' ', framerate, 'hertz (samples per second).'
print
sound_data[0] = samps
sound_data[1] = framerate
return # nothing
def write_data(params=None, rawFrames=None, filename="out.wav"):
""" back out to .wav format """
fout = wave.open(filename,'wb')
if params:
fout.setparams(params)
if rawFrames:
fout.writeframes(rawFrames)
else:
print 'no frames'
else:
print 'no params'
fout.close()
def writewav(samps, sr, filename="out.wav"):
""" writewav saves a .wav file whose
first input parameter is the audio data as a list
second parameter is the integer sampling rate
the minimum allowed value is 1 hertz (1 sample per second),
which is well under human hearing range
third parameter is the output file name
if no name is specified, this parameter defaults to 'out.wav'
"""
write_wav([samps, sr], filename)
def write_wav(sound_data, filename="out.wav"):
""" write_wav outputs a .wav file whose
first parameter is the [audio data, srate] as a list
second parameter is the output file name
if no name is specified, this parameter defaults to 'out.wav'
"""
if type(sound_data) != type([]) or \
len(sound_data) < 2 or \
type(sound_data[0]) != type([]) or \
type(sound_data[1]) != type(42):
print """
write_wav was called with a first input,
sound_data, that was _not_ an appropriate list.
That input needs to be a list such that
sound_data[0] are the raw sound samples and
sound_data[1] is the sampling rate, e.g.,
[ [d0, d1, d2, ...], samplingrate ]
where each d0, d1, d2, ... is a floating-point value
and sampling rate is an integer, representing the
frequency with which audio samples were taken.
"""
return # nothing
# name the two components of sound_data
data = sound_data[0]
samplingrate = sound_data[1]
# compose the file...
framerate = int(samplingrate)
if framerate < 0:
framerate = -framerate
if framerate < 1:
framerate = 1
# always 1 channel and 2 output bytes per sample
params = [1, 2, framerate, len(data), "NONE", "No compression"]
# convert to raw frames
rawframesstring = tri(params,data)
write_data(params, rawframesstring, filename)
print
print 'You have written the file', filename, 'which has'
print ' ', len(data), 'audio samples, taken at'
print ' ', samplingrate, 'hertz.'
print
return # nothing
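# Illustrative usage sketch (not part of the original module). It assumes a
# file named 'example.wav' exists in the current directory; the helper name
# is hypothetical.
def _example_halve_volume():
    samps, sr = readwav('example.wav')     # samples (floats) + sampling rate
    quieter = [s / 2.0 for s in samps]     # scale every sample down
    writewav(quieter, sr, 'quieter.wav')   # write the result back out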
# a useful thing to have... can be done all in sw under windows...
import os
if os.name == 'nt':
import winsound
elif os.uname()[0] == 'Linux':
import ossaudiodev
def play(filename):
""" play a .wav file for Windows, Linux, or Mac
for Mac, you need to have the "play"
application in the current folder (.)
"""
if type(filename) != type(''):
raise TypeError, 'filename must be a string'
if os.name == 'nt':
winsound.PlaySound(filename, winsound.SND_FILENAME)
elif os.uname()[0] == 'Linux':
os.system('/usr/bin/play ' + filename + ' || /usr/bin/aplay ' + filename)
# assume MAC, if not a Windows or Linux machine
# if you're using another OS, you'll need to adjust this...
else:
# this was the pre MacOS 10.5 method...
#os.system( ('./play ' + filename) )
# now, it seems that /usr/bin/afplay is provided with MacOS X
# and it seems to work in the same way play did
# perhaps Apple simply used play?
os.system( ('/usr/bin/afplay ' + filename) )
|
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains definitions for the post-activation form of Residual Networks.
Residual networks (ResNets) were proposed in:
[1] Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun
Deep Residual Learning for Image Recognition. arXiv:1512.03385
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from mlperf_compliance import mlperf_log
from mlperf_compliance import resnet_log_helper
BATCH_NORM_DECAY = 0.9
BATCH_NORM_EPSILON = 1e-5
def batch_norm_relu(inputs, is_training, relu=True, init_zero=False,
data_format='channels_first'):
"""Performs a batch normalization followed by a ReLU.
Args:
inputs: `Tensor` of shape `[batch, channels, ...]`.
is_training: `bool` for whether the model is training.
relu: `bool` if False, omits the ReLU operation.
init_zero: `bool` if True, initializes scale parameter of batch
normalization with 0 instead of 1 (default).
data_format: `str` either "channels_first" for `[batch, channels, height,
width]` or "channels_last for `[batch, height, width, channels]`.
Returns:
A normalized `Tensor` with the same `data_format`.
"""
if init_zero:
gamma_initializer = tf.zeros_initializer()
else:
gamma_initializer = tf.ones_initializer()
if data_format == 'channels_first':
axis = 1
else:
axis = 3
outputs = tf.layers.batch_normalization(
inputs=inputs,
axis=axis,
momentum=BATCH_NORM_DECAY,
epsilon=BATCH_NORM_EPSILON,
center=True,
scale=True,
training=is_training,
fused=True,
gamma_initializer=gamma_initializer)
if is_training:
resnet_log_helper.log_batch_norm(
input_tensor=inputs,
output_tensor=outputs,
momentum=BATCH_NORM_DECAY,
epsilon=BATCH_NORM_EPSILON,
center=True,
scale=True,
training=is_training)
if relu:
if is_training:
mlperf_log.resnet_print(key=mlperf_log.MODEL_HP_RELU)
outputs = tf.nn.relu(outputs)
return outputs
def fixed_padding(inputs, kernel_size, data_format='channels_first'):
"""Pads the input along the spatial dimensions independently of input size.
Args:
inputs: `Tensor` of size `[batch, channels, height, width]` or
`[batch, height, width, channels]` depending on `data_format`.
kernel_size: `int` kernel size to be used for `conv2d` or max_pool2d`
operations. Should be a positive integer.
data_format: `str` either "channels_first" for `[batch, channels, height,
width]` or "channels_last for `[batch, height, width, channels]`.
Returns:
A padded `Tensor` of the same `data_format` with size either intact
(if `kernel_size == 1`) or padded (if `kernel_size > 1`).
"""
pad_total = kernel_size - 1
pad_beg = pad_total // 2
pad_end = pad_total - pad_beg
if data_format == 'channels_first':
padded_inputs = tf.pad(inputs, [[0, 0], [0, 0],
[pad_beg, pad_end], [pad_beg, pad_end]])
else:
padded_inputs = tf.pad(inputs, [[0, 0], [pad_beg, pad_end],
[pad_beg, pad_end], [0, 0]])
return padded_inputs
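# Worked example (illustrative only): with kernel_size=7, pad_total is 6,
# so pad_beg=3 and pad_end=3; with kernel_size=3 the padding is 1 on each
# side. The padding therefore depends only on the kernel size, not on the
# input dimensions, as described in conv2d_fixed_padding below.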
def conv2d_fixed_padding(inputs,
filters,
kernel_size,
strides,
is_training,
data_format='channels_first'):
"""Strided 2-D convolution with explicit padding.
The padding is consistent and is based only on `kernel_size`, not on the
dimensions of `inputs` (as opposed to using `tf.layers.conv2d` alone).
Args:
inputs: `Tensor` of size `[batch, channels, height_in, width_in]`.
filters: `int` number of filters in the convolution.
kernel_size: `int` size of the kernel to be used in the convolution.
strides: `int` strides of the convolution.
is_training: `bool` for whether the model is in training.
data_format: `str` either "channels_first" for `[batch, channels, height,
width]` or "channels_last for `[batch, height, width, channels]`.
Returns:
A `Tensor` of shape `[batch, filters, height_out, width_out]`.
"""
inputs_for_logging = inputs
if strides > 1:
inputs = fixed_padding(inputs, kernel_size, data_format=data_format)
outputs = tf.layers.conv2d(
inputs=inputs,
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=('SAME' if strides == 1 else 'VALID'),
use_bias=False,
kernel_initializer=tf.variance_scaling_initializer(),
data_format=data_format)
if is_training:
resnet_log_helper.log_conv2d(
input_tensor=inputs_for_logging,
output_tensor=outputs,
stride=strides,
filters=filters,
initializer=mlperf_log.TRUNCATED_NORMAL,
use_bias=False)
return outputs
def residual_block(inputs, filters, is_training, strides,
use_projection=False, data_format='channels_first'):
"""Standard building block for residual networks with BN after convolutions.
Args:
inputs: `Tensor` of size `[batch, channels, height, width]`.
filters: `int` number of filters for the convolutions in this block.
is_training: `bool` for whether the model is in training.
strides: `int` block stride. If greater than 1, this block will ultimately
downsample the input.
use_projection: `bool` for whether this block should use a projection
shortcut (versus the default identity shortcut). This is usually `True`
for the first block of a block group, which may change the number of
filters and the resolution.
data_format: `str` either "channels_first" for `[batch, channels, height,
width]` or "channels_last for `[batch, height, width, channels]`.
Returns:
The output `Tensor` of the block.
"""
shortcut = inputs
if use_projection:
# Projection shortcut in first layer to match filters and strides
shortcut = conv2d_fixed_padding(
inputs=inputs,
filters=filters,
kernel_size=1,
strides=strides,
is_training=is_training,
data_format=data_format)
shortcut = batch_norm_relu(shortcut, is_training, relu=False,
data_format=data_format)
inputs = conv2d_fixed_padding(
inputs=inputs,
filters=filters,
kernel_size=3,
strides=strides,
is_training=is_training,
data_format=data_format)
inputs = batch_norm_relu(inputs, is_training, data_format=data_format)
inputs = conv2d_fixed_padding(
inputs=inputs,
filters=filters,
kernel_size=3,
strides=1,
is_training=is_training,
data_format=data_format)
inputs = batch_norm_relu(inputs, is_training, relu=False, init_zero=True,
data_format=data_format)
return tf.nn.relu(inputs + shortcut)
def bottleneck_block(inputs, filters, is_training, strides,
use_projection=False, data_format='channels_first'):
"""Bottleneck block variant for residual networks with BN after convolutions.
Args:
inputs: `Tensor` of size `[batch, channels, height, width]`.
filters: `int` number of filters for the first two convolutions. Note that
the third and final convolution will use 4 times as many filters.
is_training: `bool` for whether the model is in training.
strides: `int` block stride. If greater than 1, this block will ultimately
downsample the input.
use_projection: `bool` for whether this block should use a projection
shortcut (versus the default identity shortcut). This is usually `True`
for the first block of a block group, which may change the number of
filters and the resolution.
data_format: `str` either "channels_first" for `[batch, channels, height,
width]` or "channels_last for `[batch, height, width, channels]`.
Returns:
The output `Tensor` of the block.
"""
if is_training:
mlperf_log.resnet_print(
key=mlperf_log.MODEL_HP_BLOCK_TYPE, value=mlperf_log.BOTTLENECK_BLOCK)
resnet_log_helper.log_begin_block(
input_tensor=inputs, block_type=mlperf_log.BOTTLENECK_BLOCK)
shortcut = inputs
if use_projection:
# Projection shortcut only in first block within a group. Bottleneck blocks
# end with 4 times the number of filters.
filters_out = 4 * filters
shortcut = conv2d_fixed_padding(
inputs=inputs,
filters=filters_out,
kernel_size=1,
strides=strides,
is_training=is_training,
data_format=data_format)
shortcut = batch_norm_relu(shortcut, is_training, relu=False,
data_format=data_format)
if is_training:
resnet_log_helper.log_projection(
input_tensor=inputs, output_tensor=shortcut)
inputs = conv2d_fixed_padding(
inputs=inputs,
filters=filters,
kernel_size=1,
strides=1,
is_training=is_training,
data_format=data_format)
inputs = batch_norm_relu(inputs, is_training, data_format=data_format)
inputs = conv2d_fixed_padding(
inputs=inputs,
filters=filters,
kernel_size=3,
strides=strides,
is_training=is_training,
data_format=data_format)
inputs = batch_norm_relu(inputs, is_training, data_format=data_format)
inputs = conv2d_fixed_padding(
inputs=inputs,
filters=4 * filters,
kernel_size=1,
strides=1,
is_training=is_training,
data_format=data_format)
inputs = batch_norm_relu(inputs, is_training, relu=False, init_zero=True,
data_format=data_format)
output = tf.nn.relu(inputs + shortcut)
if is_training:
mlperf_log.resnet_print(key=mlperf_log.MODEL_HP_SHORTCUT_ADD)
mlperf_log.resnet_print(key=mlperf_log.MODEL_HP_RELU)
resnet_log_helper.log_end_block(output_tensor=output)
return output
def block_group(inputs, filters, block_fn, blocks, strides, is_training, name,
data_format='channels_first'):
"""Creates one group of blocks for the ResNet model.
Args:
inputs: `Tensor` of size `[batch, channels, height, width]`.
filters: `int` number of filters for the first convolution of the layer.
block_fn: `function` for the block to use within the model
blocks: `int` number of blocks contained in the layer.
strides: `int` stride to use for the first convolution of the layer. If
greater than 1, this layer will downsample the input.
is_training: `bool` for whether the model is training.
name: `str` name for the Tensor output of the block layer.
data_format: `str` either "channels_first" for `[batch, channels, height,
width]` or "channels_last for `[batch, height, width, channels]`.
Returns:
The output `Tensor` of the block layer.
"""
# Drop batch size from shape logging.
if is_training:
mlperf_log.resnet_print(
key=mlperf_log.MODEL_HP_INITIAL_SHAPE, value=inputs.shape.as_list()[1:])
# Only the first block per block_group uses projection shortcut and strides.
inputs = block_fn(inputs, filters, is_training, strides,
use_projection=True, data_format=data_format)
for _ in range(1, blocks):
inputs = block_fn(inputs, filters, is_training, 1,
data_format=data_format)
return tf.identity(inputs, name)
def resnet_v1_generator(block_fn, layers, num_classes,
data_format='channels_first'):
"""Generator for ResNet v1 models.
Args:
block_fn: `function` for the block to use within the model. Either
`residual_block` or `bottleneck_block`.
layers: list of 4 `int`s denoting the number of blocks to include in each
of the 4 block groups. Each group consists of blocks that take inputs of
the same resolution.
num_classes: `int` number of possible classes for image classification.
data_format: `str` either "channels_first" for `[batch, channels, height,
width]` or "channels_last for `[batch, height, width, channels]`.
Returns:
Model `function` that takes in `inputs` and `is_training` and returns the
output `Tensor` of the ResNet model.
"""
def model(inputs, is_training):
"""Creation of the model graph."""
inputs = conv2d_fixed_padding(
inputs=inputs,
filters=64,
kernel_size=7,
strides=2,
is_training=is_training,
data_format=data_format)
inputs = tf.identity(inputs, 'initial_conv')
inputs = batch_norm_relu(inputs, is_training, data_format=data_format)
pooled_inputs = tf.layers.max_pooling2d(
inputs=inputs, pool_size=3, strides=2, padding='SAME',
data_format=data_format)
if is_training:
resnet_log_helper.log_max_pool(input_tensor=inputs,
output_tensor=pooled_inputs)
inputs = tf.identity(pooled_inputs, 'initial_max_pool')
inputs = block_group(
inputs=inputs, filters=64, block_fn=block_fn, blocks=layers[0],
strides=1, is_training=is_training, name='block_group1',
data_format=data_format)
inputs = block_group(
inputs=inputs, filters=128, block_fn=block_fn, blocks=layers[1],
strides=2, is_training=is_training, name='block_group2',
data_format=data_format)
inputs = block_group(
inputs=inputs, filters=256, block_fn=block_fn, blocks=layers[2],
strides=2, is_training=is_training, name='block_group3',
data_format=data_format)
inputs = block_group(
inputs=inputs, filters=512, block_fn=block_fn, blocks=layers[3],
strides=2, is_training=is_training, name='block_group4',
data_format=data_format)
# The activation is 7x7 so this is a global average pool.
# TODO(huangyp): reduce_mean will be faster.
pool_size = (inputs.shape[1], inputs.shape[2])
inputs = tf.layers.average_pooling2d(
inputs=inputs, pool_size=pool_size, strides=1, padding='VALID',
data_format=data_format)
inputs = tf.identity(inputs, 'final_avg_pool')
inputs = tf.reshape(
inputs, [-1, 2048 if block_fn is bottleneck_block else 512])
if is_training:
mlperf_log.resnet_print(key=mlperf_log.MODEL_HP_DENSE, value=num_classes)
inputs = tf.layers.dense(
inputs=inputs,
units=num_classes,
kernel_initializer=tf.random_normal_initializer(stddev=.01))
inputs = tf.identity(inputs, 'final_dense')
if is_training:
mlperf_log.resnet_print(
key=mlperf_log.MODEL_HP_FINAL_SHAPE, value=inputs.shape.as_list()[1:])
return inputs
model.default_image_size = 224
return model
def resnet_v1(resnet_depth, num_classes, data_format='channels_first'):
"""Returns the ResNet model for a given size and number of output classes."""
model_params = {
18: {'block': residual_block, 'layers': [2, 2, 2, 2]},
34: {'block': residual_block, 'layers': [3, 4, 6, 3]},
50: {'block': bottleneck_block, 'layers': [3, 4, 6, 3]},
101: {'block': bottleneck_block, 'layers': [3, 4, 23, 3]},
152: {'block': bottleneck_block, 'layers': [3, 8, 36, 3]},
200: {'block': bottleneck_block, 'layers': [3, 24, 36, 3]}
}
if resnet_depth not in model_params:
raise ValueError('Not a valid resnet_depth:', resnet_depth)
params = model_params[resnet_depth]
return resnet_v1_generator(
params['block'], params['layers'], num_classes, data_format)
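# Minimal usage sketch (illustrative, not part of the original file). It
# assumes TF 1.x graph mode and an `images` tensor already defined with
# shape [batch, 3, 224, 224] (channels_first):
#
#   network_fn = resnet_v1(resnet_depth=50, num_classes=1000)
#   logits = network_fn(images, is_training=True)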
|
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/thumbor/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com thumbor@googlegroups.com
from __future__ import print_function
import random
import unicodedata
from io import BytesIO
from unittest import TestCase as PythonTestCase
import mimetypes
from os.path import exists, realpath, dirname, join
import mock
from PIL import Image
from ssim import compute_ssim
from preggy import create_assertions
from six import StringIO
from six.moves.urllib.parse import urlencode
from thumbor.app import ThumborServiceApp
from thumbor.context import Context, RequestParameters
from thumbor.config import Config
from thumbor.importer import Importer
from thumbor.transformer import Transformer
from thumbor.engines.pil import Engine as PilEngine
from tornado.testing import AsyncHTTPTestCase
try:
unicode # Python 2
except NameError:
unicode = str # Python 3
@create_assertions
def to_exist(topic):
return exists(topic)
def normalize_unicode_path(path):
normalized_path = path
for format in ['NFD', 'NFC', 'NFKD', 'NFKC']:
normalized_path = unicodedata.normalize(format, unicode(path))
if exists(normalized_path):
break
return normalized_path
@create_assertions
def to_be_the_same_as(topic, expected):
topic = normalize_unicode_path(topic)
expected = normalize_unicode_path(expected)
if not exists(topic):
raise AssertionError("File at %s does not exist" % topic)
if not exists(expected):
raise AssertionError("File at %s does not exist" % expected)
topic_image = Image.open(topic)
expected_image = Image.open(expected)
return get_ssim(topic_image, expected_image) > 0.95
@create_assertions
def to_be_similar_to(topic, expected):
topic_image = Image.open(StringIO(topic))
expected_image = Image.open(StringIO(expected))
return get_ssim(topic_image, expected_image) > 0.95
@create_assertions
def to_be_webp(topic):
im = Image.open(StringIO(topic))
return im.format.lower() == 'webp'
@create_assertions
def to_be_png(topic):
im = Image.open(StringIO(topic))
return im.format.lower() == 'png'
@create_assertions
def to_be_gif(topic):
im = Image.open(StringIO(topic))
return im.format.lower() == 'gif'
@create_assertions
def to_be_jpeg(topic):
im = Image.open(StringIO(topic))
return im.format.lower() == 'jpeg'
def get_ssim(actual, expected):
if actual.size[0] != expected.size[0] or actual.size[1] != expected.size[1]:
raise RuntimeError(
"Can't calculate SSIM for images of different sizes (one is %dx%d, the other %dx%d)." % (
actual.size[0], actual.size[1],
expected.size[0], expected.size[1],
)
)
return compute_ssim(actual, expected)
@create_assertions
def to_be_resized(image):
return image.has_resized_properly()
@create_assertions
def to_be_cropped(image):
return image.has_cropped_properly()
def encode_multipart_formdata(fields, files):
BOUNDARY = 'thumborUploadFormBoundary'
CRLF = '\r\n'
L = []
for key, value in fields.items():
L.append('--' + BOUNDARY)
L.append('Content-Disposition: form-data; name="%s"' % key)
L.append('')
L.append(value)
for (key, filename, value) in files:
L.append('--' + BOUNDARY)
L.append('Content-Disposition: form-data; name="%s"; filename="%s"' % (key, filename))
L.append('Content-Type: %s' % (mimetypes.guess_type(filename)[0] or 'application/octet-stream'))
L.append('')
L.append(value)
L.append('')
L.append('')
L.append('--' + BOUNDARY + '--')
body = CRLF.join([str(item) for item in L])
content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
return content_type, body
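# Illustrative usage sketch (the field and file values here are hypothetical);
# post_files() below builds its request body in exactly this way:
#
#   content_type, body = encode_multipart_formdata(
#       {'Slug': 'a-title'},
#       [('media', 'image.jpg', raw_jpeg_bytes)])
#
# content_type carries the multipart boundary and body is the encoded payload.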
class TestCase(AsyncHTTPTestCase):
_multiprocess_can_split_ = True
def get_app(self):
self.context = self.get_context()
return ThumborServiceApp(self.context)
def get_config(self):
return Config()
def get_server(self):
return None
def get_importer(self):
return None
def get_request_handler(self):
return None
def get_context(self):
self.config = self.get_config()
self.server = self.get_server()
self.importer = self.get_importer()
self.request_handler = self.get_request_handler()
return Context(
self.server,
self.config,
self.importer,
self.request_handler
)
def get(self, path, headers):
return self.fetch(path,
method='GET',
body=urlencode({}, doseq=True),
headers=headers,
allow_nonstandard_methods=True)
def post(self, path, headers, body):
return self.fetch(path,
method='POST',
body=body,
headers=headers,
allow_nonstandard_methods=True)
def put(self, path, headers, body):
return self.fetch(path,
method='PUT',
body=body,
headers=headers,
allow_nonstandard_methods=True)
def delete(self, path, headers):
return self.fetch(path,
method='DELETE',
body=urlencode({}, doseq=True),
headers=headers,
allow_nonstandard_methods=True)
def post_files(self, path, data={}, files=[]):
multipart_data = encode_multipart_formdata(data, files)
return self.fetch(path,
method='POST',
body=multipart_data[1],
headers={
'Content-Type': multipart_data[0]
},
allow_nonstandard_methods=True)
class FilterTestCase(PythonTestCase):
_multiprocess_can_split_ = True
def setUp(self):
self.context = {}
def get_filter(self, filter_name, params_string="", config_context=None):
config = Config(
FILTERS=[filter_name],
LOADER='thumbor.loaders.file_loader',
FILE_LOADER_ROOT_PATH=join(dirname(realpath(__file__)), 'fixtures', 'filters')
)
importer = Importer(config)
importer.import_modules()
req = RequestParameters()
context = Context(config=config, importer=importer)
context.request = req
context.request.engine = context.modules.engine
if config_context is not None:
config_context(context)
self.context = context
fltr = importer.filters[0]
fltr.pre_compile()
context.transformer = Transformer(context)
return fltr(params_string, context=context)
def get_fixture_path(self, name):
return './tests/fixtures/filters/%s' % name
def get_fixture(self, name):
im = Image.open(self.get_fixture_path(name))
return im.convert('RGB')
def get_filtered(self, source_image, filter_name, params_string, config_context=None):
fltr = self.get_filter(filter_name, params_string, config_context)
im = Image.open(self.get_fixture_path(source_image))
img_buffer = BytesIO()
# Special case for the quality test: the quality filter doesn't really affect
# the image, it only sets a context value for use on save. Here we convert the
# result rather than saving it, so the quality is applied when writing the buffer.
if params_string == 'quality(10)':
im.save(img_buffer, 'JPEG', quality=10)
fltr.engine.load(img_buffer.getvalue(), '.jpg')
else:
im.save(img_buffer, 'PNG', quality=100)
fltr.engine.load(img_buffer.getvalue(), '.png')
fltr.context.transformer.img_operation_worker()
def dummy_callback(*args):
pass
fltr.run(dummy_callback)
fltr.engine.image = fltr.engine.image.convert('RGB')
return fltr.engine.image
def get_ssim(self, actual, expected):
return get_ssim(actual, expected)
def debug(self, image):
im = Image.fromarray(image)
path = '/tmp/debug_image_%s.jpg' % random.randint(1, 10000)
im.save(path, 'JPEG')
print('The debug image was in %s.' % path)
def debug_size(self, image):
im = Image.fromarray(image)
print("Image dimensions are %dx%d (shape is %s)" % (im.size[0], im.size[1], image.shape))
class DetectorTestCase(PythonTestCase):
_multiprocess_can_split_ = True
def setUp(self):
self.context = mock.Mock(request=mock.Mock(focal_points=[]))
self.engine = PilEngine(self.context)
self.context.modules.engine = self.engine
|
|
from .utils import lzip, copy_func, find_in_bases
from .constants import *
from .exception import *
import functools
import inspect
__all__ = ["mediator_meta", "TemplateFunctionMeta"]
def mediator_meta(*metaclasses):
"""
Return a metaclass that acts as a mediator for multiple metaclasses.
EXAMPLE
=======
----
>>> class Meta1(type):
def __new__(metacls, name, bases, kwargs):
print('metaclass 1')
return super().__new__(metacls, name, bases, kwargs)
>>> class Meta2(type):
def __new__(metacls, name, bases, kwargs):
print('metaclass 2')
return super().__new__(metacls, name, bases, kwargs)
>>> class A(metaclass=mediator_meta(Meta1, Meta2)):
pass
metaclass 1
metaclass 2
>>>
----
"""
class MediatorMeta(*metaclasses):
pass
return MediatorMeta
class TemplateFunctionMeta(type):
r"""
The metaclass for a Template Function.
"""
def __new__(metacls, name, bases, kwargs, flags={}):
# Warning registry for special cases.
# (Special cases outlined by the messages in
# TFuncWarnings).
kwargs['_warnings'] = []
# Find the __call__ function.
call = kwargs.get('__call__')
if call is None:
call = find_in_bases(bases, '__call__')
# __unimplemented__ lets us know whether a TemplateFunction is a function
# which can be called, or simply serves as an abstract base which cannot
# be called itself but is instead inherited from, with the child
# functions being the callable ones.
#
# If __unimplemented__ is True, an error will be raised in
# TemplateFunctionMeta.__call__.
if call is None:
kwargs['__unimplemented__'] = True
return super().__new__(metacls, name, bases, kwargs)
else:
if isinstance(call, TemplateFunctionMeta):
call = call.__function__
# Copy call so any changes we make to it don't affect the original.
call = copy_func(call)
# Move __call__.__doc__ over to class.__doc__. This is purely cosmetic;
# it just makes the help information more visible to users calling
# ``help`` on a TemplateFunction.
if call.__doc__:
kwargs['__doc__'] = call.__doc__
elif flags.get('docstring'):
kwargs['__doc__'] = flags['docstring'].__doc__
if not (kwargs.get('__unimplemented__') or \
flags.get('unimplemented')):
# Unless the user explicitly stated that the
# function is to remain unimplemented, set it
# to be implemented.
kwargs['__unimplemented__'] = False
unwrap_level = flags.get('unwrap_level')
# "Undecorate" a decorated TemplateFunction.
if unwrap_level:
if unwrap_level == UNWRAP_ALL:
while getattr(call, '__wrapped__', None):
call = getattr(call, '__wrapped__')
else:
for i in range(unwrap_level):
wrapped = getattr(call, '__wrapped__', None)
if wrapped is None:
break
call = wrapped
# has_defaults is a flag that serves to indicate
# later whether or not a warning should be raised
has_defaults = False
# Handle the specific default parameters
# (PARAM_DEFAULT & PARAM_VARIABLE)
if call.__defaults__:
# Here we actually change the values of the default parameters
# depending on what the class-wide variables are.
arg_names = inspect.getfullargspec(call).args
defaults = call.__defaults__
new_defaults = []
# This funky iterator will iterate through the names of the parameters
# with default arguments along with their associated default value.
for attr, val in reversed(lzip(*map(reversed, (arg_names, defaults)))):
if val not in {PARAM_DEFAULT, PARAM_VARIABLE}:
new_defaults.append(val)
continue
if attr in kwargs:
val = kwargs[attr]
else:
has_defaults = True
# Check the inherited functions to see if they sport the
# attribute we're looking for.
for base in bases:
val = base.__dict__.get(attr)
if val is not None:
break
if val is None:
val = PARAM_DEFAULT
if not kwargs['__unimplemented__']:
kwargs['_warnings'].append(
TFuncWarnings.call_with_default
)
if val == PARAM_VARIABLE:
if new_defaults:
raise ParameterError(
"Default arguments set to "
"VARIABLE must come first."
)
continue
new_defaults.append(val)
call.__defaults__ = tuple(new_defaults)
# Apply any optional decorators to the function.
decorators = kwargs.get('__decorators__', flags.get('decorators'))
if decorators is None:
decorators = find_in_bases(bases, '__decorators__')
if decorators:
if has_defaults:
# This actually shouldn't cause any interference with
# decorators but it's nice to warn just in case.
kwargs['_warnings'].append(
TFuncWarnings.decorate_with_default
)
if not isinstance(decorators, (list, tuple)):
decorators = (decorators, )
for decorator in decorators:
old_call = call
call = decorator(call)
call.__wrapped__ = old_call
kwargs['__decorators__'] = decorators if decorators else []
kwargs['__function__'] = call
return super().__new__(metacls, name, bases, kwargs)
# Need to implement this, otherwise type.__init__ is called
# which will raise a TypeError if flags are supplied.
def __init__(cls, name, bases, kwargs, flags={}):
type.__init__(cls, name, bases, kwargs)
def __call__(cls, *args, **kwargs):
r"""
X.__call__(*args, **kwargs) <==> X(*args, **kwargs)
"""
if cls.__unimplemented__:
raise NotImplementedError(
"'%s' template function is not implemented." %\
cls.__name__
)
if cls._warnings:
for warning in cls._warnings:
warning.warn()
return cls.__function__(cls, *args, **kwargs)
# This would have to be in TemplateFunctionMeta's metaclass
# for it to work anyways.
#
# def __instancecheck__(cls, value):
# r"""
# X.__instancecheck__(types) <==> isinstance(X, types)
# """
# # Urgh, python's ``isinstance`` doesn't actually
# # call __instancecheck__ for some dumb reason so
# # this doesn't work, and it is beyond my power to
# # fix it.
# if types.FunctionType == value or \
# (isinstance(value, (list, tuple)) and \
# types.FunctionType in value):
# return True
# return isinstance(cls, value)
def __getattr__(cls, key):
r"""
X.__getattr__(key) <==> X.key
"""
# If the attribute in question is part of the class dict,
# return it. If it is part of the __function__ dict, return
that instead (class attributes have higher priority, however).
try:
return cls.__dict__[key]
except KeyError:
try:
return cls.__function__.__dict__[key]
except KeyError:
raise AttributeError(
"%s has no attribute '%s'." % \
(cls.__name__, key)
)
def __setattr__(cls, key, val):
r"""
X.__setattr__(key, val) <==> X.key = val
"""
if key in cls.__function__.__dict__:
cls.__function__.__dict__[key] = val
else:
super().__setattr__(key, val)
def __get__(cls, instance, owner):
r"""
Implement __get__ so TemplateFunctions can be used as methods.
"""
wraps = functools.wraps(cls.__function__)
if instance is None:
@wraps
def wrapped(*args, **kwargs):
return cls.__function__(cls, *args, **kwargs)
else:
@wraps
def wrapped(*args, **kwargs):
return cls.__function__(cls, instance, *args, **kwargs)
return wrapped
def __repr__(cls):
r"""
X.__repr__() <==> repr(X)
"""
return "<TemplateFunction:: '%s' at 0x%x>" % \
(cls.__name__, id(cls))
def __invert__(cls):
r"""
X.__invert__() <==> ~X
"""
if not hasattr(cls.__function__, '__wrapped__'):
return cls
class UnwrappedFunction(TemplateFunction):
__call__ = cls.__function__.__wrapped__
return UnwrappedFunction
def __mul__(cls, other):
r"""
X.__mul__(Y) <==> X * Y
Multiplication of two TemplateFunctions or a TemplateFunction and a
normal function will result in a "CompositeFunction", that is, a
TemplateFunction that when called first calls the second function,
then takes the result and feeds it to the first function.
It can be likened to mathematical functions, whose composition is
defined as so:
(composition of f and g)(x) = f(g(x))
"""
class CompositeFunction(TemplateFunction):
def __call__(cls_, *args, **kwargs):
result = other(*args, **kwargs)
return cls(result)
return CompositeFunction
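# Illustrative example (the Double and Increment template functions here
# are hypothetical): if Double(x) returns x * 2 and Increment(x) returns
# x + 1, then (Double * Increment)(3) calls Increment(3) first and feeds
# the result to Double, giving 8.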
def __pow__(cls, other):
r"""
X.__pow__(Y) <==> X ** Y
Similar to composing two functions via ``__mul__``, except the
result of calling ``other`` is interpreted as *args and **kwargs
to be sent into the first function, as opposed to a lone argument
that gets passed in when using ``__mul__``.
"""
class CompositeFunction(TemplateFunction):
def __call__(cls_, *args, **kwargs):
nargs, nkwargs = other(*args, **kwargs)
return cls(*nargs, **nkwargs)
return CompositeFunction
@property
def parameters(cls):
r"""
Return the parameters of the function.
"""
return inspect.getfullargspec(cls.__function__)
@property
def decorators(cls):
r"""
Return the decorators applying to the function.
"""
return cls.__decorators__
|
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for building profiler options."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
from tensorflow.python.profiler import tfprof_logger
from tensorflow.python.util.tf_export import tf_export
@tf_export(v1=['profiler.ProfileOptionBuilder'])
class ProfileOptionBuilder(object):
# pylint: disable=line-too-long
"""Option Builder for Profiling API.
For tutorial on the options, see
https://github.com/tensorflow/tensorflow/tree/master/tensorflow/core/profiler/g3doc/options.md
```python
# Users can use pre-built options:
opts = (
tf.profiler.ProfileOptionBuilder.trainable_variables_parameter())
# Or, build your own options:
opts = (tf.profiler.ProfileOptionBuilder()
.with_max_depth(10)
.with_min_micros(1000)
.select(['accelerator_micros'])
.with_stdout_output()
.build())
# Or customize the pre-built options:
opts = (tf.profiler.ProfileOptionBuilder(
tf.profiler.ProfileOptionBuilder.time_and_memory())
.with_displaying_options(show_name_regexes=['.*rnn.*'])
.build())
# Finally, profiling with the options:
_ = tf.profiler.profile(tf.get_default_graph(),
run_meta=run_meta,
cmd='scope',
options=opts)
```
"""
# pylint: enable=line-too-long
def __init__(self, options=None):
"""Constructor.
Args:
options: Optional initial option dict to start with.
"""
if options is not None:
self._options = copy.deepcopy(options)
else:
self._options = {'max_depth': 100,
'min_bytes': 0,
'min_micros': 0,
'min_params': 0,
'min_float_ops': 0,
'min_occurrence': 0,
'order_by': 'name',
'account_type_regexes': ['.*'],
'start_name_regexes': ['.*'],
'trim_name_regexes': [],
'show_name_regexes': ['.*'],
'hide_name_regexes': [],
'account_displayed_op_only': False,
'select': ['micros'],
'step': -1,
'output': 'stdout'}
@staticmethod
def trainable_variables_parameter():
"""Options used to profile trainable variable parameters.
Normally used together with 'scope' view.
Returns:
A dict of profiling options.
"""
return {'max_depth': 10000,
'min_bytes': 0,
'min_micros': 0,
'min_params': 0,
'min_float_ops': 0,
'min_occurrence': 0,
'order_by': 'name',
'account_type_regexes': [tfprof_logger.TRAINABLE_VARIABLES],
'start_name_regexes': ['.*'],
'trim_name_regexes': [],
'show_name_regexes': ['.*'],
'hide_name_regexes': [],
'account_displayed_op_only': True,
'select': ['params'],
'step': -1,
'output': 'stdout'}
@staticmethod
def float_operation():
# pylint: disable=line-too-long
"""Options used to profile float operations.
Please see https://github.com/tensorflow/tensorflow/tree/master/tensorflow/core/profiler/g3doc/profile_model_architecture.md
on the caveats of calculating float operations.
Returns:
A dict of profiling options.
"""
# pylint: enable=line-too-long
return {'max_depth': 10000,
'min_bytes': 0,
'min_micros': 0,
'min_params': 0,
'min_float_ops': 1,
'min_occurrence': 0,
'order_by': 'float_ops',
'account_type_regexes': ['.*'],
'start_name_regexes': ['.*'],
'trim_name_regexes': [],
'show_name_regexes': ['.*'],
'hide_name_regexes': [],
'account_displayed_op_only': True,
'select': ['float_ops'],
'step': -1,
'output': 'stdout'}
@staticmethod
def time_and_memory(min_micros=1, min_bytes=1, min_accelerator_micros=0,
min_cpu_micros=0, min_peak_bytes=0, min_residual_bytes=0,
min_output_bytes=0):
"""Show operation time and memory consumptions.
Args:
min_micros: Only show profiler nodes with execution time
no less than this. It sums accelerator and cpu times.
min_bytes: Only show profiler nodes requested to allocate no fewer
bytes than this.
min_accelerator_micros: Only show profiler nodes that spend no less than
this time on the accelerator (e.g. GPU).
min_cpu_micros: Only show profiler nodes that spend no less than
this time on the cpu.
min_peak_bytes: Only show profiler nodes using no fewer than this many
bytes at peak (high watermark). For profiler nodes consisting of multiple
graph nodes, this sums the graph nodes' peak_bytes.
min_residual_bytes: Only show profiler nodes that have no fewer than
this many bytes not de-allocated after Compute() ends. For
profiler nodes consisting of multiple graph nodes, this sums the
graph nodes' residual_bytes.
min_output_bytes: Only show profiler nodes that output no fewer than this
many bytes. The output is not necessarily allocated by the profiler
node itself.
Returns:
A dict of profiling options.
"""
return {'max_depth': 10000,
'min_bytes': min_bytes,
'min_peak_bytes': min_peak_bytes,
'min_residual_bytes': min_residual_bytes,
'min_output_bytes': min_output_bytes,
'min_micros': min_micros,
'min_accelerator_micros': min_accelerator_micros,
'min_cpu_micros': min_cpu_micros,
'min_params': 0,
'min_float_ops': 0,
'min_occurrence': 0,
'order_by': 'micros',
'account_type_regexes': ['.*'],
'start_name_regexes': ['.*'],
'trim_name_regexes': [],
'show_name_regexes': ['.*'],
'hide_name_regexes': [],
'account_displayed_op_only': True,
'select': ['micros', 'bytes'],
'step': -1,
'output': 'stdout'}
def build(self):
"""Build a profiling option.
Returns:
A dict of profiling options.
"""
return copy.deepcopy(self._options)
def with_max_depth(self, max_depth):
"""Set the maximum depth of display.
The depth depends on profiling view. For 'scope' view, it's the
depth of name scope hierarchy (tree), for 'op' view, it's the number
of operation types (list), etc.
Args:
max_depth: Maximum depth of the data structure to display.
Returns:
self
"""
self._options['max_depth'] = max_depth
return self
def with_min_memory(self,
min_bytes=0,
min_peak_bytes=0,
min_residual_bytes=0,
min_output_bytes=0):
"""Only show profiler nodes consuming no less than 'min_bytes'.
Args:
min_bytes: Only show profiler nodes requested to allocate no fewer
bytes than this.
min_peak_bytes: Only show profiler nodes using no fewer than this many
bytes at peak (high watermark). For profiler nodes consisting of multiple
graph nodes, this sums the graph nodes' peak_bytes.
min_residual_bytes: Only show profiler nodes that have no fewer than
this many bytes not de-allocated after Compute() ends. For
profiler nodes consisting of multiple graph nodes, this sums the
graph nodes' residual_bytes.
min_output_bytes: Only show profiler nodes that output no fewer than this
many bytes. The output is not necessarily allocated by the profiler
node itself.
Returns:
self
"""
self._options['min_bytes'] = min_bytes
self._options['min_peak_bytes'] = min_peak_bytes
self._options['min_residual_bytes'] = min_residual_bytes
self._options['min_output_bytes'] = min_output_bytes
return self
def with_min_execution_time(self,
min_micros=0,
min_accelerator_micros=0,
min_cpu_micros=0):
"""Only show profiler nodes consuming no less than 'min_micros'.
Args:
min_micros: Only show profiler nodes with execution time
no less than this. It sums accelerator and cpu times.
min_accelerator_micros: Only show profiler nodes that spend no less than
this time on the accelerator (e.g. GPU).
min_cpu_micros: Only show profiler nodes that spend no less than
this time on the cpu.
Returns:
self
"""
self._options['min_micros'] = min_micros
self._options['min_accelerator_micros'] = min_accelerator_micros
self._options['min_cpu_micros'] = min_cpu_micros
return self
def with_min_parameters(self, min_params):
"""Only show profiler nodes holding no less than 'min_params' parameters.
'Parameters' normally refers to the weights held in TensorFlow variables.
It reflects the 'capacity' of models.
Args:
min_params: Only show profiler nodes holding no fewer parameters
than this.
Returns:
self
"""
self._options['min_params'] = min_params
return self
def with_min_occurrence(self, min_occurrence):
# pylint: disable=line-too-long
"""Only show profiler nodes including no less than 'min_occurrence' graph nodes.
A "node" means a profiler output node, which can be a python line
(code view), an operation type (op view), or a graph node
(graph/scope view). A python line includes all graph nodes created by that
line, while an operation type includes all graph nodes of that type.
Args:
min_occurrence: Only show profiler nodes including no fewer than this many graph nodes.
Returns:
self
"""
# pylint: enable=line-too-long
self._options['min_occurrence'] = min_occurrence
return self
def with_min_float_operations(self, min_float_ops):
# pylint: disable=line-too-long
"""Only show profiler nodes consuming no less than 'min_float_ops'.
Please see https://github.com/tensorflow/tensorflow/tree/master/tensorflow/core/profiler/g3doc/profile_model_architecture.md
on the caveats of calculating float operations.
Args:
min_float_ops: Only show profiler nodes with float operations
no less than this.
Returns:
self
"""
# pylint: enable=line-too-long
self._options['min_float_ops'] = min_float_ops
return self
def with_accounted_types(self, account_type_regexes):
"""Selectively counting statistics based on node types.
Here, 'types' means the profiler nodes' properties. The profiler by default
considers device name (e.g. /job:xx/.../device:GPU:0) and operation type
(e.g. MatMul) as profiler nodes' properties. Users can also associate
customized 'types' with profiler nodes through the OpLogProto proto.
For example, user can select profiler nodes placed on gpu:0 with:
`account_type_regexes=['.*gpu:0.*']`
If none of a node's properties match the specified regexes, the node is
not displayed nor accounted.
Args:
account_type_regexes: A list of regexes specifying the types.
Returns:
self.
"""
self._options['account_type_regexes'] = copy.copy(account_type_regexes)
return self
def with_node_names(self,
start_name_regexes=None,
show_name_regexes=None,
hide_name_regexes=None,
trim_name_regexes=None):
"""Regular expressions used to select profiler nodes to display.
After 'with_accounted_types' is evaluated, 'with_node_names' are
evaluated as follows:
For a profile data structure, profiler first finds the profiler
nodes matching 'start_name_regexes', and starts displaying profiler
nodes from there. Then, if a node matches 'show_name_regexes' and
doesn't match 'hide_name_regexes', it's displayed. If a node matches
'trim_name_regexes', profiler stops further searching that branch.
Args:
start_name_regexes: list of node name regexes to start displaying.
show_name_regexes: list of node names regexes to display.
hide_name_regexes: list of node_names regexes that should be hidden.
trim_name_regexes: list of node name regexes from where to stop.
Returns:
self
"""
if start_name_regexes is not None:
self._options['start_name_regexes'] = copy.copy(start_name_regexes)
if show_name_regexes is not None:
self._options['show_name_regexes'] = copy.copy(show_name_regexes)
if hide_name_regexes is not None:
self._options['hide_name_regexes'] = copy.copy(hide_name_regexes)
if trim_name_regexes is not None:
self._options['trim_name_regexes'] = copy.copy(trim_name_regexes)
return self
def account_displayed_op_only(self, is_true):
"""Whether only account the statistics of displayed profiler nodes.
Args:
is_true: If true, only account statistics of nodes eventually
displayed by the outputs.
Otherwise, a node's statistics are accounted by its parents
as long as its types match 'account_type_regexes', even if
it is hidden from the output, say, by hide_name_regexes.
Returns:
self
"""
self._options['account_displayed_op_only'] = is_true
return self
def with_empty_output(self):
"""Do not generate side-effect outputs."""
self._options['output'] = 'none'
return self
def with_stdout_output(self):
"""Print the result to stdout."""
self._options['output'] = 'stdout'
return self
def with_file_output(self, outfile):
"""Print the result to a file."""
self._options['output'] = 'file:outfile=%s' % outfile
return self
def with_timeline_output(self, timeline_file):
"""Generate a timeline json file."""
self._options['output'] = 'timeline:outfile=%s' % timeline_file
return self
def with_pprof_output(self, pprof_file):
"""Generate a pprof profile gzip file.
To use the pprof file:
pprof -png --nodecount=100 --sample_index=1 <pprof_file>
Args:
pprof_file: filename for output, usually suffixed with .pb.gz.
Returns:
self.
"""
self._options['output'] = 'pprof:outfile=%s' % pprof_file
return self
def order_by(self, attribute):
# pylint: disable=line-too-long
"""Order the displayed profiler nodes based on a attribute.
Supported attribute includes micros, bytes, occurrence, params, etc.
https://github.com/tensorflow/tensorflow/tree/master/tensorflow/core/profiler/g3doc/options.md
Args:
attribute: An attribute the profiler node has.
Returns:
self
"""
# pylint: enable=line-too-long
self._options['order_by'] = attribute
return self
def select(self, attributes):
# pylint: disable=line-too-long
"""Select the attributes to display.
See https://github.com/tensorflow/tensorflow/tree/master/tensorflow/core/profiler/g3doc/options.md
for supported attributes.
Args:
attributes: A list of attributes the profiler node has.
Returns:
self
"""
# pylint: enable=line-too-long
self._options['select'] = copy.copy(attributes)
return self
def with_step(self, step):
"""Which profile step to use for profiling.
The 'step' here refers to the step defined by `Profiler.add_step()` API.
Args:
step: When multiple steps of profiles are available, select which step's
profile to use. If -1, use average of all available steps.
Returns:
self
"""
self._options['step'] = step
return self
|
|
# -*- coding: utf-8 -*-
import datetime
import functools
import operator
import re
from dateutil import parser
from modularodm import Q
from nose.tools import * # flake8: noqa
from rest_framework import serializers as ser
from unittest import TestCase
from tests.base import ApiTestCase
from api.base.filters import (
ListFilterMixin,
ODMFilterMixin,
)
import api.base.filters as filters
from api.base.exceptions import (
InvalidFilterError,
InvalidFilterOperator,
InvalidFilterComparisonType,
InvalidFilterMatchType,
)
from api.base.serializers import RelationshipField
class FakeSerializer(ser.Serializer):
filterable_fields = ('id', 'string_field', 'second_string_field','list_field', 'date_field', 'int_field', 'bool_field', 'relationship_field')
id = ser.CharField()
string_field = ser.CharField()
second_string_field = ser.CharField()
list_field = ser.ListField()
date_field = ser.DateField()
datetime_field = ser.DateTimeField()
int_field = ser.IntegerField()
float_field = ser.FloatField()
bool_field = ser.BooleanField(source='foobar')
relationship_field = RelationshipField(related_view='fake', related_view_kwargs={})
class FakeRecord(object):
def __init__(
self,
_id=None,
string_field='foo',
second_string_field='bar',
list_field=None,
date_field=datetime.datetime.now(),
datetime_field=datetime.datetime.now(),
int_field=42,
float_field=41.99999,
foobar=True
):
self._id = _id
self.string_field = string_field
self.second_string_field = second_string_field
self.list_field = list_field or [1, 2, 3]
self.date_field = date_field
self.datetime_field = datetime_field
self.int_field = int_field
self.float_field = float_field
# bool_field in serializer corresponds to foobar in model
self.foobar = foobar
class FakeView(ODMFilterMixin):
serializer_class = FakeSerializer
class FakeListView(ListFilterMixin):
serializer_class = FakeSerializer
class TestFilterMixin(ApiTestCase):
def setUp(self):
super(TestFilterMixin, self).setUp()
self.view = FakeView()
def test_parse_query_params_default_operators(self):
query_params = {
'filter[string_field]': 'foo',
'filter[list_field]': 'bar',
'filter[int_field]': '42',
'filter[bool_field]': 'false',
}
fields = self.view.parse_query_params(query_params)
assert_in('string_field', fields['filter[string_field]'])
assert_equal(fields['filter[string_field]']['string_field']['op'], 'icontains')
assert_in('list_field', fields['filter[list_field]'])
assert_equal(fields['filter[list_field]']['list_field']['op'], 'contains')
assert_in('int_field', fields['filter[int_field]'])
assert_equal(fields['filter[int_field]']['int_field']['op'], 'eq')
assert_in('bool_field', fields['filter[bool_field]'])
assert_equal(fields['filter[bool_field]']['bool_field']['op'], 'eq')
def test_parse_query_params_casts_values(self):
query_params = {
'filter[string_field]': 'foo',
'filter[list_field]': 'bar',
'filter[int_field]': '42',
'filter[bool_field]': 'false',
}
fields = self.view.parse_query_params(query_params)
assert_in('string_field', fields['filter[string_field]'])
assert_equal(fields['filter[string_field]']['string_field']['value'], 'foo')
assert_in('list_field', fields['filter[list_field]'])
assert_equal(fields['filter[list_field]']['list_field']['value'], 'bar')
assert_in('int_field', fields['filter[int_field]'])
assert_equal(fields['filter[int_field]']['int_field']['value'], 42)
assert_in('bool_field', fields.get('filter[bool_field]'))
assert_equal(fields['filter[bool_field]']['bool_field']['value'], False)
def test_parse_query_params_uses_field_source_attribute(self):
query_params = {
'filter[bool_field]': 'false',
}
fields = self.view.parse_query_params(query_params)
parsed_field = fields['filter[bool_field]']['bool_field']
assert_equal(parsed_field['source_field_name'], 'foobar')
        assert_equal(parsed_field['value'], False)
        assert_equal(parsed_field['op'], 'eq')
def test_parse_query_params_generalizes_dates(self):
query_params = {
'filter[date_field]': '2014-12-12'
}
fields = self.view.parse_query_params(query_params)
start = parser.parse('2014-12-12')
stop = start + datetime.timedelta(days=1)
for key, field_name in fields.iteritems():
for match in field_name['date_field']:
if match['op'] == 'gte':
assert_equal(match['value'], start)
elif match['op'] == 'lt':
assert_equal(match['value'], stop)
else:
self.fail()
def test_parse_query_params_comparable_field(self):
query_params = {
'filter[int_field][gt]': 42,
'filter[int_field][lte]': 9000
}
fields = self.view.parse_query_params(query_params)
for key, field_name in fields.iteritems():
if field_name['int_field']['op'] == 'gt':
assert_equal(field_name['int_field']['value'], 42)
elif field_name['int_field']['op'] == 'lte':
assert_equal(field_name['int_field']['value'], 9000)
else:
self.fail()
def test_parse_query_params_matchable_field(self):
query_params = {
'filter[string_field][contains]': 'foo',
'filter[string_field][icontains]': 'bar'
}
fields = self.view.parse_query_params(query_params)
for key, field_name in fields.iteritems():
if field_name['string_field']['op'] == 'contains':
assert_equal(field_name['string_field']['value'], 'foo')
elif field_name['string_field']['op'] == 'icontains':
assert_equal(field_name['string_field']['value'], 'bar')
else:
self.fail()
def test_parse_query_params_raises_InvalidFilterError_bad_field(self):
query_params = {
'filter[fake]': 'foo'
}
with assert_raises(InvalidFilterError):
self.view.parse_query_params(query_params)
def test_parse_query_params_raises_InvalidFilterComparisonType(self):
query_params = {
'filter[string_field][gt]': 'foo'
}
with assert_raises(InvalidFilterComparisonType):
self.view.parse_query_params(query_params)
def test_parse_query_params_raises_InvalidFilterMatchType(self):
query_params = {
'filter[date_field][icontains]': '2015'
}
with assert_raises(InvalidFilterMatchType):
self.view.parse_query_params(query_params)
def test_parse_query_params_raises_InvalidFilterOperator(self):
query_params = {
'filter[int_field][bar]': 42
}
with assert_raises(InvalidFilterOperator):
self.view.parse_query_params(query_params)
def test_InvalidFilterOperator_parameterizes_valid_operators(self):
query_params = {
'filter[int_field][bar]': 42
}
try:
self.view.parse_query_params(query_params)
except InvalidFilterOperator as err:
ops = re.search(r'one of (?P<ops>.+)\.$', err.detail).groupdict()['ops']
assert_equal(ops, "gt, gte, lt, lte, eq, ne")
query_params = {
'filter[string_field][bar]': 'foo'
}
try:
self.view.parse_query_params(query_params)
except InvalidFilterOperator as err:
ops = re.search(r'one of (?P<ops>.+)\.$', err.detail).groupdict()['ops']
assert_equal(ops, "contains, icontains, eq, ne")
def test_parse_query_params_supports_multiple_filters(self):
query_params = {
'filter[string_field]': 'foo',
'filter[string_field]': 'bar',
}
# FIXME: This test may only be checking one field
fields = self.view.parse_query_params(query_params)
assert_in('string_field', fields.get('filter[string_field]'))
for key, field_name in fields.iteritems():
assert_in(field_name['string_field']['value'], ('foo', 'bar'))
def test_convert_value_bool(self):
value = 'true'
field = FakeSerializer._declared_fields['bool_field']
value = self.view.convert_value(value, field)
assert_true(isinstance(value, bool))
assert_true(value)
def test_convert_value_date(self):
value = '2014-12-12'
field = FakeSerializer._declared_fields['date_field']
value = self.view.convert_value(value, field)
assert_true(isinstance(value, datetime.datetime))
assert_equal(value, parser.parse('2014-12-12'))
def test_convert_value_int(self):
value = '9000'
field = FakeSerializer._declared_fields['int_field']
value = self.view.convert_value(value, field)
assert_equal(value, 9000)
def test_convert_value_float(self):
value = '42'
orig_type = type(value)
field = FakeSerializer._declared_fields['float_field']
value = self.view.convert_value(value, field)
assert_equal(value, 42.0)
def test_convert_value_null_for_list(self):
value = 'null'
field = FakeSerializer._declared_fields['list_field']
value = self.view.convert_value(value, field)
assert_equal(value, [])
def test_multiple_filter_params(self):
query_params = {
'filter[string_field, second_string_field]': 'foobar'
}
fields = self.view.parse_query_params(query_params)
assert_equals(
fields['filter[string_field, second_string_field]'],
{
'string_field': {
'source_field_name': 'string_field',
'value': 'foobar',
'op': 'icontains'
},
'second_string_field' : {
'source_field_name': 'second_string_field',
'value': 'foobar',
'op': 'icontains'
}
}
)
query = self.view.query_params_to_odm_query(query_params)
assert_equals(
repr(query),
repr(functools.reduce(operator.or_, [
Q('second_string_field', 'icontains', 'foobar'),
Q('string_field', 'icontains', 'foobar')
]))
)
def test_multiple_filter_params_with_additional_different_filter(self):
query_params = {
'filter[string_field, second_string_field]': 'foobar',
'filter[bool_field]': False
}
fields = self.view.parse_query_params(query_params)
assert_equals(
fields,
{
'filter[bool_field]': {
'bool_field': {
'source_field_name': 'foobar',
'value': False,
'op': 'eq'
}
},
'filter[string_field, second_string_field]': {
'second_string_field' : {
'source_field_name': 'second_string_field',
'value': 'foobar',
'op': 'icontains'
},
'string_field': {
'source_field_name': 'string_field',
'value': 'foobar',
'op': 'icontains'
}
}
}
)
query = self.view.query_params_to_odm_query(query_params)
assert_equals(
repr(query),
repr(functools.reduce(operator.and_, [
Q('foobar', 'eq', False),
functools.reduce(operator.or_, [
Q('second_string_field', 'icontains', 'foobar'),
Q('string_field', 'icontains', 'foobar')
])
]))
)
def test_multiple_filter_params_with_additional_same_filter(self):
query_params = {
'filter[string_field, second_string_field]': 'foobar',
'filter[string_field]': 'baz'
}
fields = self.view.parse_query_params(query_params)
assert_equals(
fields,
{
'filter[string_field]': {
'string_field': {
'source_field_name': 'string_field',
'value': 'baz',
'op': 'icontains'
}
},
'filter[string_field, second_string_field]': {
'second_string_field' : {
'source_field_name': 'second_string_field',
'value': 'foobar',
'op': 'icontains'
},
'string_field': {
'source_field_name': 'string_field',
'value': 'foobar',
'op': 'icontains'
}
}
}
)
query = self.view.query_params_to_odm_query(query_params)
assert_equals(
repr(query),
repr(functools.reduce(operator.and_, [
functools.reduce(operator.or_, [
Q('second_string_field', 'icontains', 'foobar'),
Q('string_field', 'icontains', 'foobar')
]),
Q('string_field', 'icontains', 'baz')
]))
)
def test_multiple_filter_params_bad_filter(self):
query_params = {
'filter[string_field, not_a_field]': 'test'
}
with assert_raises(InvalidFilterError):
self.view.parse_query_params(query_params)
def test_bad_filter_operator(self):
query_params = {
'filter[relationship_field][invalid]': 'false',
}
with assert_raises(InvalidFilterOperator):
self.view.parse_query_params(query_params)
def test_simplified_date_filter(self):
query_params = {
'filter[date_field]': '2016-08-24'
}
query = self.view.query_params_to_odm_query(query_params)
assert_equals(
repr(query),
repr(functools.reduce(operator.and_, [
Q('date_field', 'gte', datetime.datetime(2016, 8, 24)),
Q('date_field', 'lt', datetime.datetime(2016, 8, 25)),
]))
)
class TestListFilterMixin(ApiTestCase):
def setUp(self):
super(TestListFilterMixin, self).setUp()
self.view = FakeListView()
def test_get_filtered_queryset_for_list_field_converts_to_lowercase(self):
field_name = 'list_field'
params = {
'value': 'FOO',
'source_field_name': field_name
}
default_queryset = [
FakeRecord(_id=1, list_field=['fOO', 'Foo', 'Bar', 'baR']),
FakeRecord(_id=2, list_field=['Foo', 'Bax']),
FakeRecord(_id=3, list_field=['Bar', 'baR', 'bat'])
]
filtered = self.view.get_filtered_queryset(field_name, params, default_queryset)
for record in filtered:
assert_not_equal(record._id, 3)
for id in (1, 2):
assert_in(id, [f._id for f in filtered])
def test_get_filtered_queryset_for_list_respects_special_case_of_ids_being_list(self):
field_name = 'bool_field'
params = {
'value': True,
'op': 'eq',
'source_field_name': 'foobar'
}
default_queryset = [
FakeRecord(_id=1, foobar=True),
FakeRecord(_id=2, foobar=True),
FakeRecord(_id=3, foobar=False)
]
filtered = self.view.get_filtered_queryset(field_name, params, default_queryset)
for record in filtered:
assert_not_equal(record._id, 3)
for id in (1, 2):
assert_in(id, [f._id for f in filtered])
def test_get_filtered_queryset_for_list_respects_id_always_being_list(self):
field_name = 'id'
params = {
'value': '2',
'op': 'in',
'source_field_name': '_id'
}
default_queryset = [
FakeRecord(_id='1', foobar=True),
FakeRecord(_id='2', foobar=True),
FakeRecord(_id='3', foobar=False)
]
filtered = self.view.get_filtered_queryset(field_name, params, default_queryset)
for record in filtered:
assert_equal(record._id, '2')
for id in ('1', '3'):
assert_not_in(id, [f._id for f in filtered])
def test_parse_query_params_uses_field_source_attribute(self):
query_params = {
'filter[bool_field]': 'false',
}
fields = self.view.parse_query_params(query_params)
parsed_field = fields['filter[bool_field]']['bool_field']
assert_equal(parsed_field['source_field_name'], 'foobar')
        assert_equal(parsed_field['value'], False)
        assert_equal(parsed_field['op'], 'eq')
class TestODMOrderingFilter(ApiTestCase):
class query:
title = ' '
def __init__(self, title):
self.title = title
def __str__(self):
return self.title
class query_with_num:
title = ' '
number = 0
def __init__(self, title, number):
self.title = title
self.number = number
def __str__(self):
return self.title
def test_filter_queryset_forward(self):
query_to_be_sorted = [self.query(x) for x in 'NewProj Zip Proj Activity'.split()]
sorted_query = sorted(query_to_be_sorted, cmp=filters.sort_multiple(['title']))
sorted_output = [str(i) for i in sorted_query]
assert_equal(sorted_output, ['Activity', 'NewProj', 'Proj', 'Zip'])
def test_filter_queryset_forward_duplicate(self):
query_to_be_sorted = [self.query(x) for x in 'NewProj Activity Zip Activity'.split()]
sorted_query = sorted(query_to_be_sorted, cmp=filters.sort_multiple(['title']))
sorted_output = [str(i) for i in sorted_query]
assert_equal(sorted_output, ['Activity', 'Activity', 'NewProj', 'Zip'])
def test_filter_queryset_reverse(self):
query_to_be_sorted = [self.query(x) for x in 'NewProj Zip Proj Activity'.split()]
sorted_query = sorted(query_to_be_sorted, cmp=filters.sort_multiple(['-title']))
sorted_output = [str(i) for i in sorted_query]
assert_equal(sorted_output, ['Zip', 'Proj', 'NewProj', 'Activity'])
def test_filter_queryset_reverse_duplicate(self):
query_to_be_sorted = [self.query(x) for x in 'NewProj Activity Zip Activity'.split()]
sorted_query = sorted(query_to_be_sorted, cmp=filters.sort_multiple(['-title']))
sorted_output = [str(i) for i in sorted_query]
assert_equal(sorted_output, ['Zip', 'NewProj', 'Activity', 'Activity'])
def test_filter_queryset_handles_multiple_fields(self):
objs = [self.query_with_num(title='NewProj', number=10),
self.query_with_num(title='Zip', number=20),
self.query_with_num(title='Activity', number=30),
self.query_with_num(title='Activity', number=40)]
actual = [x.number for x in sorted(objs, cmp=filters.sort_multiple(['title', '-number']))]
assert_equal(actual, [40, 30, 10, 20])
class TestQueryPatternRegex(TestCase):
def setUp(self):
super(TestQueryPatternRegex, self).setUp()
self.filter_regex = FakeView.QUERY_PATTERN
self.filter_fields = FakeView.FILTER_FIELDS
def test_single_field_filter(self):
filter_str = 'filter[name]'
match = self.filter_regex.match(filter_str)
fields = match.groupdict()['fields']
field_names = re.findall(self.filter_fields, fields)
assert_equal(fields, 'name')
assert_equal(field_names[0], 'name')
def test_double_field_filter(self):
filter_str = 'filter[name,id]'
match = self.filter_regex.match(filter_str)
fields = match.groupdict()['fields']
field_names = re.findall(self.filter_fields, fields)
assert_equal(fields, 'name,id')
assert_equal(field_names[0], 'name')
assert_equal(field_names[1], 'id')
def test_multiple_field_filter(self):
filter_str = 'filter[name,id,another,field,here]'
match = self.filter_regex.match(filter_str)
fields = match.groupdict()['fields']
field_names = re.findall(self.filter_fields, fields)
assert_equal(fields, 'name,id,another,field,here')
assert_equals(len(field_names), 5)
def test_single_field_filter_end_comma(self):
filter_str = 'filter[name,]'
match = self.filter_regex.match(filter_str)
assert_false(match)
def test_multiple_field_filter_end_comma(self):
filter_str = 'filter[name,id,]'
match = self.filter_regex.match(filter_str)
assert_false(match)
def test_multiple_field_filter_with_spaces(self):
filter_str = 'filter[name, id]'
match = self.filter_regex.match(filter_str)
fields = match.groupdict()['fields']
field_names = re.findall(self.filter_fields, fields)
assert_equal(fields, 'name, id')
assert_equal(field_names[0], 'name')
assert_equal(field_names[1], 'id')
def test_multiple_field_filter_with_blank_field(self):
filter_str = 'filter[name, , id]'
match = self.filter_regex.match(filter_str)
assert_false(match)
def test_multiple_field_filter_non_match(self):
filter_str = 'filter[name; id]'
match = self.filter_regex.match(filter_str)
assert_false(match)
def test_single_field_filter_non_match(self):
filter_str = 'fitler[name]'
match = self.filter_regex.match(filter_str)
assert_false(match)
def test_single_field_non_alphanumeric_character(self):
        filter_str = 'filter[<name>]'
match = self.filter_regex.match(filter_str)
assert_false(match)
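# A hedged sketch (not the actual api.base.filters implementation) of a filter
# regex consistent with the behaviour exercised by TestQueryPatternRegex above:
# 'filter[name]' and 'filter[name, id]' match, while trailing commas, blank
# fields and non-alphanumeric field names do not.
EXAMPLE_QUERY_PATTERN = re.compile(r'^filter\[(?P<fields>\w+(?:,\s*\w+)*)\]$')
EXAMPLE_FILTER_FIELDS = re.compile(r'\w+')
assert EXAMPLE_QUERY_PATTERN.match('filter[name, id]').groupdict()['fields'] == 'name, id'
assert EXAMPLE_QUERY_PATTERN.match('filter[name,id,]') is None
assert EXAMPLE_QUERY_PATTERN.match('filter[name, , id]') is None
assert EXAMPLE_FILTER_FIELDS.findall('name, id') == ['name', 'id']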
|
|
__author__ = 'wenjusun'
from datetime import date
import calendar
import web
from cms.cms_model import Image, Preorder
from cms import cms_service
from cms import service_config
from wshelper import ServiceHelper
from wshelper import ListWrapper
urls = (
"/home", "HomePage",
"/test", "TestPage",
"/portfolio", "Portfolio",
"/ga/(\d+)", "GetArticle",
"/la", "ListArticles",
"/sc", "School",
"/news", "News",
"/service", "Service",
"/all","ListAllForHomePage",
"/rpic","RandomPic",
"/gallery", "AllGallery",
"/yy", "Yuyue",
"/captcha","GenerateCaptch",
"/gcal","GenerateCalendar",
"/showcal/(\d+)","ShowCalendar"
)
app = web.application(urls, globals())
config = service_config.config
t_globals = {
'datestr': web.datestr,
'service_config':config
}
render = web.template.render("templates/site", globals=t_globals)
cmsService = cms_service.cmsService
logger = config.getlogger()
serviceHelper = ServiceHelper()
_EVERY_PAGE = config.nevery_page
logger.info('wndxsite initialized')
class HomePage():
def GET(self):
rlist, total = self.get_all_articles()
return render.index(rlist, self.get_gallery_imgs())
def get_gallery_imgs(self):
start= 0
nfetch=16
acode = 'hg'
# cmsService.get_album_imglist(acode,start,nfetch,itype=Image.IMG_TYPE_HOME_GALLERY)
plist,ptotal = cmsService.get_album_imglist(acode,start,nfetch)
return plist
def get_all_articles(self):
start = 0
nfetch = 8
ctcode = None
return cmsService.list_articles(start, nfetch,ctcode,query_in_title=None,status=str(1))
class TestPage():
def GET(self):
return "Hello"
class Portfolio():
def GET(self):
ctcode = config.ctcode_portfolio
rlist, total = cmsService.list_articles(0, 1,ctcode,None,status=str(1))
lista = rlist[0]
article = cmsService.get_article(lista.oid)
if article:
return render.portfolio(article.article_meta, article.article_content)
else:
return render.common("failed:" + str(id))
class Service():
def GET(self):
params = web.input(np=0, kw=None)
npages = int(params.np)
start = npages * _EVERY_PAGE
nfetch = _EVERY_PAGE
keyword = params.kw
if keyword:
keyword = keyword.strip()
ctcode = config.ctcode_service
rlist, total = cmsService.list_articles(start, nfetch,ctcode,query_in_title=keyword,status=str(1))
total_pages = (total + _EVERY_PAGE - 1) / _EVERY_PAGE
return render.service(rlist, total, total_pages,npages)
class School():
def GET(self):
params = web.input(np=0, kw=None)
npages = int(params.np)
start = npages * _EVERY_PAGE
nfetch = _EVERY_PAGE
keyword = params.kw
if keyword:
keyword = keyword.strip()
ctcode = config.ctcode_school
rlist, total = cmsService.list_articles(start, nfetch,ctcode,query_in_title=keyword,status=str(1))
total_pages = (total + _EVERY_PAGE - 1) / _EVERY_PAGE
return render.school(rlist, total, total_pages,npages)
class News():
def GET(self):
params = web.input(np=0, kw=None)
npages = int(params.np)
start = npages * _EVERY_PAGE
nfetch = _EVERY_PAGE
keyword = params.kw
if keyword:
keyword = keyword.strip()
ctcode = config.ctcode_news
rlist, total = cmsService.list_articles(start, nfetch,ctcode,query_in_title=keyword,status=str(1))
total_pages = (total + _EVERY_PAGE - 1) / _EVERY_PAGE
# return to_jsonstr(ListWrapper(rlist,total,total_pages))
return render.news(rlist, total, total_pages,npages)
class GetArticle():
def GET(self, id):
article = cmsService.get_article(id)
if article:
return render.article(article.article_meta, article.article_content)
else:
return render.common("Not Found:" + str(id))
class ListArticles():
def GET(self):
params = web.input(np=0, kw=None,ctcode= None)
npages = int(params.np)
start = npages * _EVERY_PAGE
nfetch = _EVERY_PAGE
keyword = params.kw
if keyword:
keyword = keyword.strip()
if not params.ctcode:
ctcode = config.ctcode_article
else :
ctcode = params.ctcode
rlist, total = cmsService.list_articles(start, nfetch,ctcode,query_in_title=keyword,status=str(1))
total_pages = (total + _EVERY_PAGE - 1) / _EVERY_PAGE
# return to_jsonstr(ListWrapper(rlist,total,total_pages))
return render.article_list(rlist, total, total_pages,npages)
class ListAllForHomePage():
def GET(self):
params = web.input(n=0)
start = 0
nfetch = 8
        if params.n:
            nfetch = int(params.n)
        ctcode = None
        rlist, total = cmsService.list_articles(start, nfetch, ctcode, query_in_title=None, status=str(1))
        # Assumption: mirror RandomPic and return the article list as JSON; the
        # original GET computed rlist/total but returned nothing.
        return serviceHelper.to_jsonstr(ListWrapper(rlist, total_count=total))
class RandomPic():
def GET(self):
nfetch=4
#acode = 'hg'
# plist,ptotal = cmsService.get_album_imglist(acode, start, nfetch)
plist = cmsService.get_random_pic(nfetch)
return serviceHelper.to_jsonstr(ListWrapper(plist,total_count=len(plist)))
class AllGallery():
def GET(self):
params = web.input(start=0,nfetch=100)
start= int(params.start)
#TODO larger? pagination?
nfetch=int(params.nfetch)
acode = 'hg'
# cmsService.get_album_imglist(acode,start,nfetch,itype=Image.IMG_TYPE_HOME_GALLERY)
plist,ptotal = cmsService.get_album_imglist(acode,start, nfetch)
return render.all_gallery(plist, ptotal)
class Yuyue():
def GET(self):
return render.yuyue()
def POST(self):
params = web.input(age=0,genre=1,pdate=None)
preorder = serviceHelper.compose_preorder(params)
cmsService.create_preorder(preorder)
return render.common("OK")
class SearchArticles():
def GET(self):
params = web.input()
kw = params.kw
class GenerateCaptcha():
def GET(self):
return
class GenerateCalendar():
"Month calendar"
def GET(self):
today = date.today()
param = web.input(year=today.year,month=today.month)
        # web.input returns strings for supplied parameters, so coerce to int
        # before handing them to calendar.monthcalendar.
        year = int(param.year)
        month = int(param.month)
        return render.calendar(calendar.monthcalendar(year, month))
#TODO how to reveal on the page view.
class ShowAgenda():
"Show the agenda of a given photographer"
def GET(self,pgid):
today = date.today()
params = web.input(year=today.year,month=today.month)
        year = str(params.year)
        # Zero-pad single-digit months and use the padded value in the query key.
        month = str(params.month).zfill(2)
        polist = cmsService.list_preorder(int(pgid), Preorder.PO_STATUS_OPEN, '%s-%s' % (year, month))
return ""
|
|
#!/usr/bin/env python
# CREATED BY CHRISTOPHER LAVENDER
# BASED ON WORK BY ADAM BURKHOLDER
# INTEGRATIVE BIOINFORMATICS, NIEHS
# WORKING OBJECT ORIENTED VERSION
import os
import argparse
import csv
from operator import itemgetter
from TSScall import readInReferenceAnnotation
from collections import defaultdict
def makeRangesFromAnnotation(annotation):
ranges = defaultdict(list)
for entry in annotation:
ranges[annotation[entry]['chromosome']].append({
'transcript_id': entry,
'chromosome': annotation[entry]['chromosome'],
'start': annotation[entry]['tr_start'],
'end': annotation[entry]['tr_end'],
'strand': annotation[entry]['strand'],
'gene_id': annotation[entry]['gene_id'],
'tss': annotation[entry]['tss'],
})
for field in annotation[entry]['gtf_fields']:
ranges[annotation[entry]['chromosome']][-1][field] = \
annotation[entry]['gtf_fields'][field]
return ranges
def readInClusters(input_detail_file):
clusters = dict()
cluster_ranges = defaultdict(list)
with open(input_detail_file) as f:
next(f)
for line in f:
[
tss_id,
tss_type,
trs,
genes,
strand,
chromosome,
position,
reads,
divergent_flag,
divergent_partner,
divergent_distance,
convergent_flag,
convergent_partner,
convergent_distance,
tss_cluster,
cluster_members
] = line.strip().split('\t')[0:16]
if tss_cluster != "NA":
cluster_entry = {
'tss_id': tss_id,
'tss_type': tss_type,
'strand': strand,
'chromosome': chromosome,
'position': int(position),
'reads': int(reads),
'cluster_members': cluster_members,
}
if tss_cluster in clusters:
clusters[tss_cluster].append(cluster_entry)
else:
clusters[tss_cluster] = [cluster_entry]
for cluster in clusters:
max_reads = 0
max_read_entry = None
cluster_start = float('Inf')
cluster_end = float('-Inf')
for entry in clusters[cluster]:
if entry['tss_type'] == 'called from reference window':
max_reads = float('Inf')
max_read_entry = entry
elif entry['reads'] > max_reads:
max_reads = entry['reads']
max_read_entry = entry
elif entry['reads'] == max_reads:
if entry['tss_id'] < max_read_entry['tss_id']:
max_read_entry = entry
if cluster_start > entry['position']:
cluster_start = entry['position']
if cluster_end < entry['position']:
cluster_end = entry['position']
cluster_ranges[max_read_entry['chromosome']].append({
'cluster_id': cluster,
'representative_tss_id': max_read_entry['tss_id'],
'representative_tss_position': max_read_entry['position'],
'representative_tss_strand': max_read_entry['strand'],
'cluster_members': max_read_entry['cluster_members'],
'start': cluster_start,
'end': cluster_end,
'chromosome': max_read_entry['chromosome'],
})
return cluster_ranges
def findOverlaps(clusters, annotations, key, window):
    def checkOverlap(entry_1, entry_2, threshold):
        # Two ranges on the same chromosome overlap (allowing 'threshold' bp of
        # padding around entry_1) if any of the following holds:
        if entry_1['chromosome'] == entry_2['chromosome']:
            # entry_2 starts inside the padded entry_1
            if entry_1['start'] - threshold <= entry_2['start'] and\
               entry_1['end'] + threshold >= entry_2['start']:
                return True
            # entry_2 ends inside the padded entry_1
            elif entry_1['start'] - threshold <= entry_2['end'] and\
                 entry_1['end'] + threshold >= entry_2['end']:
                return True
            # the padded entry_1 lies entirely within entry_2
            elif entry_1['start'] - threshold >= entry_2['start'] and\
                 entry_1['end'] + threshold <= entry_2['end']:
                return True
        return False
for chrom in clusters:
for cluster in clusters[chrom]:
overlaps = []
proximal = []
closest_dist = float('Inf')
closest_tss = None
closest_value = None
for annotation in annotations[chrom]:
if cluster['chromosome'] == annotation['chromosome']:
if annotation[key][0] is not None and key != 'gene_id' \
and key != 'transcript_id':
annotation_value = annotation[key][0]
else:
annotation_value = annotation[key]
if checkOverlap(cluster, annotation, 0):
if annotation_value not in overlaps:
overlaps.append(annotation_value)
elif checkOverlap(cluster, annotation, window):
if annotation_value not in overlaps and\
annotation_value not in proximal:
proximal.append(annotation_value)
tss_distance = abs(cluster['representative_tss_position'] -
annotation['tss'])
if closest_tss is None:
closest_tss = annotation
closest_value = annotation_value
closest_dist = tss_distance
elif tss_distance < closest_dist:
closest_tss = annotation
closest_value = annotation_value
closest_dist = tss_distance
elif tss_distance == closest_dist:
# tie-breakers: (1) upstream?, (2) plus strand?,
# (3) name
if annotation['tss'] < closest_tss['tss']:
closest_tss = annotation
closest_value = annotation_value
elif annotation['tss'] == closest_tss['tss']:
if annotation['strand'] >\
closest_tss['strand']:
closest_tss = annotation
closest_value = annotation_value
elif annotation['strand'] ==\
closest_tss['strand']:
if annotation_value < closest_value:
closest_value = annotation_value
cluster.update({
'overlapping': overlaps,
'proximal': proximal,
})
if closest_tss:
cluster.update({
'closest_id': closest_value,
'closest_dist': closest_dist,
})
else:
cluster.update({
'closest_id': 'NA',
'closest_dist': 'NA',
})
def findClosestActiveTSS(input_detail_file):
closest_tss = dict()
with open(input_detail_file) as f:
tss_list = csv.DictReader(f, skipinitialspace=True, delimiter='\t')
sorted_list = sorted(tss_list, key=lambda k: (
k['Chromosome'],
k['Position']
))
for i, tss in enumerate(sorted_list):
upstream_tss = None
downstream_tss = None
index = i
while index >= 0 and upstream_tss is None:
if sorted_list[index]['Type'] == \
'called from reference window' \
and tss['Chromosome'] == \
sorted_list[index]['Chromosome']:
upstream_tss = sorted_list[index]
index -= 1
index = i
while index < len(sorted_list) and downstream_tss is None:
if sorted_list[index]['Type'] == \
'called from reference window' \
and tss['Chromosome'] == \
sorted_list[index]['Chromosome']:
downstream_tss = sorted_list[index]
index += 1
if upstream_tss and downstream_tss:
upstream_distance = abs(
int(tss['Position']) - int(upstream_tss['Position']))
downstream_distance = abs(
int(tss['Position']) - int(downstream_tss['Position']))
if upstream_distance < downstream_distance:
closest = upstream_tss
elif downstream_distance < upstream_distance:
closest = downstream_tss
elif upstream_tss == downstream_tss:
closest = upstream_tss
elif upstream_tss:
closest = upstream_tss
elif downstream_tss:
closest = downstream_tss
else:
closest = None
closest_tss[tss['TSS ID']] = closest
return closest_tss
class ClusterClassify(object):
def __init__(self, **kwargs):
self.input_detail_file = kwargs['input_detail_file']
self.annotation_file = kwargs['annotation_file']
self.output_file = kwargs['output_file']
self.id_field = kwargs['identification_field']
self.proximity_threshold = kwargs['proximity_threshold']
assert os.path.exists(self.input_detail_file)
assert os.path.exists(self.annotation_file)
self.non_attribute_fields = [
'TSS ID',
'Type',
'Transcripts',
'Gene ID',
'Strand',
'Chromosome',
'Position',
'Reads',
'Divergent?',
'Divergent partner',
'Divergent distance',
'Convergent?',
'Convergent partner',
'Convergent distance',
'TSS cluster',
'TSSs in associated cluster',
]
self.execute()
def printOutput(self, clusters, closest, output_file):
attribute_fields = []
for cluster_id in closest:
if closest[cluster_id] is not None:
for key in closest[cluster_id]:
if key not in self.non_attribute_fields and \
key not in attribute_fields:
attribute_fields.append(key)
cluster_lines = dict()
with open(output_file, 'w') as OUTPUT:
# OUTPUT.write('{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\n'
# .format(
header_fields = [
'Cluster ID',
'TSSs in cluster',
'Chromosome',
'Cluster start',
'Cluster end',
'Representative TSS',
'Representative TSS position',
'Representative TSS strand',
'Overlapping genes',
'Proximal genes (within ' +
str(self.proximity_threshold) + ')',
'Closest gene',
'Distance to closest gene',
'Closest active TSS ID',
'Distance to closest active TSS',
'Closest active TSS transcript ID',
'Closest active TSS gene ID',
'Closest active TSS chromosome',
'Closest active TSS position',
'Closest active TSS strand',
]
for field in attribute_fields:
header_fields.append('Closest active TSS ' + field.lower())
for i, field in enumerate(header_fields):
if i == 0:
OUTPUT.write(field)
else:
OUTPUT.write('\t' + field)
OUTPUT.write('\n')
for chrom in clusters:
for cluster in sorted(clusters[chrom],
key=itemgetter('cluster_id')):
overlapping = ';'.join(cluster['overlapping'])
proximal = ';'.join(cluster['proximal'])
line = (
'{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}'
.format(
cluster['cluster_id'],
cluster['cluster_members'],
cluster['chromosome'],
str(cluster['start']),
str(cluster['end']),
cluster['representative_tss_id'],
cluster['representative_tss_position'],
cluster['representative_tss_strand'],
overlapping,
proximal,
cluster['closest_id'],
cluster['closest_dist'],
))
tss_id = cluster['representative_tss_id']
if closest[tss_id] is not None:
line += ('\t{}' * 7).format(
closest[tss_id]
['TSS ID'],
str(abs(int(closest[tss_id]['Position']) -
int(cluster['representative_tss_position']))),
closest[tss_id]['Transcripts'],
closest[tss_id]['Gene ID'],
closest[tss_id]['Chromosome'],
closest[tss_id]['Position'],
closest[tss_id]['Strand'],
)
for field in attribute_fields:
if field in closest[tss_id]:
line += '\t' + closest[tss_id][field]
else:
line += '\tNA'
else:
                        # Seven fixed "closest active TSS" columns plus any attribute columns.
                        for i in range(7 + len(attribute_fields)):
line += '\tNA'
line += '\n'
cluster_lines[cluster['cluster_id']] = line
for cluster_id in sorted(cluster_lines):
OUTPUT.write(cluster_lines[cluster_id])
def execute(self):
annotation = readInReferenceAnnotation(self.annotation_file)[0]
annotation_ranges = makeRangesFromAnnotation(annotation)
cluster_ranges = readInClusters(self.input_detail_file)
closest_TSSs = findClosestActiveTSS(self.input_detail_file)
findOverlaps(cluster_ranges, annotation_ranges, self.id_field,
self.proximity_threshold)
self.printOutput(cluster_ranges, closest_TSSs, self.output_file)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--proximity_threshold', '-p', default=10000, type=int,
help='distance threshold used to determine proximity\
(Default: 10000)')
parser.add_argument('--identification_field', '-i', default='gene_id',
type=str, help='field in GTF used to differentiate and\
identify genes (Default: "gene_id")')
parser.add_argument('input_detail_file', type=str,
help='input detail file from TSScall.py')
parser.add_argument('annotation_file', type=str,
help='annotation in GTF format')
parser.add_argument('output_file', type=str,
help='output tab-delimited cluster information file')
args = parser.parse_args()
ClusterClassify(**vars(args))
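# Example invocation (hedged; the script and file names below are placeholders,
# not taken from the original project):
#
#   python classify_clusters.py -p 10000 -i gene_id \
#       TSScall_detail_file.txt annotation.gtf cluster_annotation.txt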
|
|
# coding:utf-8
'''
Created on 2017/11/11.
@author: chk01
'''
import math
import numpy as np
import h5py
import matplotlib.pyplot as plt
import scipy
from PIL import Image
from scipy import ndimage
import tensorflow as tf
from tensorflow.python.framework import ops
from class_four.week_one.cnn_utils import *
np.random.seed(1)
X_train_orig, Y_train_orig, X_test_orig, Y_test_orig, classes = load_dataset()
def preprocessing(X, Y):
X = X / 255.
Y = convert_to_one_hot(Y, 6).T
return X, Y
X_train, Y_train = preprocessing(X_train_orig, Y_train_orig)
X_test, Y_test = preprocessing(X_test_orig, Y_test_orig)
def create_placeholders(n_H0, n_W0, n_C0, n_y):
"""
Creates the placeholders for the tensorflow session.
Arguments:
n_H0 -- scalar, height of an input image
n_W0 -- scalar, width of an input image
n_C0 -- scalar, number of channels of the input
n_y -- scalar, number of classes
Returns:
X -- placeholder for the data input, of shape [None, n_H0, n_W0, n_C0] and dtype "float"
Y -- placeholder for the input labels, of shape [None, n_y] and dtype "float"
"""
X = tf.placeholder(name='X', shape=(None, n_H0, n_W0, n_C0), dtype=tf.float32)
Y = tf.placeholder(name='Y', shape=(None, n_y), dtype=tf.float32)
return X, Y
def initialize_parameters():
"""
Initializes weight parameters to build a neural network with tensorflow. The shapes are:
W1 : [4, 4, 3, 8]
W2 : [2, 2, 8, 16]
Returns:
parameters -- a dictionary of tensors containing W1, W2
"""
tf.set_random_seed(1) # so that your "random" numbers match ours
### START CODE HERE ### (approx. 2 lines of code)
W1 = tf.get_variable(name='W1', dtype=tf.float32, shape=(4, 4, 3, 8),
initializer=tf.contrib.layers.xavier_initializer(seed=0))
W2 = tf.get_variable(name='W2', dtype=tf.float32, shape=(2, 2, 8, 16),
initializer=tf.contrib.layers.xavier_initializer(seed=0))
### END CODE HERE ###
parameters = {"W1": W1,
"W2": W2}
return parameters
def forward_propagation(X, parameters):
"""
Implements the forward propagation for the model:
CONV2D -> RELU -> MAXPOOL -> CONV2D -> RELU -> MAXPOOL -> FLATTEN -> FULLYCONNECTED
Arguments:
    X -- input dataset placeholder, of shape (None, n_H0, n_W0, n_C0)
parameters -- python dictionary containing your parameters "W1", "W2"
the shapes are given in initialize_parameters
Returns:
Z3 -- the output of the last LINEAR unit
"""
# Retrieve the parameters from the dictionary "parameters"
W1 = parameters['W1']
W2 = parameters['W2']
### START CODE HERE ###
# CONV2D: stride of 1, padding 'SAME'
Z1 = tf.nn.conv2d(input=X, filter=W1, strides=(1, 1, 1, 1), padding='SAME')
# RELU
A1 = tf.nn.relu(Z1)
    # MAXPOOL: window 8x8, stride 8, padding 'SAME'
P1 = tf.nn.max_pool(value=A1, ksize=(1, 8, 8, 1), strides=(1, 8, 8, 1), padding='SAME')
# CONV2D: filters W2, stride 1, padding 'SAME'
Z2 = tf.nn.conv2d(input=P1, filter=W2, strides=(1, 1, 1, 1), padding='SAME')
# RELU
A2 = tf.nn.relu(Z2)
# MAXPOOL: window 4x4, stride 4, padding 'SAME'
P2 = tf.nn.max_pool(value=A2, ksize=(1, 4, 4, 1), strides=(1, 4, 4, 1), padding='SAME')
# FLATTEN
P2 = tf.contrib.layers.flatten(inputs=P2)
    # FULLY-CONNECTED without non-linear activation function (do not call softmax here).
# 6 neurons in output layer. Hint: one of the arguments should be "activation_fn=None"
Z3 = tf.contrib.layers.fully_connected(P2, 6, activation_fn=None)
### END CODE HERE ###
return Z3
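# Shape check for the forward pass above, assuming the 64x64x3 SIGNS images
# returned by load_dataset() (an assumption consistent with the model() docstring):
#   X:     (m, 64, 64, 3)
#   Z1/A1: conv 4x4x3x8, stride 1, SAME   -> (m, 64, 64, 8)
#   P1:    maxpool 8x8, stride 8, SAME    -> (m, 8, 8, 8)
#   Z2/A2: conv 2x2x8x16, stride 1, SAME  -> (m, 8, 8, 16)
#   P2:    maxpool 4x4, stride 4, SAME    -> (m, 2, 2, 16) -> flatten -> (m, 64)
#   Z3:    fully connected                -> (m, 6)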
def compute_cost(Z3, Y):
"""
Computes the cost
Arguments:
    Z3 -- output of forward propagation (output of the last LINEAR unit), of shape (number of examples, 6)
Y -- "true" labels vector placeholder, same shape as Z3
Returns:
cost - Tensor of the cost function
"""
### START CODE HERE ### (1 line of code)
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=Z3, labels=Y))
### END CODE HERE ###
return cost
def model(X_train, Y_train, X_test, Y_test, learning_rate=0.009,
num_epochs=100, minibatch_size=64, print_cost=True):
"""
Implements a three-layer ConvNet in Tensorflow:
CONV2D -> RELU -> MAXPOOL -> CONV2D -> RELU -> MAXPOOL -> FLATTEN -> FULLYCONNECTED
Arguments:
    X_train -- training set, of shape (None, 64, 64, 3)
    Y_train -- training set labels, of shape (None, n_y = 6)
    X_test -- test set, of shape (None, 64, 64, 3)
    Y_test -- test set labels, of shape (None, n_y = 6)
learning_rate -- learning rate of the optimization
num_epochs -- number of epochs of the optimization loop
minibatch_size -- size of a minibatch
print_cost -- True to print the cost every 100 epochs
Returns:
train_accuracy -- real number, accuracy on the train set (X_train)
test_accuracy -- real number, testing accuracy on the test set (X_test)
parameters -- parameters learnt by the model. They can then be used to predict.
"""
ops.reset_default_graph() # to be able to rerun the model without overwriting tf variables
tf.set_random_seed(1) # to keep results consistent (tensorflow seed)
seed = 3 # to keep results consistent (numpy seed)
(m, n_H0, n_W0, n_C0) = X_train.shape
print(Y_train.shape)
n_y = Y_train.shape[1]
costs = [] # To keep track of the cost
# Create Placeholders of the correct shape
### START CODE HERE ### (1 line)
X, Y = create_placeholders(n_H0, n_W0, n_C0, n_y)
### END CODE HERE ###
# Initialize parameters
### START CODE HERE ### (1 line)
parameters = initialize_parameters()
### END CODE HERE ###
# Forward propagation: Build the forward propagation in the tensorflow graph
### START CODE HERE ### (1 line)
Z3 = forward_propagation(X, parameters)
### END CODE HERE ###
# Cost function: Add cost function to tensorflow graph
### START CODE HERE ### (1 line)
cost = compute_cost(Z3, Y)
### END CODE HERE ###
# Backpropagation: Define the tensorflow optimizer. Use an AdamOptimizer that minimizes the cost.
### START CODE HERE ### (1 line)
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)
### END CODE HERE ###
# Initialize all the variables globally
init = tf.global_variables_initializer()
# Start the session to compute the tensorflow graph
with tf.Session() as sess:
# Run the initialization
sess.run(init)
# Do the training loop
for epoch in range(num_epochs):
minibatch_cost = 0.
num_minibatches = int(m / minibatch_size) # number of minibatches of size minibatch_size in the train set
seed = seed + 1
minibatches = random_mini_batches(X_train, Y_train, minibatch_size, seed)
for minibatch in minibatches:
# Select a minibatch
(minibatch_X, minibatch_Y) = minibatch
# IMPORTANT: The line that runs the graph on a minibatch.
                # Run the session to execute the optimizer and the cost; the feed_dict should contain a minibatch for (X, Y).
### START CODE HERE ### (1 line)
_, temp_cost = sess.run([optimizer, cost], feed_dict={X: minibatch_X, Y: minibatch_Y})
### END CODE HERE ###
minibatch_cost += temp_cost / num_minibatches
            # Print the cost every 5 epochs and record it every epoch
if print_cost and epoch % 5 == 0:
print("Cost after epoch %i: %f" % (epoch, minibatch_cost))
if print_cost and epoch % 1 == 0:
costs.append(minibatch_cost)
# plot the cost
plt.plot(np.squeeze(costs))
plt.ylabel('cost')
        plt.xlabel('epochs')
plt.title("Learning rate =" + str(learning_rate))
plt.show()
# Calculate the correct predictions
predict_op = tf.argmax(tf.nn.sigmoid(Z3), 1)
correct_prediction = tf.equal(predict_op, tf.argmax(Y, 1))
# Calculate accuracy on the test set
accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
print(accuracy)
train_accuracy = accuracy.eval({X: X_train, Y: Y_train})
test_accuracy = accuracy.eval({X: X_test, Y: Y_test})
print("Train Accuracy:", train_accuracy)
print("Test Accuracy:", test_accuracy)
return train_accuracy, test_accuracy, parameters
if __name__ == '__main__':
print(X_train.shape)
_, _, parameters = model(X_train, Y_train, X_test, Y_test)
|
|
# Copyright (c) 2014-2020 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import unittest
import collections
from cloudify import exceptions as cfy_exc
from tests.unittests import test_mock_base
import vcloud_network_plugin
from vcloud_network_plugin import utils
import vcloud_plugin_common
class NetworkPluginMockTestCase(test_mock_base.TestBase):
def test_get_vm_ip(self):
"""
        check get vm ip from connected networks
"""
fake_client = self.generate_client(vms_networks=[])
fake_ctx = self.generate_relation_context()
fake_ctx._source.node.properties = {
'vcloud_config': {
'edge_gateway': 'some_edge_gateway',
'vdc': 'vdc_name'
}
}
fake_ctx._source.instance.runtime_properties = {
vcloud_network_plugin.VCLOUD_VAPP_NAME: "name"
}
# empty connections/no connection name
with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
with self.assertRaises(cfy_exc.NonRecoverableError):
vcloud_network_plugin.get_vm_ip(
fake_client, fake_ctx, fake_client._vdc_gateway
)
vms_networks = [{
'is_connected': True,
'network_name': 'network_name',
'is_primary': True,
'ip': '1.1.1.1'
}]
fake_client = self.generate_client(vms_networks=vms_networks)
# not routed
with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
with self.assertRaises(cfy_exc.NonRecoverableError):
vcloud_network_plugin.get_vm_ip(
fake_client, fake_ctx, fake_client._vdc_gateway
)
# routed
self.set_network_routed_in_client(fake_client)
with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
self.assertEqual(
vcloud_network_plugin.get_vm_ip(
fake_client, fake_ctx, fake_client._vdc_gateway
),
'1.1.1.1'
)
# no networks
fake_client._vapp.get_vms_network_info = mock.MagicMock(
return_value=[]
)
with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
with self.assertRaises(cfy_exc.NonRecoverableError):
vcloud_network_plugin.get_vm_ip(
fake_client, fake_ctx, fake_client._vdc_gateway
)
# no vapp
fake_client.get_vapp = mock.MagicMock(return_value=None)
with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
with self.assertRaises(cfy_exc.NonRecoverableError):
vcloud_network_plugin.get_vm_ip(
fake_client, fake_ctx, fake_client._vdc_gateway
)
def test_collectAssignedIps(self):
"""
        get the list of IPs already used in the current gateway, based on NAT
        rules
"""
# empty gateway
self.assertEqual(
vcloud_network_plugin.collectAssignedIps(None),
set([])
)
# snat
gateway = self.generate_gateway()
rule_inlist = self.generate_nat_rule(
'SNAT', 'external', 'any', 'internal', '11', 'TCP'
)
gateway.get_nat_rules = mock.MagicMock(
return_value=[rule_inlist]
)
self.assertEqual(
vcloud_network_plugin.collectAssignedIps(gateway),
set(
[vcloud_network_plugin.AssignedIPs(
external='internal', internal='external'
)]
)
)
# dnat
gateway = self.generate_gateway()
rule_inlist = self.generate_nat_rule(
'DNAT', 'external', 'any', 'internal', '11', 'TCP'
)
gateway.get_nat_rules = mock.MagicMock(return_value=[
rule_inlist
])
self.assertEqual(
vcloud_network_plugin.collectAssignedIps(gateway),
set(
[vcloud_network_plugin.AssignedIPs(
external='external', internal='internal'
)]
)
)
def test_getFreeIP(self):
"""
        check that a free IP is returned
"""
# exist free ip
gateway = self.generate_gateway()
gateway.get_public_ips = mock.MagicMock(return_value=[
'10.18.1.1', '10.18.1.2'
])
rule_inlist = self.generate_nat_rule(
'DNAT', '10.18.1.1', 'any', 'internal', '11', 'TCP'
)
gateway.get_nat_rules = mock.MagicMock(return_value=[rule_inlist])
self.assertEqual(
vcloud_network_plugin.getFreeIP(gateway),
'10.18.1.2'
)
# no free ips
gateway.get_public_ips = mock.MagicMock(return_value=[
'10.18.1.1'
])
with self.assertRaises(cfy_exc.NonRecoverableError):
vcloud_network_plugin.getFreeIP(gateway)
def test_del_ondemand_public_ip(self):
"""
test release public ip
"""
fake_client = self.generate_client()
gateway = self.generate_gateway()
fake_ctx = self.generate_node_context()
# can't deallocate ip
gateway.deallocate_public_ip = mock.MagicMock(return_value=None)
with mock.patch(
'vcloud_network_plugin.wait_for_gateway', mock.MagicMock()
):
with self.assertRaises(cfy_exc.NonRecoverableError):
vcloud_network_plugin.del_ondemand_public_ip(
fake_client, gateway, '127.0.0.1', fake_ctx)
gateway.deallocate_public_ip.assert_called_with('127.0.0.1')
# successfully dropped public ip
gateway.deallocate_public_ip = mock.MagicMock(
return_value=self.generate_task(
vcloud_plugin_common.TASK_STATUS_SUCCESS
)
)
with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
with mock.patch(
'vcloud_network_plugin.wait_for_gateway', mock.MagicMock()
):
vcloud_network_plugin.del_ondemand_public_ip(
fake_client, gateway, '127.0.0.1', fake_ctx)
def test_save_gateway_configuration(self):
"""
        check the reaction of our code to different results from the server
        on save configuration
"""
gateway = self.generate_gateway()
fake_client = self.generate_client()
fake_ctx = self.generate_node_context()
        # can't save configuration - error in first call
self.set_services_conf_result(
gateway, None
)
fake_ctx = self.generate_node_context()
with self.assertRaises(cfy_exc.NonRecoverableError):
vcloud_network_plugin.save_gateway_configuration(
gateway, fake_client, fake_ctx)
# error in status
self.set_services_conf_result(
gateway, vcloud_plugin_common.TASK_STATUS_ERROR
)
with self.assertRaises(cfy_exc.NonRecoverableError):
with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
vcloud_network_plugin.save_gateway_configuration(
gateway, fake_client, fake_ctx)
# everything fine
self.set_services_conf_result(
gateway, vcloud_plugin_common.TASK_STATUS_SUCCESS
)
with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
self.assertTrue(
vcloud_network_plugin.save_gateway_configuration(
gateway, fake_client, fake_ctx))
# server busy
self.set_services_conf_result(
gateway, None
)
self.set_gateway_busy(gateway)
self.assertFalse(
vcloud_network_plugin.save_gateway_configuration(
gateway, fake_client, fake_ctx))
def test_is_network_routed(self):
"""
check network route state
"""
fake_client = self.generate_client(
vms_networks=[{
'is_connected': True,
'network_name': 'network_name',
'is_primary': True,
'ip': '1.1.1.1'
}]
)
fake_ctx = self.generate_node_context()
with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
# not routed by nat
network = self.generate_fake_client_network("not_routed")
fake_client.get_network = mock.MagicMock(return_value=network)
self.assertFalse(
vcloud_network_plugin.is_network_routed(
fake_client, 'network_name',
fake_client._vdc_gateway
)
)
# nat routed
self.set_network_routed_in_client(fake_client)
self.assertTrue(
vcloud_network_plugin.is_network_routed(
fake_client, 'network_name',
fake_client._vdc_gateway
)
)
# nat routed but for other network
self.assertFalse(
vcloud_network_plugin.is_network_routed(
fake_client, 'other_network_name',
fake_client._vdc_gateway
)
)
def test_get_vapp_name(self):
"""
check get vapp name
"""
self.assertEqual(
vcloud_network_plugin.get_vapp_name({
vcloud_network_plugin.VCLOUD_VAPP_NAME: "name"
}),
"name"
)
with self.assertRaises(cfy_exc.NonRecoverableError):
vcloud_network_plugin.get_vapp_name({
"aa": "aaa"
})
def test_check_port(self):
"""
check port
"""
# port int
utils.check_port(10)
        # port int too big
with self.assertRaises(cfy_exc.NonRecoverableError):
utils.check_port(utils.MAX_PORT_NUMBER + 1)
# port any
utils.check_port('any')
# port not any and not int
with self.assertRaises(cfy_exc.NonRecoverableError):
utils.check_port('some')
def test_CheckAssignedExternalIp(self):
"""
Check assigned external ip
"""
gateway = self.generate_gateway()
gateway.get_public_ips = mock.MagicMock(return_value=[
'10.1.1.1', '10.1.1.2'
])
rule_inlist = self.generate_nat_rule(
'DNAT', '10.1.1.1', 'any', '123.1.1.1', '11', 'TCP'
)
gateway.get_nat_rules = mock.MagicMock(
return_value=[rule_inlist]
)
# free ip
vcloud_network_plugin.CheckAssignedExternalIp(
'10.10.1.2', gateway
)
# assigned ip
with self.assertRaises(cfy_exc.NonRecoverableError):
vcloud_network_plugin.CheckAssignedExternalIp(
'10.1.1.1', gateway
)
def test_CheckAssignedInternalIp(self):
"""
Check assigned internal ip
"""
gateway = self.generate_gateway()
gateway.get_public_ips = mock.MagicMock(return_value=[
'10.1.1.1', '10.1.1.2'
])
rule_inlist = self.generate_nat_rule(
'DNAT', '10.1.1.1', 'any', '123.1.1.1', '11', 'TCP'
)
gateway.get_nat_rules = mock.MagicMock(
return_value=[rule_inlist]
)
# free ip
vcloud_network_plugin.CheckAssignedInternalIp(
'123.1.1.2', gateway
)
# assigned ip
with self.assertRaises(cfy_exc.NonRecoverableError):
vcloud_network_plugin.CheckAssignedInternalIp(
'123.1.1.1', gateway
)
def test_get_gateway(self):
"""
check get gateway
"""
# good case
fake_client = self.generate_client()
fake_ctx = self.generate_node_context()
with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
self.assertEqual(
vcloud_network_plugin.get_gateway(
fake_client, 'test name'
),
fake_client._vdc_gateway
)
fake_client.get_gateway.assert_called_with(
'vdc_name', 'test name'
)
# bad case
fake_client = self.generate_client()
fake_ctx = self.generate_node_context()
fake_client.get_gateway = mock.MagicMock(return_value=None)
with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
with self.assertRaises(cfy_exc.NonRecoverableError):
vcloud_network_plugin.get_gateway(
fake_client, 'test name'
)
def test_get_network(self):
"""
check get network
"""
# good case
fake_client = self.generate_client()
fake_ctx = self.generate_node_context()
fake_network = self.generate_fake_client_network(
'test name'
)
fake_client.get_network = mock.MagicMock(
return_value=fake_network
)
with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
self.assertEqual(
vcloud_network_plugin.get_network(
fake_client, 'test name'
),
fake_network
)
fake_client.get_network.assert_called_with(
'vdc_name', 'test name'
)
# bad case network not exist
fake_client = self.generate_client()
fake_ctx = self.generate_node_context()
fake_client.get_network = mock.MagicMock(return_value=None)
with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
with self.assertRaises(cfy_exc.NonRecoverableError):
vcloud_network_plugin.get_network(
fake_client, 'test name'
)
        # worst case: network == None
fake_client = self.generate_client()
fake_ctx = self.generate_node_context()
with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
with self.assertRaises(cfy_exc.NonRecoverableError):
vcloud_network_plugin.get_network(
fake_client, None
)
def test_get_network_name(self):
"""
check get network name
"""
# external without resource_id
with self.assertRaises(cfy_exc.NonRecoverableError):
vcloud_network_plugin.get_network_name({
'use_external_resource': True
})
        # external with resource_id
self.assertEqual(
vcloud_network_plugin.get_network_name({
'use_external_resource': True,
'resource_id': 'some_text'
}),
'some_text'
)
# internal, without network
with self.assertRaises(cfy_exc.NonRecoverableError):
vcloud_network_plugin.get_network_name({})
# network without name
with self.assertRaises(cfy_exc.NonRecoverableError):
vcloud_network_plugin.get_network_name({
'network': {
'name': None
}
})
# good case
self.assertEqual(
vcloud_network_plugin.get_network_name({
'network': {
'name': 'good_text'
}
}),
'good_text'
)
def test_check_protocol(self):
"""
check default protocols
"""
for protocol in utils.VALID_PROTOCOLS:
self.assertEqual(
protocol.capitalize(),
utils.check_protocol(protocol).capitalize()
)
        # something unknown
with self.assertRaises(cfy_exc.NonRecoverableError):
utils.check_protocol("Unknow").capitalize()
def test_get_ondemand_public_ip(self):
"""
check allocate public ip for ondemand
"""
fake_ctx = self.generate_node_context()
fake_client = self.generate_client()
# empty result from server
fake_client._vdc_gateway.allocate_public_ip = mock.MagicMock(
return_value=None
)
with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
with self.assertRaises(cfy_exc.NonRecoverableError):
vcloud_network_plugin.get_ondemand_public_ip(
fake_client, fake_client._vdc_gateway, fake_ctx
)
        # allocation succeeds, but the list of IPs is empty
fake_client._vdc_gateway.allocate_public_ip = mock.MagicMock(
return_value=self.generate_task(
vcloud_plugin_common.TASK_STATUS_SUCCESS
)
)
with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
with self.assertRaises(cfy_exc.NonRecoverableError):
vcloud_network_plugin.get_ondemand_public_ip(
fake_client, fake_client._vdc_gateway, fake_ctx
)
        # a new IP now exists on the gateway
new_gateway = self.generate_gateway()
new_gateway.get_public_ips = mock.MagicMock(
return_value=['1.1.1.1']
)
fake_client.get_gateways = mock.MagicMock(
return_value=[new_gateway]
)
with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
self.assertEqual(
vcloud_network_plugin.get_ondemand_public_ip(
fake_client, fake_client._vdc_gateway, fake_ctx
),
'1.1.1.1'
)
def test_get_public_ip_subscription(self):
"""
check allocate public ip / subscription
"""
gateway = self.generate_gateway()
gateway.get_public_ips = mock.MagicMock(return_value=[
'10.18.1.1', '10.18.1.2'
])
rule_inlist = self.generate_nat_rule(
'DNAT', '10.18.1.1', 'any', 'internal', '11', 'TCP'
)
gateway.get_nat_rules = mock.MagicMock(
return_value=[rule_inlist]
)
fake_ctx = self.generate_node_context()
        # for subscription we don't use the client
self.assertEqual(
vcloud_network_plugin.get_public_ip(
None, gateway,
vcloud_plugin_common.SUBSCRIPTION_SERVICE_TYPE, fake_ctx
),
'10.18.1.2'
)
def test_get_public_ip_ondemand(self):
"""
check allocate public ip / ondemand
"""
# ondemand
fake_ctx = self.generate_node_context()
fake_client = self.generate_client()
fake_client._vdc_gateway.get_public_ips = mock.MagicMock(
return_value=[]
)
fake_client._vdc_gateway.allocate_public_ip = mock.MagicMock(
return_value=self.generate_task(
vcloud_plugin_common.TASK_STATUS_SUCCESS
)
)
new_gateway = self.generate_gateway()
new_gateway.get_public_ips = mock.MagicMock(
return_value=['10.18.1.21']
)
fake_client.get_gateways = mock.MagicMock(
return_value=[new_gateway]
)
with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
self.assertEqual(
vcloud_network_plugin.get_public_ip(
fake_client, fake_client._vdc_gateway,
vcloud_plugin_common.ONDEMAND_SERVICE_TYPE, fake_ctx
),
'10.18.1.21'
)
def test_check_ip(self):
"""
check ip code
"""
# wrong type
with self.assertRaises(cfy_exc.NonRecoverableError):
vcloud_network_plugin.check_ip({'wrong': None})
# wrong value
with self.assertRaises(cfy_exc.NonRecoverableError):
vcloud_network_plugin.check_ip("1.1.1.400")
# good case
self.assertEqual(
vcloud_network_plugin.check_ip("1.1.1.40"),
"1.1.1.40"
)
def test_is_valid_ip_range(self):
"""
check ip range
"""
# wrong range
self.assertFalse(
vcloud_network_plugin.is_valid_ip_range("1.1.1.50", "1.1.1.40")
)
# good case
self.assertTrue(
vcloud_network_plugin.is_valid_ip_range("1.1.1.40", "1.1.1.50")
)
def test_is_network_exists(self):
"""
check network exist
"""
# network exist
fake_ctx = self.generate_node_context()
fake_client = self.generate_client()
fake_client.get_network = mock.MagicMock(
return_value=self.generate_fake_client_network('test')
)
with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
self.assertTrue(
vcloud_network_plugin.is_network_exists(fake_client, 'test')
)
fake_client.get_network.assert_called_with('vdc_name', 'test')
# network not exist
fake_ctx = self.generate_node_context()
fake_client = self.generate_client()
fake_client.get_network = mock.MagicMock(
return_value=None
)
with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
self.assertFalse(
vcloud_network_plugin.is_network_exists(fake_client, 'test')
)
def test_is_ips_in_same_subnet(self):
"""
ips in same net
"""
# ips from several networks
self.assertFalse(
vcloud_network_plugin.is_ips_in_same_subnet(
['123.11.1.1', '123.11.3.1'], 24
)
)
# ips from same network
self.assertTrue(
vcloud_network_plugin.is_ips_in_same_subnet(
['123.11.1.1', '123.11.1.1'], 24
)
)
def test_is_separate_ranges(self):
"""
ips in separate ranges
"""
IPRange = collections.namedtuple('IPRange', 'start end')
# positive case
self.assertTrue(
vcloud_network_plugin.is_separate_ranges(
IPRange(start='1.1.1.1', end='1.1.1.11'),
IPRange(start='1.1.1.12', end='1.1.1.23')
)
)
# negative case
self.assertFalse(
vcloud_network_plugin.is_separate_ranges(
IPRange(start='1.1.1.1', end='1.1.1.15'),
IPRange(start='1.1.1.9', end='1.1.1.23')
)
)
if __name__ == '__main__':
unittest.main()
|
|
"""Module responsible for loading trained models and guessing virus names.
Overall, the idea is to guess a name for a virus from the names that Antiviruses
give it.
When using the Guesser class, all Exceptions are wrapped in a
NameGeneratorException.
Usage:
In [1]: from name_generator import Guesser
In [2]: g = Guesser()
In [3]: g.guess_everything({"F-Prot": "W32/AddInstall.A",
...: "Comodo": "Application.Win32.InstalleRex.KG"})
Out[3]:
{'compiler': 'unknown',
'_type': 'Application',
'group': 'unknown',
'ident': 'A',
'family': 'AddInstall',
'platform': 'Win32',
'language': 'unknown'}
All labels are guessed using CRFSUITE conditional random fields.
For example, we would have two family labels in the example above:
"AddInstall" and "InstalleRex".
The following strategies are used to pick among the labeled antivirus names:
- Family is guessed using TFIDF ratios for families across all documents.
- Group and Identity are guessed by most commonly occurring groups and
identities within AVs that guessed the picked family or guessed close to
a picked family. This is because the labels for group and identity only
make sense within the confines of a specific family.
- Platform is guessed using heuristics.
- language, compiler, and _type are those that occur most often in the
labeled set.
"""
import itertools
import Levenshtein
import logging
import numpy as np
import re
import scipy
import ujson as json
from collections import Counter
from collections import defaultdict
from pycrfsuite import Tagger
from sklearn.feature_extraction.text import TfidfVectorizer
CRF_MODEL_PATH = "all_train.model"
TANIMOTO_MAPPING_PATH = "tanimoto_mapping.json"
VOCAB_PATH = "tfidf_vocab.json"
IDF_WEIGHTS_PATH = "tfidf_idf.json"
REGEX_NONWORD = re.compile(r"\W")
REGEX_NONWORD_SAVED = re.compile(r"(\W)")
UNKNOWN = 'unknown'
FAMILY = 'family'
PLATFORM = 'platform'
GROUP = 'group'
IDENT = 'ident'
GENERIC_FAMILIES = set(['Generic', 'Gen', 'GENERIC', 'Genetic'])
OTHER_GUESSABLE_TAGS = ['language', 'compiler', '_type']
AVS = set(['TotalDefense',
'ViRobot',
'TheHacker',
'ClamAV',
'CAT-QuickHeal',
'Antiy-AVL',
'Baidu-International',
'Agnitum',
'Bkav',
'nProtect',
'Jiangmin',
'Commtouch',
'F-Prot',
'Microsoft',
'SUPERAntiSpyware',
'Ad-Aware',
'Symantec',
'AhnLab-V3',
'Rising',
'NANO-Antivirus',
'Norman',
'Ikarus',
'Kingsoft',
'K7AntiVirus',
'Panda',
'VBA32',
'Emsisoft',
'Fortinet',
'F-Secure',
'Malwarebytes',
'MicroWorld-eScan',
'BitDefender',
'Avast',
'Kaspersky',
'DrWeb',
'Sophos',
'Comodo',
'GData',
'ESET-NOD32',
'AVG',
'AntiVir',
'VIPRE',
'McAfee'])
TEMPLATES = (
(('w', -2), ),
(('w', -1), ),
(('w', 0), ),
(('w', 1), ),
(('w', 2), ),
(('w', -1), ('w', 0)),
(('w', 0), ('w', 1)),
(('pos', -2), ),
(('pos', -1), ),
(('pos', 0), ),
(('pos', 1), ),
(('pos', 2), ),
(('pos', -2), ('pos', -1)),
(('pos', -1), ('pos', 0)),
(('pos', 0), ('pos', 1)),
(('pos', 1), ('pos', 2)),
(('pos', -2), ('pos', -1), ('pos', 0)),
(('pos', -1), ('pos', 0), ('pos', 1)),
(('pos', 0), ('pos', 1), ('pos', 2)),
(('av', 0), ),
)
class NameGeneratorException(Exception):
pass
class FamilyPostproc(object):
def families_to_canonical(self, families):
"""Convert list of family lists to post list of postprocessed lists.
:param list families: list of lists of families.
:rtype: list.
"""
all_output_families = []
for cluster in families:
inner_cluster = []
for fam in cluster:
inner_cluster.append(self.synonimous_mapping.get(fam, fam))
all_output_families.append(inner_cluster)
return all_output_families
class TanimotoPostproc(FamilyPostproc):
def __init__(self, save_file=False):
self.synonimous_mapping = json.load(open(save_file))
class EditDistancePostproc(FamilyPostproc):
def __init__(self, similarity_func=Levenshtein.jaro_winkler, threshold=0.9):
self.similarity_func = similarity_func
self.threshold = threshold
def mapping_from_one_list(self, names_list):
"""Convert list of families to list of postprocessed families.
Uses edit distance to replace similar names with longer names
that are < `self.threshold` edit distance away.
:param list names_list: list of families.
:rtype: list.
"""
all_clusters = []
names_list_uniq = list(set(names_list))
indices = set(range(len(names_list_uniq)))
while indices:
current_idx = indices.pop()
current_w = names_list_uniq[current_idx]
current_cluster = [current_w]
idxes_to_discard = set()
for idx in indices:
comparison_w = names_list_uniq[idx]
if comparison_w == current_w:
continue
if self.similarity_func(
current_w, comparison_w) > self.threshold:
idxes_to_discard.add(idx)
current_cluster.append(comparison_w)
indices.difference_update(idxes_to_discard)
all_clusters.append(current_cluster)
return similar_names_from_name_clusters(all_clusters)
def families_to_canonical(self, families):
all_output_families = []
for group in families:
inner_cluster = []
synonimous_mapping = self.mapping_from_one_list(group)
for fam in group:
inner_cluster.append(synonimous_mapping.get(fam, fam))
all_output_families.append(inner_cluster)
return all_output_families
def make_postprocessors(tani_sf=None):
"""Postprocessor factory.
:param str tani_sf: path to saved tanimoto JSON.
:rtype: list
"""
save_file = tani_sf if tani_sf is not None else TANIMOTO_MAPPING_PATH
return [EditDistancePostproc(), TanimotoPostproc(save_file=save_file)]
def _extract_features(X):
"""Extracts feature using `TEMPLATES` from a sequence of features `X`."""
all_features = []
for i, _ in enumerate(X):
el_features = []
for template in TEMPLATES:
features_i = []
name = '|'.join(['%s[%d]' % (f, o) for f, o in template])
for field, offset in template:
p = i + offset
if p < 0 or p >= len(X):
features_i = []
break
features_i.append(X[p][field])
if features_i:
el_features.append('%s=%s' % (name, '|'.join(features_i)))
all_features.append(el_features)
all_features[0].append('__BOS__')
all_features[-1].append('__EOS__')
return all_features
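# Worked illustration (derived from TEMPLATES above, not from any trained
# model): for the token sequence produced by preprocess_av_result (defined
# below) on "W32/AddInstall.A" with av "F-Prot", the first element's feature
# list includes entries such as
#   'w[0]=W32', 'w[1]=/', 'w[2]=AddInstall',
#   'pos[0]=0', 'pos[0]|pos[1]=0|/', 'av[0]=F-Prot', '__BOS__'
# and the last element additionally carries '__EOS__'; templates whose
# offsets fall outside the sequence are skipped for that position.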
def _extract_tags(tags_dict, tag):
return [t[0] for t in tags_dict[tag] if t]
def similar_names_from_name_clusters(name_clusters):
"""Maps similar features to their best replacement.
Takes a sequence of lists of similar strings and creates a mapping of
all strings in that list to the longest one.
EG. [['abc','a','b'],['cde', 'cd']] => {'a': 'abc', 'b': 'abc', 'cd': 'cde'}
:param iterable name_clusters: iterable of lists or tuples of strings.
:rtype: dict
"""
d = {}
for cluster in name_clusters:
longest = max(cluster, key=lambda x: len(x))
for name in cluster:
if name != longest:
d[name] = longest
return d
def preprocess_av_result(av_result, av):
"""Split an av result into a list of maps for word, pos, and av if present.
EG. take something like 'win32.malware.group' and convert to
[{'av': 'someav', 'w': 'win32', 'pos': '0'},
{'av': 'someav', 'w': '.', 'pos': '.'},
{'av': 'someav', 'w': 'malware', 'pos': '1'},
{'av': 'someav', 'w': '.', 'pos': '.'},
{'av': 'someav', 'w': 'group', 'pos': '2'}]
:param str av_result: string that an antivirus returns.
:param str av: name of the AV.
:rtype: list
"""
split_delim = [el if el != ' ' else '_' for el in
REGEX_NONWORD_SAVED.split(av_result)]
split_no_delim = REGEX_NONWORD.split(av_result)
delims = set(split_delim) - set(split_no_delim)
counter = 0
tags = []
for el in split_delim:
if el in delims:
tags.append(el)
else:
tags.append(str(counter))
counter += 1
if av is not None:
return [{'w': i, 'pos': j, 'av': k} for i, j, k in
zip(split_delim, tags, itertools.repeat(av)) if i != '']
else:
return [{'w': i, 'pos': j} for i, j in
zip(split_delim, tags) if i != '']
def load_tagger(model_path):
"""Loads tagger from a CRFSUITE binary model file.
:param str model_path: path to the binary model file.
"""
tagger = Tagger()
tagger.open(model_path)
return tagger
def load_tfidf(vocab_path, idf_weights_path):
"""Loads tfidf vectorizer from its components.
:param str vocab_path: path to the vectorizer vocabulary JSON.
:param str idf_weights_path: path to idf weights JSON.
:rtype: sklearn.feature_extraction.text.TfidfVectorizer
"""
tfidf = TfidfVectorizer(analyzer=lambda x: x,
vocabulary=json.load(open(vocab_path)))
idf_vector = np.array(json.load(open(idf_weights_path)))
tfidf._tfidf._idf_diag = scipy.sparse.diags([idf_vector], [0])
tfidf.vocabulary_ = tfidf.vocabulary
return tfidf
def get_all_tags(av_dict, tagger):
"""Creates a dictionary of tag types to list of tags, av tuples.
Example:
{'SomeAntivirus': "Win32/Trojan"} =>
{"family": [("unknown", "SomeAntivirus")],
"platform": [("Win32", "SomeAntivirus")],
"_type": [("Trojan", "SomeAntivirus")]
}
:param dict av_dict: AV dictionary to tag.
:param pycrfsuite._pycrfsuite.Tagger tagger: tagger to use.
:rtype: dict
"""
all_results = defaultdict(list)
for tag in OTHER_GUESSABLE_TAGS:
all_results[tag] = []
for tag in (PLATFORM, FAMILY, GROUP, IDENT):
all_results[tag] = []
for k, v in av_dict.items():
if k not in AVS or v is None:
continue
preproc_res = preprocess_av_result(v, k)
av_tags = tagger.tag(_extract_features(preproc_res))
for res_dict, av_tag in zip(preproc_res, av_tags):
all_results[av_tag].append((res_dict['w'], k))
return dict(all_results)
def get_tag(av_dict, tagger, tag):
"""Create a list of a items tagged as `tag` in the dictionary value.
E.G. get_tag({'SomeAntivirus': "Win32/Trojan"}, tagger, 'platform')
=> [('Win32', 'SomeAntivirus'), ]
:param dict av_dict: AV dictionary to tag.
:param pycrfsuite._pycrfsuite.Tagger tagger: tagger to use.
:param str tag: tag to use.
:rtype: list
"""
all_results = []
for k, v in av_dict.items():
if k not in AVS or v is None:
continue
preproc_res = preprocess_av_result(v, k)
av_tags = tagger.tag(_extract_features(preproc_res))
for res_dict, av_tag in zip(preproc_res, av_tags):
if av_tag == tag:
all_results.append((res_dict['w'], k))
return all_results
def guess_platform(av_dict, tagger, platform_tags=None):
"""Uses heuristics to guess platform from an av dictionary using a tagger.
:param dict av_dict: AV dictionary to tag.
:param pycrfsuite._pycrfsuite.Tagger tagger: tagger to use.
:param list|tuple|None platform_tags: all platform tags.
:rtype: str
"""
WINDOWS = "Win32"
ANDROID = "Android"
def _decide_platform(platform_list):
def map_to_canonical(platform_str):
lower_str = platform_str.lower()[:3]
if lower_str == 'win' or lower_str == 'w32' or lower_str == 'pe':
return WINDOWS
elif lower_str == 'and':
return ANDROID
else:
return UNKNOWN
platform_strings = [WINDOWS, ANDROID, UNKNOWN]
p2c = {p: 0 for p in platform_strings}
for platform in platform_list:
p2c[map_to_canonical(platform)] += 1
res = sorted(p2c.items(), key=lambda x: x[1], reverse=True)
for k, v in res:
# skip UNKNOWN and platforms that received no votes, so a list with
# no recognised platforms falls through to UNKNOWN
if k == UNKNOWN or v == 0:
continue
return k
return UNKNOWN
if platform_tags is None:
all_results = _extract_tags(get_tag(av_dict, tagger, PLATFORM),
PLATFORM)
else:
all_results = platform_tags
return _decide_platform(all_results)
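# Worked illustration (hypothetical tag list): given platform tags
# ['W32', 'win32', 'Android'], map_to_canonical buckets the first two under
# "Win32" and the third under "Android", so the majority vote returns
# "Win32"; an empty tag list yields UNKNOWN.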
def guess_by_commonality(av_dict, tagger, tag, precalculated_tags=None):
"""Guess an output tag from an av_dict based on how often it appears.
:param dict av_dict: AV dictionary to tag.
:param pycrfsuite._pycrfsuite.Tagger tagger: tagger to use.
:param str tag: tag to use.
:param list|tuple|None precalculated_tags: all precalculated tags.
:rtype: str
"""
tags_to_count = _extract_tags(get_tag(av_dict, tagger, tag), tag) \
if precalculated_tags is None else precalculated_tags
result = Counter(tags_to_count).most_common(1)
if result:
return result[0][0]
else:
return UNKNOWN
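# Minimal illustration (hypothetical tags): Counter(['Trojan', 'Trojan',
# 'Application']).most_common(1) evaluates to [('Trojan', 2)], so 'Trojan'
# would be returned; with nothing tagged, the fallback is UNKNOWN.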
def guess_family(tfidf, tagger, av_dict, idx_to_words, postprocessors=[],
family_tags=None):
"""Guess family probabilities from an av_dict.
E.G. When av_dict is
{"F-Prot": "W32/AddInstall.A", "Comodo": "Application.Win32.InstalleRex.KG"}
the output tuple is
({'AddInstall': 0.82868242670257763, 'InstalleRex': 0.55971906852842446},
{'AddInstall': 'F-Prot', 'InstalleRex': 'Comodo'})
:param sklearn.feature_extraction.text.TfidfVectorizer tfidf: vectorizer.
:param pycrfsuite._pycrfsuite.Tagger tagger: tagger to use.
:param dict av_dict: AV dictionary to tag.
:param dict idx_to_words: index to tokens reverse dictionary.
:param list postprocessors: list of postprocessors to use.
:param list|tuple|None family_tags: precalculated family tags.
:rtype: tuple
"""
if family_tags is None:
tags = (get_tag(av_dict, tagger, FAMILY),)
else:
tags = (family_tags,)
# capture the AVs now; the ordering of tags[0] is preserved below, so each
# word can be mapped back to its AV later
avs = [t[1] for t in tags[0]]
tags = ([t[0] for t in tags[0]], ) # get rid of AV information in tags.
for postprocessor in postprocessors:
tags = postprocessor.families_to_canonical(tags)
m = tfidf.transform(tags)
words_to_vals = {idx_to_words[idx]: val for idx, val in
zip(m.indices, m[0, m.indices].toarray()[0])}
# heuristic: scale down the scores of generic family names
words_to_vals.update(
{k: v/len(words_to_vals) for k, v in words_to_vals.items()
if k in GENERIC_FAMILIES})
words_to_avs = defaultdict(list)
for tag, av in zip(tags[0], avs):
words_to_avs[tag].append(av)
return words_to_vals, words_to_avs
def _guess_everything(tfidf, tagger, av_dict, idx_to_words, postprocessors=[]):
"""Guess all tags from an av dict.
Eg. given av_dict
{"F-Prot": "W32/AddInstall.A",
"Comodo": "Application.Win32.InstalleRex.KG"}
It would guess the following tags.
{'group': 'unknown',
'platform': 'Win32',
'ident': 'A',
'language': 'unknown',
'family': 'AddInstall',
'_type': 'Application',
'compiler': 'unknown'}
The actual tags would depend on what data the models have been trained on.
:param sklearn.feature_extraction.text.TfidfVectorizer tfidf: vectorizer.
:param pycrfsuite._pycrfsuite.Tagger tagger: tagger to use.
:param dict av_dict: AV dictionary to tag.
:param dict idx_to_words: index to tokens reverse dictionary of
tfidf.vocabulary_.
:param list postprocessors: list of postprocessors to use.
:rtype: dict
"""
all_tags = get_all_tags(av_dict, tagger)
family_probs, words_to_avs = guess_family(
tfidf, tagger, av_dict, idx_to_words, postprocessors=postprocessors,
family_tags=all_tags[FAMILY])
families_probs_sorted = sorted(family_probs.items(), key=lambda x: x[1])
if families_probs_sorted:
family = families_probs_sorted.pop()[0]
family_avs = set(words_to_avs[family])
else:
family = UNKNOWN
family_avs = set()
platform = guess_platform(av_dict, tagger,
platform_tags=_extract_tags(all_tags, PLATFORM))
out_dict = {FAMILY: family, PLATFORM: platform}
for tag in (IDENT, GROUP): # guess group and identity only within family
precalculated_tags = [t[0] for t in all_tags[tag]
if t and t[1] in family_avs]
out_dict[tag] = guess_by_commonality(
av_dict, tagger, tag, precalculated_tags=precalculated_tags)
for tag in OTHER_GUESSABLE_TAGS:
out_dict[tag] = guess_by_commonality(
av_dict, tagger, tag, precalculated_tags=_extract_tags(all_tags,
tag))
return out_dict
class Guesser(object):
"""Convenience class to automatically load trained data and guess tags."""
def __init__(self, tfidf=None, tagger=None, postprocessors=[]):
self.tfidf = tfidf if tfidf is not None else load_tfidf(
VOCAB_PATH, IDF_WEIGHTS_PATH)
if type(self.tfidf) != TfidfVectorizer:
raise NameGeneratorException("TfidfVectorizer not loaded correctly")
self.tagger = tagger if tagger is not None else load_tagger(
CRF_MODEL_PATH)
if type(self.tagger) != Tagger:
raise NameGeneratorException("Tagger not loaded correctly.")
self.idx_to_words = {v: k for k, v in self.tfidf.vocabulary_.items()}
try:
self.postprocessors = make_postprocessors() if not postprocessors\
else postprocessors
except Exception as err:
logging.exception(err)
raise NameGeneratorException(err)
def guess_everything(self, av_dict):
"""Guess all tags from an av dict.
Eg. given av_dict
{"F-Prot": "W32/AddInstall.A",
"Comodo": "Application.Win32.InstalleRex.KG"}
It would guess the following tags.
{'group': 'unknown',
'platform': 'Win32',
'ident': 'A',
'language': 'unknown',
'family': 'AddInstall',
'_type': 'Application',
'compiler': 'unknown'}
The actual tags would depend on what data the models have
been trained on.
:param dict av_dict: AV dictionary to tag.
:rtype: dict
"""
try:
return _guess_everything(self.tfidf, self.tagger, av_dict,
self.idx_to_words, self.postprocessors)
except Exception as err:
logging.exception(err)
raise NameGeneratorException(err)
def guess_family(self, av_dict):
"""Guess family probabilities from an av_dict.
E.G. When av_dict is
{"F-Prot": "W32/AddInstall.A",
"Comodo": "Application.Win32.InstalleRex.KG"}
the output is a tuple whose first element is
{'AddInstall': 0.82868242670257763, 'InstalleRex': 0.55971906852842446}
and whose second element maps each family to the AVs that reported it.
:param dict av_dict: AV dictionary to tag.
:rtype: tuple
"""
try:
return guess_family(self.tfidf, self.tagger, av_dict,
self.idx_to_words, self.postprocessors)
except Exception as err:
logging.exception(err)
raise NameGeneratorException(err)
|
|
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Service-side implementation of gRPC Python."""
import collections
import enum
import logging
import threading
import time
import six
import grpc
from grpc import _common
from grpc import _interceptor
from grpc._cython import cygrpc
from grpc.framework.foundation import callable_util
_SHUTDOWN_TAG = 'shutdown'
_REQUEST_CALL_TAG = 'request_call'
_RECEIVE_CLOSE_ON_SERVER_TOKEN = 'receive_close_on_server'
_SEND_INITIAL_METADATA_TOKEN = 'send_initial_metadata'
_RECEIVE_MESSAGE_TOKEN = 'receive_message'
_SEND_MESSAGE_TOKEN = 'send_message'
_SEND_INITIAL_METADATA_AND_SEND_MESSAGE_TOKEN = (
'send_initial_metadata * send_message')
_SEND_STATUS_FROM_SERVER_TOKEN = 'send_status_from_server'
_SEND_INITIAL_METADATA_AND_SEND_STATUS_FROM_SERVER_TOKEN = (
'send_initial_metadata * send_status_from_server')
_OPEN = 'open'
_CLOSED = 'closed'
_CANCELLED = 'cancelled'
_EMPTY_FLAGS = 0
_UNEXPECTED_EXIT_SERVER_GRACE = 1.0
def _serialized_request(request_event):
return request_event.batch_operations[0].received_message.bytes()
def _application_code(code):
cygrpc_code = _common.STATUS_CODE_TO_CYGRPC_STATUS_CODE.get(code)
return cygrpc.StatusCode.unknown if cygrpc_code is None else cygrpc_code
def _completion_code(state):
if state.code is None:
return cygrpc.StatusCode.ok
else:
return _application_code(state.code)
def _abortion_code(state, code):
if state.code is None:
return code
else:
return _application_code(state.code)
def _details(state):
return b'' if state.details is None else state.details
class _HandlerCallDetails(
collections.namedtuple('_HandlerCallDetails', (
'method', 'invocation_metadata',)), grpc.HandlerCallDetails):
pass
class _RPCState(object):
def __init__(self):
self.condition = threading.Condition()
self.due = set()
self.request = None
self.client = _OPEN
self.initial_metadata_allowed = True
self.disable_next_compression = False
self.trailing_metadata = None
self.code = None
self.details = None
self.statused = False
self.rpc_errors = []
self.callbacks = []
self.abortion = None
def _raise_rpc_error(state):
rpc_error = grpc.RpcError()
state.rpc_errors.append(rpc_error)
raise rpc_error
def _possibly_finish_call(state, token):
state.due.remove(token)
if (state.client is _CANCELLED or state.statused) and not state.due:
callbacks = state.callbacks
state.callbacks = None
return state, callbacks
else:
return None, ()
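# Note: a batch completion only finishes the call once every outstanding
# operation token has been removed from `state.due` and the client has
# cancelled or the final status has been sent; until then the event is
# treated as intermediate and no callbacks are returned.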
def _send_status_from_server(state, token):
def send_status_from_server(unused_send_status_from_server_event):
with state.condition:
return _possibly_finish_call(state, token)
return send_status_from_server
def _abort(state, call, code, details):
if state.client is not _CANCELLED:
effective_code = _abortion_code(state, code)
effective_details = details if state.details is None else state.details
if state.initial_metadata_allowed:
operations = (cygrpc.operation_send_initial_metadata(
(), _EMPTY_FLAGS), cygrpc.operation_send_status_from_server(
state.trailing_metadata, effective_code, effective_details,
_EMPTY_FLAGS),)
token = _SEND_INITIAL_METADATA_AND_SEND_STATUS_FROM_SERVER_TOKEN
else:
operations = (cygrpc.operation_send_status_from_server(
state.trailing_metadata, effective_code, effective_details,
_EMPTY_FLAGS),)
token = _SEND_STATUS_FROM_SERVER_TOKEN
call.start_server_batch(operations,
_send_status_from_server(state, token))
state.statused = True
state.due.add(token)
def _receive_close_on_server(state):
def receive_close_on_server(receive_close_on_server_event):
with state.condition:
if receive_close_on_server_event.batch_operations[
0].received_cancelled:
state.client = _CANCELLED
elif state.client is _OPEN:
state.client = _CLOSED
state.condition.notify_all()
return _possibly_finish_call(state, _RECEIVE_CLOSE_ON_SERVER_TOKEN)
return receive_close_on_server
def _receive_message(state, call, request_deserializer):
def receive_message(receive_message_event):
serialized_request = _serialized_request(receive_message_event)
if serialized_request is None:
with state.condition:
if state.client is _OPEN:
state.client = _CLOSED
state.condition.notify_all()
return _possibly_finish_call(state, _RECEIVE_MESSAGE_TOKEN)
else:
request = _common.deserialize(serialized_request,
request_deserializer)
with state.condition:
if request is None:
_abort(state, call, cygrpc.StatusCode.internal,
b'Exception deserializing request!')
else:
state.request = request
state.condition.notify_all()
return _possibly_finish_call(state, _RECEIVE_MESSAGE_TOKEN)
return receive_message
def _send_initial_metadata(state):
def send_initial_metadata(unused_send_initial_metadata_event):
with state.condition:
return _possibly_finish_call(state, _SEND_INITIAL_METADATA_TOKEN)
return send_initial_metadata
def _send_message(state, token):
def send_message(unused_send_message_event):
with state.condition:
state.condition.notify_all()
return _possibly_finish_call(state, token)
return send_message
class _Context(grpc.ServicerContext):
def __init__(self, rpc_event, state, request_deserializer):
self._rpc_event = rpc_event
self._state = state
self._request_deserializer = request_deserializer
def is_active(self):
with self._state.condition:
return self._state.client is not _CANCELLED and not self._state.statused
def time_remaining(self):
return max(
float(self._rpc_event.request_call_details.deadline) - time.time(),
0)
def cancel(self):
self._rpc_event.operation_call.cancel()
def add_callback(self, callback):
with self._state.condition:
if self._state.callbacks is None:
return False
else:
self._state.callbacks.append(callback)
return True
def disable_next_message_compression(self):
with self._state.condition:
self._state.disable_next_compression = True
def invocation_metadata(self):
return self._rpc_event.request_metadata
def peer(self):
return _common.decode(self._rpc_event.operation_call.peer())
def peer_identities(self):
return cygrpc.peer_identities(self._rpc_event.operation_call)
def peer_identity_key(self):
id_key = cygrpc.peer_identity_key(self._rpc_event.operation_call)
return id_key if id_key is None else _common.decode(id_key)
def auth_context(self):
return {
_common.decode(key): value
for key, value in six.iteritems(
cygrpc.auth_context(self._rpc_event.operation_call))
}
def send_initial_metadata(self, initial_metadata):
with self._state.condition:
if self._state.client is _CANCELLED:
_raise_rpc_error(self._state)
else:
if self._state.initial_metadata_allowed:
operation = cygrpc.operation_send_initial_metadata(
initial_metadata, _EMPTY_FLAGS)
self._rpc_event.operation_call.start_server_batch(
(operation,), _send_initial_metadata(self._state))
self._state.initial_metadata_allowed = False
self._state.due.add(_SEND_INITIAL_METADATA_TOKEN)
else:
raise ValueError('Initial metadata no longer allowed!')
def set_trailing_metadata(self, trailing_metadata):
with self._state.condition:
self._state.trailing_metadata = trailing_metadata
def abort(self, code, details):
with self._state.condition:
self._state.code = code
self._state.details = _common.encode(details)
self._state.abortion = Exception()
raise self._state.abortion
def set_code(self, code):
with self._state.condition:
self._state.code = code
def set_details(self, details):
with self._state.condition:
self._state.details = _common.encode(details)
class _RequestIterator(object):
def __init__(self, state, call, request_deserializer):
self._state = state
self._call = call
self._request_deserializer = request_deserializer
def _raise_or_start_receive_message(self):
if self._state.client is _CANCELLED:
_raise_rpc_error(self._state)
elif self._state.client is _CLOSED or self._state.statused:
raise StopIteration()
else:
self._call.start_server_batch(
(cygrpc.operation_receive_message(_EMPTY_FLAGS),),
_receive_message(self._state, self._call,
self._request_deserializer))
self._state.due.add(_RECEIVE_MESSAGE_TOKEN)
def _look_for_request(self):
if self._state.client is _CANCELLED:
_raise_rpc_error(self._state)
elif (self._state.request is None and
_RECEIVE_MESSAGE_TOKEN not in self._state.due):
raise StopIteration()
else:
request = self._state.request
self._state.request = None
return request
def _next(self):
with self._state.condition:
self._raise_or_start_receive_message()
while True:
self._state.condition.wait()
request = self._look_for_request()
if request is not None:
return request
def __iter__(self):
return self
def __next__(self):
return self._next()
def next(self):
return self._next()
def _unary_request(rpc_event, state, request_deserializer):
def unary_request():
with state.condition:
if state.client is _CANCELLED or state.statused:
return None
else:
rpc_event.operation_call.start_server_batch(
(cygrpc.operation_receive_message(_EMPTY_FLAGS),),
_receive_message(state, rpc_event.operation_call,
request_deserializer))
state.due.add(_RECEIVE_MESSAGE_TOKEN)
while True:
state.condition.wait()
if state.request is None:
if state.client is _CLOSED:
details = '"{}" requires exactly one request message.'.format(
rpc_event.request_call_details.method)
_abort(state, rpc_event.operation_call,
cygrpc.StatusCode.unimplemented,
_common.encode(details))
return None
elif state.client is _CANCELLED:
return None
else:
request = state.request
state.request = None
return request
return unary_request
def _call_behavior(rpc_event, state, behavior, argument, request_deserializer):
context = _Context(rpc_event, state, request_deserializer)
try:
return behavior(argument, context), True
except Exception as exception: # pylint: disable=broad-except
with state.condition:
if exception is state.abortion:
_abort(state, rpc_event.operation_call,
cygrpc.StatusCode.unknown, b'RPC Aborted')
elif exception not in state.rpc_errors:
details = 'Exception calling application: {}'.format(exception)
logging.exception(details)
_abort(state, rpc_event.operation_call,
cygrpc.StatusCode.unknown, _common.encode(details))
return None, False
def _take_response_from_response_iterator(rpc_event, state, response_iterator):
try:
return next(response_iterator), True
except StopIteration:
return None, True
except Exception as exception: # pylint: disable=broad-except
with state.condition:
if exception is state.abortion:
_abort(state, rpc_event.operation_call,
cygrpc.StatusCode.unknown, b'RPC Aborted')
elif exception not in state.rpc_errors:
details = 'Exception iterating responses: {}'.format(exception)
logging.exception(details)
_abort(state, rpc_event.operation_call,
cygrpc.StatusCode.unknown, _common.encode(details))
return None, False
def _serialize_response(rpc_event, state, response, response_serializer):
serialized_response = _common.serialize(response, response_serializer)
if serialized_response is None:
with state.condition:
_abort(state, rpc_event.operation_call, cygrpc.StatusCode.internal,
b'Failed to serialize response!')
return None
else:
return serialized_response
def _send_response(rpc_event, state, serialized_response):
with state.condition:
if state.client is _CANCELLED or state.statused:
return False
else:
if state.initial_metadata_allowed:
operations = (cygrpc.operation_send_initial_metadata(
(), _EMPTY_FLAGS), cygrpc.operation_send_message(
serialized_response, _EMPTY_FLAGS),)
state.initial_metadata_allowed = False
token = _SEND_INITIAL_METADATA_AND_SEND_MESSAGE_TOKEN
else:
operations = (cygrpc.operation_send_message(serialized_response,
_EMPTY_FLAGS),)
token = _SEND_MESSAGE_TOKEN
rpc_event.operation_call.start_server_batch(
operations, _send_message(state, token))
state.due.add(token)
while True:
state.condition.wait()
if token not in state.due:
return state.client is not _CANCELLED and not state.statused
def _status(rpc_event, state, serialized_response):
with state.condition:
if state.client is not _CANCELLED:
code = _completion_code(state)
details = _details(state)
operations = [
cygrpc.operation_send_status_from_server(
state.trailing_metadata, code, details, _EMPTY_FLAGS),
]
if state.initial_metadata_allowed:
operations.append(
cygrpc.operation_send_initial_metadata((), _EMPTY_FLAGS))
if serialized_response is not None:
operations.append(
cygrpc.operation_send_message(serialized_response,
_EMPTY_FLAGS))
rpc_event.operation_call.start_server_batch(
operations,
_send_status_from_server(state, _SEND_STATUS_FROM_SERVER_TOKEN))
state.statused = True
state.due.add(_SEND_STATUS_FROM_SERVER_TOKEN)
def _unary_response_in_pool(rpc_event, state, behavior, argument_thunk,
request_deserializer, response_serializer):
argument = argument_thunk()
if argument is not None:
response, proceed = _call_behavior(rpc_event, state, behavior, argument,
request_deserializer)
if proceed:
serialized_response = _serialize_response(
rpc_event, state, response, response_serializer)
if serialized_response is not None:
_status(rpc_event, state, serialized_response)
def _stream_response_in_pool(rpc_event, state, behavior, argument_thunk,
request_deserializer, response_serializer):
argument = argument_thunk()
if argument is not None:
response_iterator, proceed = _call_behavior(
rpc_event, state, behavior, argument, request_deserializer)
if proceed:
while True:
response, proceed = _take_response_from_response_iterator(
rpc_event, state, response_iterator)
if proceed:
if response is None:
_status(rpc_event, state, None)
break
else:
serialized_response = _serialize_response(
rpc_event, state, response, response_serializer)
if serialized_response is not None:
proceed = _send_response(rpc_event, state,
serialized_response)
if not proceed:
break
else:
break
else:
break
def _handle_unary_unary(rpc_event, state, method_handler, thread_pool):
unary_request = _unary_request(rpc_event, state,
method_handler.request_deserializer)
return thread_pool.submit(_unary_response_in_pool, rpc_event, state,
method_handler.unary_unary, unary_request,
method_handler.request_deserializer,
method_handler.response_serializer)
def _handle_unary_stream(rpc_event, state, method_handler, thread_pool):
unary_request = _unary_request(rpc_event, state,
method_handler.request_deserializer)
return thread_pool.submit(_stream_response_in_pool, rpc_event, state,
method_handler.unary_stream, unary_request,
method_handler.request_deserializer,
method_handler.response_serializer)
def _handle_stream_unary(rpc_event, state, method_handler, thread_pool):
request_iterator = _RequestIterator(state, rpc_event.operation_call,
method_handler.request_deserializer)
return thread_pool.submit(
_unary_response_in_pool, rpc_event, state, method_handler.stream_unary,
lambda: request_iterator, method_handler.request_deserializer,
method_handler.response_serializer)
def _handle_stream_stream(rpc_event, state, method_handler, thread_pool):
request_iterator = _RequestIterator(state, rpc_event.operation_call,
method_handler.request_deserializer)
return thread_pool.submit(
_stream_response_in_pool, rpc_event, state,
method_handler.stream_stream, lambda: request_iterator,
method_handler.request_deserializer, method_handler.response_serializer)
def _find_method_handler(rpc_event, generic_handlers, interceptor_pipeline):
def query_handlers(handler_call_details):
for generic_handler in generic_handlers:
method_handler = generic_handler.service(handler_call_details)
if method_handler is not None:
return method_handler
return None
handler_call_details = _HandlerCallDetails(
_common.decode(rpc_event.request_call_details.method),
rpc_event.request_metadata)
if interceptor_pipeline is not None:
return interceptor_pipeline.execute(query_handlers,
handler_call_details)
else:
return query_handlers(handler_call_details)
def _reject_rpc(rpc_event, status, details):
operations = (cygrpc.operation_send_initial_metadata((), _EMPTY_FLAGS),
cygrpc.operation_receive_close_on_server(_EMPTY_FLAGS),
cygrpc.operation_send_status_from_server((), status, details,
_EMPTY_FLAGS),)
rpc_state = _RPCState()
rpc_event.operation_call.start_server_batch(
operations, lambda ignored_event: (rpc_state, (),))
return rpc_state
def _handle_with_method_handler(rpc_event, method_handler, thread_pool):
state = _RPCState()
with state.condition:
rpc_event.operation_call.start_server_batch(
(cygrpc.operation_receive_close_on_server(_EMPTY_FLAGS),),
_receive_close_on_server(state))
state.due.add(_RECEIVE_CLOSE_ON_SERVER_TOKEN)
if method_handler.request_streaming:
if method_handler.response_streaming:
return state, _handle_stream_stream(rpc_event, state,
method_handler, thread_pool)
else:
return state, _handle_stream_unary(rpc_event, state,
method_handler, thread_pool)
else:
if method_handler.response_streaming:
return state, _handle_unary_stream(rpc_event, state,
method_handler, thread_pool)
else:
return state, _handle_unary_unary(rpc_event, state,
method_handler, thread_pool)
def _handle_call(rpc_event, generic_handlers, interceptor_pipeline, thread_pool,
concurrency_exceeded):
if not rpc_event.success:
return None, None
if rpc_event.request_call_details.method is not None:
try:
method_handler = _find_method_handler(rpc_event, generic_handlers,
interceptor_pipeline)
except Exception as exception: # pylint: disable=broad-except
details = 'Exception servicing handler: {}'.format(exception)
logging.exception(details)
return _reject_rpc(rpc_event, cygrpc.StatusCode.unknown,
b'Error in service handler!'), None
if method_handler is None:
return _reject_rpc(rpc_event, cygrpc.StatusCode.unimplemented,
b'Method not found!'), None
elif concurrency_exceeded:
return _reject_rpc(rpc_event, cygrpc.StatusCode.resource_exhausted,
b'Concurrent RPC limit exceeded!'), None
else:
return _handle_with_method_handler(rpc_event, method_handler,
thread_pool)
else:
return None, None
@enum.unique
class _ServerStage(enum.Enum):
STOPPED = 'stopped'
STARTED = 'started'
GRACE = 'grace'
class _ServerState(object):
# pylint: disable=too-many-arguments
def __init__(self, completion_queue, server, generic_handlers,
interceptor_pipeline, thread_pool, maximum_concurrent_rpcs):
self.lock = threading.RLock()
self.completion_queue = completion_queue
self.server = server
self.generic_handlers = list(generic_handlers)
self.interceptor_pipeline = interceptor_pipeline
self.thread_pool = thread_pool
self.stage = _ServerStage.STOPPED
self.shutdown_events = None
self.maximum_concurrent_rpcs = maximum_concurrent_rpcs
self.active_rpc_count = 0
# TODO(https://github.com/grpc/grpc/issues/6597): eliminate these fields.
self.rpc_states = set()
self.due = set()
def _add_generic_handlers(state, generic_handlers):
with state.lock:
state.generic_handlers.extend(generic_handlers)
def _add_insecure_port(state, address):
with state.lock:
return state.server.add_http2_port(address)
def _add_secure_port(state, address, server_credentials):
with state.lock:
return state.server.add_http2_port(address,
server_credentials._credentials)
def _request_call(state):
state.server.request_call(state.completion_queue, state.completion_queue,
_REQUEST_CALL_TAG)
state.due.add(_REQUEST_CALL_TAG)
# TODO(https://github.com/grpc/grpc/issues/6597): delete this function.
def _stop_serving(state):
if not state.rpc_states and not state.due:
for shutdown_event in state.shutdown_events:
shutdown_event.set()
state.stage = _ServerStage.STOPPED
return True
else:
return False
def _on_call_completed(state):
with state.lock:
state.active_rpc_count -= 1
def _serve(state):
while True:
event = state.completion_queue.poll()
if event.tag is _SHUTDOWN_TAG:
with state.lock:
state.due.remove(_SHUTDOWN_TAG)
if _stop_serving(state):
return
elif event.tag is _REQUEST_CALL_TAG:
with state.lock:
state.due.remove(_REQUEST_CALL_TAG)
concurrency_exceeded = (
state.maximum_concurrent_rpcs is not None and
state.active_rpc_count >= state.maximum_concurrent_rpcs)
rpc_state, rpc_future = _handle_call(
event, state.generic_handlers, state.interceptor_pipeline,
state.thread_pool, concurrency_exceeded)
if rpc_state is not None:
state.rpc_states.add(rpc_state)
if rpc_future is not None:
state.active_rpc_count += 1
rpc_future.add_done_callback(
lambda unused_future: _on_call_completed(state))
if state.stage is _ServerStage.STARTED:
_request_call(state)
elif _stop_serving(state):
return
else:
rpc_state, callbacks = event.tag(event)
for callback in callbacks:
callable_util.call_logging_exceptions(
callback, 'Exception calling callback!')
if rpc_state is not None:
with state.lock:
state.rpc_states.remove(rpc_state)
if _stop_serving(state):
return
# We want to force the deletion of the previous event
# ~before~ we poll again; if the event has a reference
# to a shutdown Call object, this can induce spinlock.
event = None
def _stop(state, grace):
with state.lock:
if state.stage is _ServerStage.STOPPED:
shutdown_event = threading.Event()
shutdown_event.set()
return shutdown_event
else:
if state.stage is _ServerStage.STARTED:
state.server.shutdown(state.completion_queue, _SHUTDOWN_TAG)
state.stage = _ServerStage.GRACE
state.shutdown_events = []
state.due.add(_SHUTDOWN_TAG)
shutdown_event = threading.Event()
state.shutdown_events.append(shutdown_event)
if grace is None:
state.server.cancel_all_calls()
else:
def cancel_all_calls_after_grace():
shutdown_event.wait(timeout=grace)
with state.lock:
state.server.cancel_all_calls()
thread = threading.Thread(target=cancel_all_calls_after_grace)
thread.start()
return shutdown_event
shutdown_event.wait()
return shutdown_event
def _start(state):
with state.lock:
if state.stage is not _ServerStage.STOPPED:
raise ValueError('Cannot start already-started server!')
state.server.start()
state.stage = _ServerStage.STARTED
_request_call(state)
def cleanup_server(timeout):
if timeout is None:
_stop(state, _UNEXPECTED_EXIT_SERVER_GRACE).wait()
else:
_stop(state, timeout).wait()
thread = _common.CleanupThread(
cleanup_server, target=_serve, args=(state,))
thread.start()
class Server(grpc.Server):
# pylint: disable=too-many-arguments
def __init__(self, thread_pool, generic_handlers, interceptors, options,
maximum_concurrent_rpcs):
completion_queue = cygrpc.CompletionQueue()
server = cygrpc.Server(_common.channel_args(options))
server.register_completion_queue(completion_queue)
self._state = _ServerState(completion_queue, server, generic_handlers,
_interceptor.service_pipeline(interceptors),
thread_pool, maximum_concurrent_rpcs)
def add_generic_rpc_handlers(self, generic_rpc_handlers):
_add_generic_handlers(self._state, generic_rpc_handlers)
def add_insecure_port(self, address):
return _add_insecure_port(self._state, _common.encode(address))
def add_secure_port(self, address, server_credentials):
return _add_secure_port(self._state,
_common.encode(address), server_credentials)
def start(self):
_start(self._state)
def stop(self, grace):
return _stop(self._state, grace)
def __del__(self):
_stop(self._state, None)
|
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test the keras ResNet model with ImageNet data."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import tensorflow as tf
from tensorflow.python.eager import context
from official.utils.misc import keras_utils
from official.utils.testing import integration
from official.vision.image_classification import imagenet_preprocessing
from official.vision.image_classification import resnet_imagenet_main
@parameterized.parameters(
"resnet",
"resnet_polynomial_decay",
"mobilenet",
"mobilenet_polynomial_decay")
class KerasImagenetTest(tf.test.TestCase):
"""Unit tests for Keras Models with ImageNet."""
_default_flags_dict = [
"-batch_size", "4",
"-train_steps", "1",
"-use_synthetic_data", "true",
"-data_format", "channels_last",
]
_extra_flags_dict = {
"resnet": [
"-model", "resnet50_v1.5",
"-optimizer", "resnet50_default",
],
"resnet_polynomial_decay": [
"-model", "resnet50_v1.5",
"-optimizer", "resnet50_default",
"-pruning_method", "polynomial_decay",
"-use_tf_keras_layers", "true",
],
"mobilenet": [
"-model", "mobilenet",
"-optimizer", "mobilenet_default",
],
"mobilenet_polynomial_decay": [
"-model", "mobilenet",
"-optimizer", "mobilenet_default",
"-pruning_method", "polynomial_decay",
],
}
_tempdir = None
@classmethod
def setUpClass(cls): # pylint: disable=invalid-name
super(KerasImagenetTest, cls).setUpClass()
resnet_imagenet_main.define_imagenet_keras_flags()
def setUp(self):
super(KerasImagenetTest, self).setUp()
imagenet_preprocessing.NUM_IMAGES["validation"] = 4
self.policy = \
tf.compat.v2.keras.mixed_precision.experimental.global_policy()
def tearDown(self):
super(KerasImagenetTest, self).tearDown()
tf.io.gfile.rmtree(self.get_temp_dir())
tf.compat.v2.keras.mixed_precision.experimental.set_policy(self.policy)
def get_extra_flags_dict(self, flags_key):
return self._extra_flags_dict[flags_key] + self._default_flags_dict
def test_end_to_end_no_dist_strat(self, flags_key):
"""Test Keras model with 1 GPU, no distribution strategy."""
config = keras_utils.get_config_proto_v1()
tf.compat.v1.enable_eager_execution(config=config)
extra_flags = [
"-distribution_strategy", "off",
]
extra_flags = extra_flags + self.get_extra_flags_dict(flags_key)
integration.run_synthetic(
main=resnet_imagenet_main.run,
tmp_root=self.get_temp_dir(),
extra_flags=extra_flags
)
def test_end_to_end_graph_no_dist_strat(self, flags_key):
"""Test Keras model in legacy graph mode with 1 GPU, no dist strat."""
extra_flags = [
"-enable_eager", "false",
"-distribution_strategy", "off",
]
extra_flags = extra_flags + self.get_extra_flags_dict(flags_key)
integration.run_synthetic(
main=resnet_imagenet_main.run,
tmp_root=self.get_temp_dir(),
extra_flags=extra_flags
)
def test_end_to_end_1_gpu(self, flags_key):
"""Test Keras model with 1 GPU."""
config = keras_utils.get_config_proto_v1()
tf.compat.v1.enable_eager_execution(config=config)
if context.num_gpus() < 1:
self.skipTest(
"{} GPUs are not available for this test. {} GPUs are available".
format(1, context.num_gpus()))
extra_flags = [
"-num_gpus", "1",
"-distribution_strategy", "mirrored",
"-enable_checkpoint_and_export", "1",
]
extra_flags = extra_flags + self.get_extra_flags_dict(flags_key)
integration.run_synthetic(
main=resnet_imagenet_main.run,
tmp_root=self.get_temp_dir(),
extra_flags=extra_flags
)
def test_end_to_end_1_gpu_fp16(self, flags_key):
"""Test Keras model with 1 GPU and fp16."""
config = keras_utils.get_config_proto_v1()
tf.compat.v1.enable_eager_execution(config=config)
if context.num_gpus() < 1:
self.skipTest(
"{} GPUs are not available for this test. {} GPUs are available"
.format(1, context.num_gpus()))
extra_flags = [
"-num_gpus", "1",
"-dtype", "fp16",
"-distribution_strategy", "mirrored",
]
extra_flags = extra_flags + self.get_extra_flags_dict(flags_key)
if "polynomial_decay" in extra_flags:
self.skipTest("Pruning with fp16 is not currently supported.")
integration.run_synthetic(
main=resnet_imagenet_main.run,
tmp_root=self.get_temp_dir(),
extra_flags=extra_flags
)
def test_end_to_end_2_gpu(self, flags_key):
"""Test Keras model with 2 GPUs."""
config = keras_utils.get_config_proto_v1()
tf.compat.v1.enable_eager_execution(config=config)
if context.num_gpus() < 2:
self.skipTest(
"{} GPUs are not available for this test. {} GPUs are available".
format(2, context.num_gpus()))
extra_flags = [
"-num_gpus", "2",
"-distribution_strategy", "mirrored",
]
extra_flags = extra_flags + self.get_extra_flags_dict(flags_key)
integration.run_synthetic(
main=resnet_imagenet_main.run,
tmp_root=self.get_temp_dir(),
extra_flags=extra_flags
)
def test_end_to_end_xla_2_gpu(self, flags_key):
"""Test Keras model with XLA and 2 GPUs."""
config = keras_utils.get_config_proto_v1()
tf.compat.v1.enable_eager_execution(config=config)
if context.num_gpus() < 2:
self.skipTest(
"{} GPUs are not available for this test. {} GPUs are available".
format(2, context.num_gpus()))
extra_flags = [
"-num_gpus", "2",
"-enable_xla", "true",
"-distribution_strategy", "mirrored",
]
extra_flags = extra_flags + self.get_extra_flags_dict(flags_key)
integration.run_synthetic(
main=resnet_imagenet_main.run,
tmp_root=self.get_temp_dir(),
extra_flags=extra_flags
)
def test_end_to_end_2_gpu_fp16(self, flags_key):
"""Test Keras model with 2 GPUs and fp16."""
config = keras_utils.get_config_proto_v1()
tf.compat.v1.enable_eager_execution(config=config)
if context.num_gpus() < 2:
self.skipTest(
"{} GPUs are not available for this test. {} GPUs are available".
format(2, context.num_gpus()))
extra_flags = [
"-num_gpus", "2",
"-dtype", "fp16",
"-distribution_strategy", "mirrored",
]
extra_flags = extra_flags + self.get_extra_flags_dict(flags_key)
if "polynomial_decay" in extra_flags:
self.skipTest("Pruning with fp16 is not currently supported.")
integration.run_synthetic(
main=resnet_imagenet_main.run,
tmp_root=self.get_temp_dir(),
extra_flags=extra_flags
)
def test_end_to_end_xla_2_gpu_fp16(self, flags_key):
"""Test Keras model with XLA, 2 GPUs and fp16."""
config = keras_utils.get_config_proto_v1()
tf.compat.v1.enable_eager_execution(config=config)
if context.num_gpus() < 2:
self.skipTest(
"{} GPUs are not available for this test. {} GPUs are available".
format(2, context.num_gpus()))
extra_flags = [
"-num_gpus", "2",
"-dtype", "fp16",
"-enable_xla", "true",
"-distribution_strategy", "mirrored",
]
extra_flags = extra_flags + self.get_extra_flags_dict(flags_key)
if "polynomial_decay" in extra_flags:
self.skipTest("Pruning with fp16 is not currently supported.")
integration.run_synthetic(
main=resnet_imagenet_main.run,
tmp_root=self.get_temp_dir(),
extra_flags=extra_flags
)
if __name__ == "__main__":
tf.compat.v1.enable_v2_behavior()
tf.test.main()
|
|
from __future__ import absolute_import, unicode_literals
import re
import sqlite3
import synapse.compat as s_compat
import synapse.cores.common as s_cores_common
from synapse.compat import queue
from synapse.common import now, genpath
stashre = re.compile('{{([A-Z]+)}}')
int_t = s_compat.typeof(0)
str_t = s_compat.typeof('visi')
none_t = s_compat.typeof(None)
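# Illustration only (this helper is not used elsewhere in the module): the
# {{NAME}} stashes in the SQL templates below can be listed with stashre.
def _example_list_stashes(query):
# e.g. _example_list_stashes('SELECT * FROM {{TABLE}} WHERE iden={{IDEN}}')
# returns ['TABLE', 'IDEN']
return stashre.findall(query)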
class CoreXact(s_cores_common.CoreXact):
def _coreXactInit(self):
self.db = None
self.cursor = None
def _coreXactCommit(self):
self.cursor.execute('COMMIT')
def _coreXactBegin(self):
self.cursor.execute('BEGIN TRANSACTION')
def _coreXactAcquire(self):
self.db = self.core.dbpool.get()
self.cursor = self.db.cursor()
def _coreXactRelease(self):
self.cursor.close()
self.core.dbpool.put( self.db )
self.db = None
self.cursor = None
class DbPool:
'''
The DbPool allows generic db connection pooling using
a factory/ctor method and a python queue.
Example:
def connectdb():
# do stuff
return db
pool = DbPool(3, connectdb)
'''
def __init__(self, size, ctor):
# TODO: high/low water marks
self.size = size
self.ctor = ctor
self.dbque = queue.Queue()
for i in range(size):
db = ctor()
self.put( db )
def put(self, db):
'''
Add/Return a db connection to the pool.
'''
self.dbque.put(db)
def get(self):
return self.dbque.get()
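# Minimal usage sketch (the sqlite3 settings here are assumptions for
# illustration, not taken from this module):
#
#   def _connect():
#       return sqlite3.connect(':memory:', check_same_thread=False)
#
#   pool = DbPool(3, _connect)
#   db = pool.get()
#   try:
#       db.cursor().execute('SELECT 1')
#   finally:
#       pool.put(db)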
class Cortex(s_cores_common.Cortex):
dblim = -1
_t_istable = '''
SELECT
name
FROM
sqlite_master
WHERE
type='table'
AND
name={{NAME}}
'''
_t_inittable = '''
CREATE TABLE {{TABLE}} (
iden VARCHAR,
prop VARCHAR,
strval TEXT,
intval BIGINT,
tstamp BIGINT
);
'''
_t_init_iden_idx = 'CREATE INDEX {{TABLE}}_iden_idx ON {{TABLE}} (iden,prop)'
_t_init_prop_idx = 'CREATE INDEX {{TABLE}}_prop_time_idx ON {{TABLE}} (prop,tstamp)'
_t_init_strval_idx = 'CREATE INDEX {{TABLE}}_strval_idx ON {{TABLE}} (prop,strval,tstamp)'
_t_init_intval_idx = 'CREATE INDEX {{TABLE}}_intval_idx ON {{TABLE}} (prop,intval,tstamp)'
_t_addrows = 'INSERT INTO {{TABLE}} (iden,prop,strval,intval,tstamp) VALUES ({{IDEN}},{{PROP}},{{STRVAL}},{{INTVAL}},{{TSTAMP}})'
_t_getrows_by_iden = 'SELECT * FROM {{TABLE}} WHERE iden={{IDEN}}'
_t_getrows_by_range = 'SELECT * FROM {{TABLE}} WHERE prop={{PROP}} and intval >= {{MINVALU}} AND intval < {{MAXVALU}} LIMIT {{LIMIT}}'
_t_getrows_by_le = 'SELECT * FROM {{TABLE}} WHERE prop={{PROP}} and intval <= {{VALU}} LIMIT {{LIMIT}}'
_t_getrows_by_ge = 'SELECT * FROM {{TABLE}} WHERE prop={{PROP}} and intval >= {{VALU}} LIMIT {{LIMIT}}'
_t_getrows_by_iden_prop = 'SELECT * FROM {{TABLE}} WHERE iden={{IDEN}} AND prop={{PROP}}'
_t_getrows_by_iden_prop_intval = 'SELECT * FROM {{TABLE}} WHERE iden={{IDEN}} AND prop={{PROP}} AND intval={{VALU}}'
_t_getrows_by_iden_prop_strval = 'SELECT * FROM {{TABLE}} WHERE iden={{IDEN}} AND prop={{PROP}} AND strval={{VALU}}'
################################################################################
_t_getrows_by_prop = 'SELECT * FROM {{TABLE}} WHERE prop={{PROP}} LIMIT {{LIMIT}}'
_t_getrows_by_prop_int = 'SELECT * FROM {{TABLE}} WHERE prop={{PROP}} AND intval={{VALU}} LIMIT {{LIMIT}}'
_t_getrows_by_prop_str = 'SELECT * FROM {{TABLE}} WHERE prop={{PROP}} AND strval={{VALU}} LIMIT {{LIMIT}}'
_t_getrows_by_prop_wmin = 'SELECT * FROM {{TABLE}} WHERE prop={{PROP}} AND tstamp >= {{MINTIME}} LIMIT {{LIMIT}}'
_t_getrows_by_prop_int_wmin = 'SELECT * FROM {{TABLE}} WHERE prop={{PROP}} AND intval={{VALU}} AND tstamp >= {{MINTIME}} LIMIT {{LIMIT}}'
_t_getrows_by_prop_str_wmin = 'SELECT * FROM {{TABLE}} WHERE prop={{PROP}} AND strval={{VALU}} AND tstamp >= {{MINTIME}} LIMIT {{LIMIT}}'
_t_getrows_by_prop_wmax = 'SELECT * FROM {{TABLE}} WHERE prop={{PROP}} AND tstamp<{{MAXTIME}} LIMIT {{LIMIT}}'
_t_getrows_by_prop_int_wmax = 'SELECT * FROM {{TABLE}} WHERE prop={{PROP}} AND intval={{VALU}} AND tstamp<{{MAXTIME}} LIMIT {{LIMIT}}'
_t_getrows_by_prop_str_wmax = 'SELECT * FROM {{TABLE}} WHERE prop={{PROP}} AND strval={{VALU}} AND tstamp<{{MAXTIME}} LIMIT {{LIMIT}}'
_t_getrows_by_prop_wminmax = 'SELECT * FROM {{TABLE}} WHERE prop={{PROP}} AND tstamp>={{MINTIME}} AND tstamp<{{MAXTIME}} LIMIT {{LIMIT}}'
_t_getrows_by_prop_int_wminmax = 'SELECT * FROM {{TABLE}} WHERE prop={{PROP}} AND intval={{VALU}} AND tstamp >= {{MINTIME}} AND tstamp<{{MAXTIME}} LIMIT {{LIMIT}}'
_t_getrows_by_prop_str_wminmax = 'SELECT * FROM {{TABLE}} WHERE prop={{PROP}} AND strval={{VALU}} AND tstamp >= {{MINTIME}} AND tstamp<{{MAXTIME}} LIMIT {{LIMIT}}'
################################################################################
_t_getsize_by_prop = 'SELECT COUNT(*) FROM {{TABLE}} WHERE prop={{PROP}} LIMIT {{LIMIT}}'
_t_getsize_by_prop_int = 'SELECT COUNT(*) FROM {{TABLE}} WHERE prop={{PROP}} AND intval={{VALU}} LIMIT {{LIMIT}}'
_t_getsize_by_prop_str = 'SELECT COUNT(*) FROM {{TABLE}} WHERE prop={{PROP}} AND strval={{VALU}} LIMIT {{LIMIT}}'
_t_getsize_by_prop_wmin = 'SELECT COUNT(*) FROM {{TABLE}} WHERE prop={{PROP}} AND tstamp>={{MINTIME}} LIMIT {{LIMIT}}'
_t_getsize_by_prop_int_wmin = 'SELECT COUNT(*) FROM {{TABLE}} WHERE prop={{PROP}} AND intval={{VALU}} AND tstamp>={{MINTIME}} LIMIT {{LIMIT}}'
_t_getsize_by_prop_str_wmin = 'SELECT COUNT(*) FROM {{TABLE}} WHERE prop={{PROP}} AND strval={{VALU}} AND tstamp>={{MINTIME}} LIMIT {{LIMIT}}'
_t_getsize_by_prop_wmax = 'SELECT COUNT(*) FROM {{TABLE}} WHERE prop={{PROP}} AND tstamp<{{MAXTIME}} LIMIT {{LIMIT}}'
_t_getsize_by_prop_int_wmax = 'SELECT COUNT(*) FROM {{TABLE}} WHERE prop={{PROP}} AND intval={{VALU}} AND tstamp<{{MAXTIME}} LIMIT {{LIMIT}}'
_t_getsize_by_prop_str_wmax = 'SELECT COUNT(*) FROM {{TABLE}} WHERE prop={{PROP}} AND strval={{VALU}} AND tstamp<{{MAXTIME}} LIMIT {{LIMIT}}'
_t_getsize_by_prop_wminmax = 'SELECT COUNT(*) FROM {{TABLE}} WHERE prop={{PROP}} AND tstamp>={{MINTIME}} AND tstamp<{{MAXTIME}} LIMIT {{LIMIT}}'
_t_getsize_by_prop_int_wminmax = 'SELECT COUNT(*) FROM {{TABLE}} WHERE prop={{PROP}} AND intval={{VALU}} AND tstamp>={{MINTIME}} AND tstamp<{{MAXTIME}} LIMIT {{LIMIT}}'
_t_getsize_by_prop_str_wminmax = 'SELECT COUNT(*) FROM {{TABLE}} WHERE prop={{PROP}} AND strval={{VALU}} AND tstamp>={{MINTIME}} AND tstamp<{{MAXTIME}} LIMIT {{LIMIT}}'
################################################################################
_t_getsize_by_range = 'SELECT COUNT(*) FROM {{TABLE}} WHERE prop={{PROP}} and intval >= {{MINVALU}} AND intval < {{MAXVALU}} LIMIT {{LIMIT}}'
_t_getsize_by_le = 'SELECT COUNT(*) FROM {{TABLE}} WHERE prop={{PROP}} and intval <= {{VALU}} LIMIT {{LIMIT}}'
_t_getsize_by_ge = 'SELECT COUNT(*) FROM {{TABLE}} WHERE prop={{PROP}} and intval >= {{VALU}} LIMIT {{LIMIT}}'
_t_delrows_by_iden = 'DELETE FROM {{TABLE}} WHERE iden={{IDEN}}'
_t_delrows_by_iden_prop = 'DELETE FROM {{TABLE}} WHERE iden={{IDEN}} AND prop={{PROP}}'
_t_delrows_by_iden_prop_strval = 'DELETE FROM {{TABLE}} WHERE iden={{IDEN}} AND prop={{PROP}} AND strval={{VALU}}'
_t_delrows_by_iden_prop_intval = 'DELETE FROM {{TABLE}} WHERE iden={{IDEN}} AND prop={{PROP}} AND intval={{VALU}}'
################################################################################
_t_delrows_by_prop = 'DELETE FROM {{TABLE}} WHERE prop={{PROP}}'
_t_delrows_by_prop_int = 'DELETE FROM {{TABLE}} WHERE prop={{PROP}} AND intval={{VALU}}'
_t_delrows_by_prop_str = 'DELETE FROM {{TABLE}} WHERE prop={{PROP}} AND strval={{VALU}}'
_t_delrows_by_prop_wmin = 'DELETE FROM {{TABLE}} WHERE prop={{PROP}} AND tstamp>={{MINTIME}}'
_t_delrows_by_prop_int_wmin = 'DELETE FROM {{TABLE}} WHERE prop={{PROP}} AND intval={{VALU}} AND tstamp>={{MINTIME}}'
_t_delrows_by_prop_str_wmin = 'DELETE FROM {{TABLE}} WHERE prop={{PROP}} AND strval={{VALU}} AND tstamp>={{MINTIME}}'
_t_delrows_by_prop_wmax = 'DELETE FROM {{TABLE}} WHERE prop={{PROP}} AND tstamp<{{MAXTIME}}'
_t_delrows_by_prop_int_wmax = 'DELETE FROM {{TABLE}} WHERE prop={{PROP}} AND intval={{VALU}} AND tstamp<{{MAXTIME}}'
_t_delrows_by_prop_str_wmax = 'DELETE FROM {{TABLE}} WHERE prop={{PROP}} AND strval={{VALU}} AND tstamp<{{MAXTIME}}'
_t_delrows_by_prop_wminmax = 'DELETE FROM {{TABLE}} WHERE prop={{PROP}} AND tstamp>={{MINTIME}} AND tstamp<{{MAXTIME}}'
_t_delrows_by_prop_int_wminmax = 'DELETE FROM {{TABLE}} WHERE prop={{PROP}} AND intval={{VALU}} AND tstamp>={{MINTIME}} AND tstamp<{{MAXTIME}}'
_t_delrows_by_prop_str_wminmax = 'DELETE FROM {{TABLE}} WHERE prop={{PROP}} AND strval={{VALU}} AND tstamp>={{MINTIME}} AND tstamp<{{MAXTIME}}'
################################################################################
_t_getjoin_by_prop = 'SELECT * FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} LIMIT {{LIMIT}})'
_t_getjoin_by_prop_int = 'SELECT * FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND intval={{VALU}} LIMIT {{LIMIT}})'
_t_getjoin_by_prop_str = 'SELECT * FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND strval={{VALU}} LIMIT {{LIMIT}})'
_t_getjoin_by_prop_wmin = 'SELECT * FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND tstamp>={{MINTIME}} LIMIT {{LIMIT}})'
_t_getjoin_by_prop_int_wmin = 'SELECT * FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND intval={{VALU}} AND tstamp>={{MINTIME}} LIMIT {{LIMIT}})'
_t_getjoin_by_prop_str_wmin = 'SELECT * FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND strval={{VALU}} AND tstamp>={{MINTIME}} LIMIT {{LIMIT}})'
_t_getjoin_by_prop_wmax = 'SELECT * FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND tstamp<{{MAXTIME}} LIMIT {{LIMIT}})'
_t_getjoin_by_prop_int_wmax = 'SELECT * FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND intval={{VALU}} AND tstamp<{{MAXTIME}} LIMIT {{LIMIT}})'
_t_getjoin_by_prop_str_wmax = 'SELECT * FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND strval={{VALU}} AND tstamp<{{MAXTIME}} LIMIT {{LIMIT}})'
_t_getjoin_by_prop_wminmax = 'SELECT * FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND tstamp>={{MINTIME}} AND tstamp<{{MAXTIME}} LIMIT {{LIMIT}})'
_t_getjoin_by_prop_int_wminmax = 'SELECT * FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND intval={{VALU}} AND tstamp>={{MINTIME}} AND tstamp<{{MAXTIME}} LIMIT {{LIMIT}})'
_t_getjoin_by_prop_str_wminmax = 'SELECT * FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND strval={{VALU}} AND tstamp>={{MINTIME}} AND tstamp<{{MAXTIME}} LIMIT {{LIMIT}})'
_t_getjoin_by_range_int = 'SELECT * FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND {{MINVALU}} <= intval AND intval < {{MAXVALU}} LIMIT {{LIMIT}})'
_t_getjoin_by_range_str = 'SELECT * FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND {{MINVALU}} <= strval AND strval < {{MAXVALU}} LIMIT {{LIMIT}})'
_t_getjoin_by_le_int = 'SELECT * FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND intval <= {{VALU}} LIMIT {{LIMIT}})'
_t_getjoin_by_ge_int = 'SELECT * FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND intval >= {{VALU}} LIMIT {{LIMIT}})'
################################################################################
_t_deljoin_by_prop = 'DELETE FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}})'
_t_deljoin_by_prop_int = 'DELETE FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND intval={{VALU}})'
_t_deljoin_by_prop_str = 'DELETE FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND strval={{VALU}})'
_t_deljoin_by_prop_wmin = 'DELETE FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND tstamp>={{MINTIME}} )'
_t_deljoin_by_prop_int_wmin = 'DELETE FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND intval={{VALU}} AND tstamp>={{MINTIME}} )'
_t_deljoin_by_prop_str_wmin = 'DELETE FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND strval={{VALU}} AND tstamp>={{MINTIME}} )'
_t_deljoin_by_prop_wmax = 'DELETE FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND tstamp<{{MAXTIME}} )'
_t_deljoin_by_prop_int_wmax = 'DELETE FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND intval={{VALU}} AND tstamp<{{MAXTIME}} )'
_t_deljoin_by_prop_str_wmax = 'DELETE FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND strval={{VALU}} AND tstamp<{{MAXTIME}} )'
_t_deljoin_by_prop_wminmax = 'DELETE FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND tstamp>={{MINTIME}} AND tstamp < {{MAXTIME}})'
_t_deljoin_by_prop_int_wminmax = 'DELETE FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND intval={{VALU}} AND tstamp>={{MINTIME}} AND tstamp<{{MAXTIME}})'
_t_deljoin_by_prop_str_wminmax = 'DELETE FROM {{TABLE}} WHERE iden IN (SELECT iden FROM {{TABLE}} WHERE prop={{PROP}} AND strval={{VALU}} AND tstamp>={{MINTIME}} AND tstamp<{{MAXTIME}})'
################################################################################
_t_uprows_by_iden_prop_str = 'UPDATE {{TABLE}} SET strval={{VALU}} WHERE iden={{IDEN}} AND prop={{PROP}}'
_t_uprows_by_iden_prop_int = 'UPDATE {{TABLE}} SET intval={{VALU}} WHERE iden={{IDEN}} AND prop={{PROP}}'
def _initDbInfo(self):
name = self._link[1].get('path')[1:]
if not name:
raise Exception('No Path Specified!')
if name.find(':') == -1:
name = genpath(name)
return {'name':name}
def _getCoreXact(self, size=None):
return CoreXact(self, size=size)
def _getDbLimit(self, limit):
if limit != None:
return limit
return self.dblim
def _rowsByRange(self, prop, valu, limit=None):
limit = self._getDbLimit(limit)
q = self._q_getrows_by_range
args = [ prop, valu[0], valu[1], limit ]
rows = self.select(q, prop=prop, minvalu=valu[0], maxvalu=valu[1], limit=limit)
return self._foldTypeCols(rows)
def _rowsByGe(self, prop, valu, limit=None):
limit = self._getDbLimit(limit)
q = self._q_getrows_by_ge
rows = self.select(q, prop=prop, valu=valu, limit=limit)
return self._foldTypeCols(rows)
def _rowsByLe(self, prop, valu, limit=None):
limit = self._getDbLimit(limit)
q = self._q_getrows_by_le
rows = self.select(q, prop=prop, valu=valu, limit=limit)
return self._foldTypeCols(rows)
def _sizeByRange(self, prop, valu, limit=None):
limit = self._getDbLimit(limit)
q = self._q_getsize_by_range
return self.select(q,prop=prop,minvalu=valu[0],maxvalu=valu[1],limit=limit)[0][0]
def _sizeByGe(self, prop, valu, limit=None):
limit = self._getDbLimit(limit)
q = self._q_getsize_by_ge
return self.select(q,prop=prop,valu=valu,limit=limit)[0][0]
def _sizeByLe(self, prop, valu, limit=None):
limit = self._getDbLimit(limit)
q = self._q_getsize_by_le
args = [ prop, valu, limit ]
return self.select(q,prop=prop,valu=valu,limit=limit)[0][0]
def _initDbConn(self):
dbinfo = self._initDbInfo()
dbname = dbinfo.get('name')
db = sqlite3.connect(dbname, check_same_thread=False)
db.isolation_level = None
def onfini():
db.close()
self.onfini(onfini)
return db
def _getTableName(self):
return 'syncortex'
def _addVarDecor(self, name):
return ':%s' % (name,)
def _initCortex(self):
self.initSizeBy('ge',self._sizeByGe)
self.initRowsBy('ge',self._rowsByGe)
self.initSizeBy('le',self._sizeByLe)
self.initRowsBy('le',self._rowsByLe)
self.initSizeBy('range',self._sizeByRange)
self.initRowsBy('range',self._rowsByRange)
self.initTufosBy('ge', self._tufosByGe)
self.initTufosBy('le', self._tufosByLe)
self.initTufosBy('range',self._tufosByRange)
# borrow the helpers from common
self.initTufosBy('gt', self._tufosByGt)
self.initTufosBy('lt', self._tufosByLt)
self.dbpool = self._link[1].get('dbpool')
if self.dbpool == None:
pool = int( self._link[1].get('pool',1) )
self.dbpool = DbPool(pool, self._initDbConn)
table = self._getTableName()
self._initCorQueries()
if not self._checkForTable( table ):
self._initCorTable( table )
def _prepQuery(self, query):
# prep query strings by replacing {{TABLE}} with the table name
# and each remaining {{NAME}} token with a db specific variable token
table = self._getTableName()
query = query.replace('{{TABLE}}',table)
for name in stashre.findall(query):
query = query.replace('{{%s}}' % name, self._addVarDecor(name.lower()))
return query
def _initCorQueries(self):
self._q_istable = self._prepQuery(self._t_istable)
self._q_inittable = self._prepQuery(self._t_inittable)
self._q_init_iden_idx = self._prepQuery(self._t_init_iden_idx)
self._q_init_prop_idx = self._prepQuery(self._t_init_prop_idx)
self._q_init_strval_idx = self._prepQuery(self._t_init_strval_idx)
self._q_init_intval_idx = self._prepQuery(self._t_init_intval_idx)
self._q_addrows = self._prepQuery(self._t_addrows)
self._q_getrows_by_iden = self._prepQuery(self._t_getrows_by_iden)
self._q_getrows_by_range = self._prepQuery(self._t_getrows_by_range)
self._q_getrows_by_ge = self._prepQuery(self._t_getrows_by_ge)
self._q_getrows_by_le = self._prepQuery(self._t_getrows_by_le)
self._q_getrows_by_iden_prop = self._prepQuery(self._t_getrows_by_iden_prop)
self._q_getrows_by_iden_prop_intval = self._prepQuery(self._t_getrows_by_iden_prop_intval)
self._q_getrows_by_iden_prop_strval = self._prepQuery(self._t_getrows_by_iden_prop_strval)
###################################################################################
self._q_getrows_by_prop = self._prepQuery(self._t_getrows_by_prop)
self._q_getrows_by_prop_wmin = self._prepQuery(self._t_getrows_by_prop_wmin)
self._q_getrows_by_prop_wmax = self._prepQuery(self._t_getrows_by_prop_wmax)
self._q_getrows_by_prop_wminmax = self._prepQuery(self._t_getrows_by_prop_wminmax)
###################################################################################
self._q_getrows_by_prop_int = self._prepQuery(self._t_getrows_by_prop_int)
self._q_getrows_by_prop_int_wmin = self._prepQuery(self._t_getrows_by_prop_int_wmin)
self._q_getrows_by_prop_int_wmax = self._prepQuery(self._t_getrows_by_prop_int_wmax)
self._q_getrows_by_prop_int_wminmax = self._prepQuery(self._t_getrows_by_prop_int_wminmax)
###################################################################################
self._q_getrows_by_prop_str = self._prepQuery(self._t_getrows_by_prop_str)
self._q_getrows_by_prop_str_wmin = self._prepQuery(self._t_getrows_by_prop_str_wmin)
self._q_getrows_by_prop_str_wmax = self._prepQuery(self._t_getrows_by_prop_str_wmax)
self._q_getrows_by_prop_str_wminmax = self._prepQuery(self._t_getrows_by_prop_str_wminmax)
###################################################################################
self._q_getjoin_by_prop = self._prepQuery(self._t_getjoin_by_prop)
self._q_getjoin_by_prop_wmin = self._prepQuery(self._t_getjoin_by_prop_wmin)
self._q_getjoin_by_prop_wmax = self._prepQuery(self._t_getjoin_by_prop_wmax)
self._q_getjoin_by_prop_wminmax = self._prepQuery(self._t_getjoin_by_prop_wminmax)
###################################################################################
self._q_getjoin_by_prop_int = self._prepQuery(self._t_getjoin_by_prop_int)
self._q_getjoin_by_prop_int_wmin = self._prepQuery(self._t_getjoin_by_prop_int_wmin)
self._q_getjoin_by_prop_int_wmax = self._prepQuery(self._t_getjoin_by_prop_int_wmax)
self._q_getjoin_by_prop_int_wminmax = self._prepQuery(self._t_getjoin_by_prop_int_wminmax)
###################################################################################
self._q_getjoin_by_prop_str = self._prepQuery(self._t_getjoin_by_prop_str)
self._q_getjoin_by_prop_str_wmin = self._prepQuery(self._t_getjoin_by_prop_str_wmin)
self._q_getjoin_by_prop_str_wmax = self._prepQuery(self._t_getjoin_by_prop_str_wmax)
self._q_getjoin_by_prop_str_wminmax = self._prepQuery(self._t_getjoin_by_prop_str_wminmax)
###################################################################################
self._q_getsize_by_prop = self._prepQuery(self._t_getsize_by_prop)
self._q_getsize_by_prop_wmin = self._prepQuery(self._t_getsize_by_prop_wmin)
self._q_getsize_by_prop_wmax = self._prepQuery(self._t_getsize_by_prop_wmax)
self._q_getsize_by_prop_wminmax = self._prepQuery(self._t_getsize_by_prop_wminmax)
###################################################################################
self._q_getsize_by_prop_int = self._prepQuery(self._t_getsize_by_prop_int)
self._q_getsize_by_prop_int_wmin = self._prepQuery(self._t_getsize_by_prop_int_wmin)
self._q_getsize_by_prop_int_wmax = self._prepQuery(self._t_getsize_by_prop_int_wmax)
self._q_getsize_by_prop_int_wminmax = self._prepQuery(self._t_getsize_by_prop_int_wminmax)
###################################################################################
self._q_getsize_by_prop_str = self._prepQuery(self._t_getsize_by_prop_str)
self._q_getsize_by_prop_str_wmin = self._prepQuery(self._t_getsize_by_prop_str_wmin)
self._q_getsize_by_prop_str_wmax = self._prepQuery(self._t_getsize_by_prop_str_wmax)
self._q_getsize_by_prop_str_wminmax = self._prepQuery(self._t_getsize_by_prop_str_wminmax)
###################################################################################
self.qbuild = {
'rowsbyprop':{
(none_t,none_t,none_t):self._q_getrows_by_prop,
(none_t,int_t,none_t):self._q_getrows_by_prop_wmin,
(none_t,none_t,int_t):self._q_getrows_by_prop_wmax,
(none_t,int_t,int_t):self._q_getrows_by_prop_wminmax,
(int_t,none_t,none_t):self._q_getrows_by_prop_int,
(int_t,int_t,none_t):self._q_getrows_by_prop_int_wmin,
(int_t,none_t,int_t):self._q_getrows_by_prop_int_wmax,
(int_t,int_t,int_t):self._q_getrows_by_prop_int_wminmax,
(str_t,none_t,none_t):self._q_getrows_by_prop_str,
(str_t,int_t,none_t):self._q_getrows_by_prop_str_wmin,
(str_t,none_t,int_t):self._q_getrows_by_prop_str_wmax,
(str_t,int_t,int_t):self._q_getrows_by_prop_str_wminmax,
},
'joinbyprop':{
(none_t,none_t,none_t):self._q_getjoin_by_prop,
(none_t,int_t,none_t):self._q_getjoin_by_prop_wmin,
(none_t,none_t,int_t):self._q_getjoin_by_prop_wmax,
(none_t,int_t,int_t):self._q_getjoin_by_prop_wminmax,
(int_t,none_t,none_t):self._q_getjoin_by_prop_int,
(int_t,int_t,none_t):self._q_getjoin_by_prop_int_wmin,
(int_t,none_t,int_t):self._q_getjoin_by_prop_int_wmax,
(int_t,int_t,int_t):self._q_getjoin_by_prop_int_wminmax,
(str_t,none_t,none_t):self._q_getjoin_by_prop_str,
(str_t,int_t,none_t):self._q_getjoin_by_prop_str_wmin,
(str_t,none_t,int_t):self._q_getjoin_by_prop_str_wmax,
(str_t,int_t,int_t):self._q_getjoin_by_prop_str_wminmax,
},
'sizebyprop':{
(none_t,none_t,none_t):self._q_getsize_by_prop,
(none_t,int_t,none_t):self._q_getsize_by_prop_wmin,
(none_t,none_t,int_t):self._q_getsize_by_prop_wmax,
(none_t,int_t,int_t):self._q_getsize_by_prop_wminmax,
(int_t,none_t,none_t):self._q_getsize_by_prop_int,
(int_t,int_t,none_t):self._q_getsize_by_prop_int_wmin,
(int_t,none_t,int_t):self._q_getsize_by_prop_int_wmax,
(int_t,int_t,int_t):self._q_getsize_by_prop_int_wminmax,
(str_t,none_t,none_t):self._q_getsize_by_prop_str,
(str_t,int_t,none_t):self._q_getsize_by_prop_str_wmin,
(str_t,none_t,int_t):self._q_getsize_by_prop_str_wmax,
(str_t,int_t,int_t):self._q_getsize_by_prop_str_wminmax,
},
'delrowsbyprop':{
(none_t,none_t,none_t):self._prepQuery(self._t_delrows_by_prop),
(none_t,int_t,none_t):self._prepQuery(self._t_delrows_by_prop_wmin),
(none_t,none_t,int_t):self._prepQuery(self._t_delrows_by_prop_wmax),
(none_t,int_t,int_t):self._prepQuery(self._t_delrows_by_prop_wminmax),
(int_t,none_t,none_t):self._prepQuery(self._t_delrows_by_prop_int),
(int_t,int_t,none_t):self._prepQuery(self._t_delrows_by_prop_int_wmin),
(int_t,none_t,int_t):self._prepQuery(self._t_delrows_by_prop_int_wmax),
(int_t,int_t,int_t):self._prepQuery(self._t_delrows_by_prop_int_wminmax),
(str_t,none_t,none_t):self._prepQuery(self._t_delrows_by_prop_str),
(str_t,int_t,none_t):self._prepQuery(self._t_delrows_by_prop_str_wmin),
(str_t,none_t,int_t):self._prepQuery(self._t_delrows_by_prop_str_wmax),
(str_t,int_t,int_t):self._prepQuery(self._t_delrows_by_prop_str_wminmax),
},
'deljoinbyprop':{
(none_t,none_t,none_t):self._prepQuery(self._t_deljoin_by_prop),
(none_t,int_t,none_t):self._prepQuery(self._t_deljoin_by_prop_wmin),
(none_t,none_t,int_t):self._prepQuery(self._t_deljoin_by_prop_wmax),
(none_t,int_t,int_t):self._prepQuery(self._t_deljoin_by_prop_wminmax),
(int_t,none_t,none_t):self._prepQuery(self._t_deljoin_by_prop_int),
(int_t,int_t,none_t):self._prepQuery(self._t_deljoin_by_prop_int_wmin),
(int_t,none_t,int_t):self._prepQuery(self._t_deljoin_by_prop_int_wmax),
(int_t,int_t,int_t):self._prepQuery(self._t_deljoin_by_prop_int_wminmax),
(str_t,none_t,none_t):self._prepQuery(self._t_deljoin_by_prop_str),
(str_t,int_t,none_t):self._prepQuery(self._t_deljoin_by_prop_str_wmin),
(str_t,none_t,int_t):self._prepQuery(self._t_deljoin_by_prop_str_wmax),
(str_t,int_t,int_t):self._prepQuery(self._t_deljoin_by_prop_str_wminmax),
}
}
self._q_getsize_by_prop = self._prepQuery(self._t_getsize_by_prop)
self._q_getsize_by_ge = self._prepQuery(self._t_getsize_by_ge)
self._q_getsize_by_le = self._prepQuery(self._t_getsize_by_le)
self._q_getsize_by_range = self._prepQuery(self._t_getsize_by_range)
self._q_delrows_by_iden = self._prepQuery(self._t_delrows_by_iden)
self._q_delrows_by_iden_prop = self._prepQuery(self._t_delrows_by_iden_prop)
self._q_delrows_by_iden_prop_intval = self._prepQuery(self._t_delrows_by_iden_prop_intval)
self._q_delrows_by_iden_prop_strval = self._prepQuery(self._t_delrows_by_iden_prop_strval)
self._q_uprows_by_iden_prop_str = self._prepQuery(self._t_uprows_by_iden_prop_str)
self._q_uprows_by_iden_prop_int = self._prepQuery(self._t_uprows_by_iden_prop_int)
self._q_getjoin_by_range_str = self._prepQuery(self._t_getjoin_by_range_str)
self._q_getjoin_by_range_int = self._prepQuery(self._t_getjoin_by_range_int)
self._q_getjoin_by_ge_int = self._prepQuery(self._t_getjoin_by_ge_int)
self._q_getjoin_by_le_int = self._prepQuery(self._t_getjoin_by_le_int)
def _checkForTable(self, name):
return len(self.select(self._q_istable, name=name))
def _initCorTable(self, name):
with self.getCoreXact() as xact:
xact.cursor.execute(self._q_inittable)
xact.cursor.execute(self._q_init_iden_idx)
xact.cursor.execute(self._q_init_prop_idx)
xact.cursor.execute(self._q_init_strval_idx)
xact.cursor.execute(self._q_init_intval_idx)
def _addRows(self, rows):
args = []
for i,p,v,t in rows:
if s_compat.isint(v):
args.append( {'iden':i, 'prop':p, 'intval':v, 'strval':None, 'tstamp':t} )
else:
args.append( {'iden':i, 'prop':p, 'intval':None, 'strval':v, 'tstamp':t} )
with self.getCoreXact() as xact:
xact.cursor.executemany( self._q_addrows, args )
def update(self, q, **args):
with self.getCoreXact() as xact:
xact.cursor.execute(q,args)
return xact.cursor.rowcount
def select(self, q, **args):
with self.getCoreXact() as xact:
xact.cursor.execute(q,args)
return xact.cursor.fetchall()
def delete(self, q, **args):
with self.getCoreXact() as xact:
xact.cursor.execute(q,args)
def _foldTypeCols(self, rows):
ret = []
for iden,prop,intval,strval,tstamp in rows:
if intval != None:
ret.append( (iden,prop,intval,tstamp) )
else:
ret.append( (iden,prop,strval,tstamp) )
return ret
def _getRowsById(self, iden):
rows = self.select(self._q_getrows_by_iden,iden=iden)
return self._foldTypeCols(rows)
def _getSizeByProp(self, prop, valu=None, limit=None, mintime=None, maxtime=None):
rows = self._runPropQuery('sizebyprop',prop,valu=valu,limit=limit,mintime=mintime,maxtime=maxtime)
return rows[0][0]
def _getRowsByProp(self, prop, valu=None, limit=None, mintime=None, maxtime=None):
rows = self._runPropQuery('rowsbyprop',prop,valu=valu,limit=limit,mintime=mintime,maxtime=maxtime)
return self._foldTypeCols(rows)
def _tufosByRange(self, prop, valu, limit=None):
if len(valu) != 2:
return []
minvalu,maxvalu = valu
if not s_compat.isint(minvalu) or not s_compat.isint(maxvalu):
raise Exception('by "range" requires (int,int)')
limit = self._getDbLimit(limit)
rows = self.select(self._q_getjoin_by_range_int, prop=prop, minvalu=minvalu, maxvalu=maxvalu, limit=limit)
rows = self._foldTypeCols(rows)
return self._rowsToTufos(rows)
def _tufosByLe(self, prop, valu, limit=None):
valu,_ = self.getPropFrob(prop,valu)
limit = self._getDbLimit(limit)
rows = self.select(self._q_getjoin_by_le_int, prop=prop, valu=valu, limit=limit)
rows = self._foldTypeCols(rows)
return self._rowsToTufos(rows)
def _tufosByGe(self, prop, valu, limit=None):
valu,_ = self.getPropFrob(prop,valu)
limit = self._getDbLimit(limit)
rows = self.select(self._q_getjoin_by_ge_int, prop=prop, valu=valu, limit=limit)
rows = self._foldTypeCols(rows)
return self._rowsToTufos(rows)
def _runPropQuery(self, name, prop, valu=None, limit=None, mintime=None, maxtime=None, meth=None, nolim=False):
limit = self._getDbLimit(limit)
qkey = (s_compat.typeof(valu),s_compat.typeof(mintime),s_compat.typeof(maxtime))
qstr = self.qbuild[name][qkey]
if meth == None:
meth = self.select
rows = meth(qstr, prop=prop, valu=valu, limit=limit, mintime=mintime, maxtime=maxtime)
return rows
def _delRowsByIdProp(self, iden, prop, valu=None):
if valu == None:
return self.delete( self._q_delrows_by_iden_prop, iden=iden, prop=prop )
if s_compat.isint(valu):
return self.delete( self._q_delrows_by_iden_prop_intval, iden=iden, prop=prop, valu=valu )
else:
return self.delete( self._q_delrows_by_iden_prop_strval, iden=iden, prop=prop, valu=valu )
def _getRowsByIdProp(self, iden, prop, valu=None):
if valu == None:
rows = self.select( self._q_getrows_by_iden_prop, iden=iden, prop=prop)
return self._foldTypeCols(rows)
if s_compat.isint(valu):
rows = self.select( self._q_getrows_by_iden_prop_intval, iden=iden, prop=prop, valu=valu)
return self._foldTypeCols(rows)
else:
rows = self.select( self._q_getrows_by_iden_prop_strval, iden=iden, prop=prop, valu=valu)
return self._foldTypeCols(rows)
def _setRowsByIdProp(self, iden, prop, valu):
if s_compat.isint(valu):
count = self.update( self._q_uprows_by_iden_prop_int, iden=iden, prop=prop, valu=valu )
else:
count = self.update( self._q_uprows_by_iden_prop_str, iden=iden, prop=prop, valu=valu )
if count == 0:
rows = [ (iden,prop,valu,now()), ]
self._addRows(rows)
def _delRowsById(self, iden):
self.delete(self._q_delrows_by_iden, iden=iden)
def _delJoinByProp(self, prop, valu=None, mintime=None, maxtime=None):
self._runPropQuery('deljoinbyprop',prop,valu=valu,mintime=mintime,maxtime=maxtime,meth=self.delete, nolim=True)
def _getJoinByProp(self, prop, valu=None, mintime=None, maxtime=None, limit=None):
rows = self._runPropQuery('joinbyprop',prop,valu=valu,limit=limit,mintime=mintime,maxtime=maxtime)
return self._foldTypeCols(rows)
def _delRowsByProp(self, prop, valu=None, mintime=None, maxtime=None):
self._runPropQuery('delrowsbyprop',prop,valu=valu,mintime=mintime,maxtime=maxtime,meth=self.delete, nolim=True)
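# Illustrative sketch (standalone helpers, not part of the storage class above):
# the two mechanisms this backend leans on. First, _prepQuery-style substitution
# turns a {{...}} template into a concrete statement with ':name' variable tokens
# (the 'syncortex' table name and the regex here are assumptions for the demo).
# Second, a qbuild-style table keyed by (type of valu, type of mintime, type of
# maxtime) lets _runPropQuery pick a prepared statement without building SQL.
import re
_demo_stashre = re.compile('{{([A-Z]+)}}')
def _demo_prep_query(query, table='syncortex'):
    # replace the table token first, then each remaining {{NAME}} token
    query = query.replace('{{TABLE}}', table)
    for name in _demo_stashre.findall(query):
        query = query.replace('{{%s}}' % name, ':%s' % name.lower())
    return query
def _demo_pick_query(qbuild, name, valu=None, mintime=None, maxtime=None):
    # mirrors the (valu, mintime, maxtime) type key used by _runPropQuery above
    qkey = (type(valu), type(mintime), type(maxtime))
    return qbuild[name][qkey]
# _demo_prep_query('SELECT * FROM {{TABLE}} WHERE prop={{PROP}} LIMIT {{LIMIT}}')
#   -> 'SELECT * FROM syncortex WHERE prop=:prop LIMIT :limit'
# _demo_pick_query(qbuild, 'rowsbyprop', valu=20, maxtime=1234) would return the
#   prepared prop+intval+"tstamp<maxtime" statement from the table built above.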
|
|
# Copyright (c) 2014 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import six
from six.moves.urllib import parse
import jsonschema
from oslo_log import log as logging
from designate import exceptions
from designate.schema import validators
from designate.schema import format
LOG = logging.getLogger(__name__)
class NotSpecifiedSentinel:
pass
def get_attrname(name):
"""Return the mangled name of the attribute's underlying storage."""
return '_obj_field_%s' % name
def make_class_properties(cls):
"""Build getter and setter methods for all of the object's attributes"""
# Prepare an empty dict to gather the merged/final set of fields
fields = {}
# Add each supercls's fields
for supercls in cls.mro()[1:-1]:
if not hasattr(supercls, 'FIELDS'):
continue
fields.update(supercls.FIELDS)
# Add our fields
fields.update(cls.FIELDS)
# Store the results
cls.FIELDS = fields
for field in six.iterkeys(cls.FIELDS):
def getter(self, name=field):
self._obj_check_relation(name)
return getattr(self, get_attrname(name), None)
def setter(self, value, name=field):
if (self.obj_attr_is_set(name) and value != getattr(self, name)
or not self.obj_attr_is_set(name)):
self._obj_changes.add(name)
if (self.obj_attr_is_set(name) and value != getattr(self, name)
and name not in list(six.iterkeys(
self._obj_original_values))):
self._obj_original_values[name] = getattr(self, name)
return setattr(self, get_attrname(name), value)
setattr(cls, field, property(getter, setter))
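# Illustrative sketch (hypothetical _FieldDemo class, not a Designate object): what
# make_class_properties generates for a FIELDS entry. Real objects get this wiring
# from the metaclass below; the call is made explicitly here so the demo stands alone.
class _FieldDemo(object):
    FIELDS = {'ttl': {}}
    def __init__(self):
        self._obj_changes = set()
        self._obj_original_values = {}
    def obj_attr_is_set(self, name):
        return hasattr(self, get_attrname(name))
    def _obj_check_relation(self, name):
        pass
make_class_properties(_FieldDemo)
# demo = _FieldDemo()
# demo.ttl = 300      # stored on the instance as _obj_field_ttl
# demo._obj_changes   -> {'ttl'}
# demo.ttl            -> 300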
def _schema_ref_resolver(uri):
"""
Fetches a DesignateObject's schema from a JSON Schema Reference URI
Sample URI: obj://ObjectName#/subpathA/subpathB
"""
obj_name = parse.urlsplit(uri).netloc
obj = DesignateObject.obj_cls_from_name(obj_name)
return obj.obj_get_schema()
def make_class_validator(obj):
schema = {
'$schema': 'http://json-schema.org/draft-04/hyper-schema',
'title': obj.obj_name(),
'description': 'Designate %s Object' % obj.obj_name(),
}
if isinstance(obj, ListObjectMixin):
schema['type'] = 'array'
schema['items'] = make_class_validator(obj.LIST_ITEM_TYPE)
else:
schema['type'] = 'object'
schema['additionalProperties'] = False
schema['required'] = []
schema['properties'] = {}
for name, properties in obj.FIELDS.items():
if properties.get('relation', False):
if obj.obj_attr_is_set(name):
schema['properties'][name] = \
make_class_validator(getattr(obj, name))
else:
schema['properties'][name] = properties.get('schema', {})
if properties.get('required', False):
schema['required'].append(name)
resolver = jsonschema.RefResolver.from_schema(
schema, handlers={'obj': _schema_ref_resolver})
obj._obj_validator = validators.Draft4Validator(
schema, resolver=resolver, format_checker=format.draft4_format_checker)
return schema
class DesignateObjectMetaclass(type):
def __init__(cls, names, bases, dict_):
if not hasattr(cls, '_obj_classes'):
# This means we're working on the base DesignateObject class,
# and can skip the remaining Metaclass functionality
cls._obj_classes = {}
return
make_class_properties(cls)
# Add a reference to the finished class into the _obj_classes
# dictionary, allowing us to lookup classes by their name later - this
# is useful for e.g. referencing another DesignateObject in a
# validation schema.
if cls.obj_name() not in cls._obj_classes:
cls._obj_classes[cls.obj_name()] = cls
else:
raise Exception("Duplicate DesignateObject with name '%(name)s'" %
{'name': cls.obj_name()})
@six.add_metaclass(DesignateObjectMetaclass)
class DesignateObject(object):
FIELDS = {}
def _obj_check_relation(self, name):
if name in self.FIELDS and self.FIELDS[name].get('relation', False):
if not self.obj_attr_is_set(name):
raise exceptions.RelationNotLoaded
@classmethod
def obj_cls_from_name(cls, name):
"""Retrieves an object cls from the registry by name and returns it."""
return cls._obj_classes[name]
@classmethod
def from_primitive(cls, primitive):
"""
Construct an object from primitive types
This is used while deserializing the object.
"""
objcls = cls.obj_cls_from_name(primitive['designate_object.name'])
return objcls._obj_from_primitive(primitive)
@classmethod
def _obj_from_primitive(cls, primitive):
instance = cls()
for field, value in primitive['designate_object.data'].items():
if isinstance(value, dict) and 'designate_object.name' in value:
setattr(instance, field, DesignateObject.from_primitive(value))
else:
setattr(instance, field, value)
instance._obj_changes = set(primitive['designate_object.changes'])
instance._obj_original_values = \
primitive['designate_object.original_values']
return instance
@classmethod
def from_dict(cls, _dict):
instance = cls()
for field, value in _dict.items():
if (field in instance.FIELDS and
instance.FIELDS[field].get('relation', False)):
relation_cls_name = instance.FIELDS[field]['relation_cls']
# We're dealing with a relation, we'll want to create the
# correct object type and recurse
relation_cls = cls.obj_cls_from_name(relation_cls_name)
if isinstance(value, list):
setattr(instance, field, relation_cls.from_list(value))
else:
setattr(instance, field, relation_cls.from_dict(value))
else:
setattr(instance, field, value)
return instance
@classmethod
def from_list(cls, _list):
raise NotImplementedError()
@classmethod
def obj_name(cls):
"""Return a canonical name for this object which will be used over
the wire and in validation schemas.
"""
return cls.__name__
@classmethod
def obj_get_schema(cls):
"""Returns the JSON Schema for this Object."""
return cls._obj_validator.schema
def __init__(self, **kwargs):
self._obj_changes = set()
self._obj_original_values = dict()
for name, value in kwargs.items():
if name in list(six.iterkeys(self.FIELDS)):
setattr(self, name, value)
else:
raise TypeError("__init__() got an unexpected keyword "
"argument '%(name)s'" % {'name': name})
def to_primitive(self):
"""
Convert the object to primitive types so that the object can be
serialized.
NOTE: Currently all the designate objects contain primitive types that
do not need special handling. If this changes we need to modify this
function.
"""
data = {}
for field in six.iterkeys(self.FIELDS):
if self.obj_attr_is_set(field):
if isinstance(getattr(self, field), DesignateObject):
data[field] = getattr(self, field).to_primitive()
else:
data[field] = getattr(self, field)
return {
'designate_object.name': self.obj_name(),
'designate_object.data': data,
'designate_object.changes': sorted(self._obj_changes),
'designate_object.original_values': dict(self._obj_original_values)
}
def to_dict(self):
"""Convert the object to a simple dictionary."""
data = {}
for field in six.iterkeys(self.FIELDS):
if self.obj_attr_is_set(field):
if isinstance(getattr(self, field), ListObjectMixin):
data[field] = getattr(self, field).to_list()
elif isinstance(getattr(self, field), DesignateObject):
data[field] = getattr(self, field).to_dict()
else:
data[field] = getattr(self, field)
return data
def update(self, values):
"""Update an object's fields with the supplied key/value pairs."""
for k, v in values.items():
setattr(self, k, v)
@property
def is_valid(self):
"""Returns True if the Object is valid."""
make_class_validator(self)
return self._obj_validator.is_valid(self.to_dict())
def validate(self):
make_class_validator(self)
# NOTE(kiall): We make use of the Object registry here in order to
# avoid an impossible circular import.
ValidationErrorList = self.obj_cls_from_name('ValidationErrorList')
ValidationError = self.obj_cls_from_name('ValidationError')
values = self.to_dict()
errors = ValidationErrorList()
LOG.debug("Validating '%(name)s' object with values: %(values)r", {
'name': self.obj_name(),
'values': values,
})
for error in self._obj_validator.iter_errors(values):
errors.append(ValidationError.from_js_error(error))
if len(errors) > 0:
raise exceptions.InvalidObject(
"Provided object does not match "
"schema", errors=errors, object=self)
def obj_attr_is_set(self, name):
"""
Return True or False depending on whether a particular attribute
has had its value explicitly set.
"""
return hasattr(self, get_attrname(name))
def obj_what_changed(self):
"""Returns a set of fields that have been modified."""
return set(self._obj_changes)
def obj_get_changes(self):
"""Returns a dict of changed fields and their new values."""
changes = {}
for key in self.obj_what_changed():
changes[key] = getattr(self, key)
return changes
def obj_reset_changes(self, fields=None):
"""Reset the list of fields that have been changed."""
if fields:
self._obj_changes -= set(fields)
for field in fields:
self._obj_original_values.pop(field, None)
else:
self._obj_changes.clear()
self._obj_original_values = dict()
def obj_get_original_value(self, field):
"""Returns the original value of a field."""
if field in list(six.iterkeys(self._obj_original_values)):
return self._obj_original_values[field]
elif self.obj_attr_is_set(field):
return getattr(self, field)
else:
raise KeyError(field)
def __setattr__(self, name, value):
"""Enforces all object attributes are private or well defined"""
if name[0:5] == '_obj_' or name in list(six.iterkeys(self.FIELDS)) \
or name == 'FIELDS':
super(DesignateObject, self).__setattr__(name, value)
else:
raise AttributeError(
"Designate object '%(type)s' has no attribute '%(name)s'" % {
'type': self.obj_name(),
'name': name,
})
def __deepcopy__(self, memodict=None):
"""
Efficiently make a deep copy of this object.
"Efficiently" is used here as a relative term; this will be faster
than allowing python to naively deepcopy the object.
"""
memodict = memodict or {}
c_obj = self.__class__()
for field in six.iterkeys(self.FIELDS):
if self.obj_attr_is_set(field):
c_field = copy.deepcopy(getattr(self, field), memodict)
setattr(c_obj, field, c_field)
c_obj._obj_changes = set(self._obj_changes)
return c_obj
def __eq__(self, other):
if self.__class__ != other.__class__:
return False
return self.to_primitive() == other.to_primitive()
def __ne__(self, other):
return not(self.__eq__(other))
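# Illustrative sketch (hypothetical _ExampleObject class, not shipped by Designate):
# a minimal one-field object showing the change tracking and the primitive round
# trip described above. The schema fragment is only for the demo.
class _ExampleObject(DesignateObject):
    FIELDS = {
        'name': {'schema': {'type': 'string'}, 'required': True},
    }
# obj = _ExampleObject(name='www.example.org.')
# obj.obj_what_changed()                        -> {'name'}
# prim = obj.to_primitive()                     # tagged with 'designate_object.name'
# DesignateObject.from_primitive(prim) == obj   -> True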
class DictObjectMixin(object):
"""
Mixin to allow DesignateObjects to behave like dictionaries
Eventually, this should be removed as other code is updated to use object
rather than dictionary accessors.
"""
def __getitem__(self, key):
return getattr(self, key)
def __setitem__(self, key, value):
setattr(self, key, value)
def __contains__(self, item):
return item in list(six.iterkeys(self.FIELDS))
def get(self, key, default=NotSpecifiedSentinel):
if key not in list(six.iterkeys(self.FIELDS)):
raise AttributeError("'%s' object has no attribute '%s'" % (
self.__class__, key))
if default != NotSpecifiedSentinel and not self.obj_attr_is_set(key):
return default
else:
return getattr(self, key)
def iteritems(self):
for field in six.iterkeys(self.FIELDS):
if self.obj_attr_is_set(field):
yield field, getattr(self, field)
def __iter__(self):
for field in six.iterkeys(self.FIELDS):
if self.obj_attr_is_set(field):
yield field, getattr(self, field)
items = lambda self: list(self.iteritems())
class ListObjectMixin(object):
"""Mixin to allow DesignateObjects to behave like python lists."""
FIELDS = {
'objects': {
'relation': True
}
}
LIST_ITEM_TYPE = DesignateObject
@classmethod
def _obj_from_primitive(cls, primitive):
instance = cls()
for field, value in primitive['designate_object.data'].items():
if field == 'objects':
instance.objects = [DesignateObject.from_primitive(v) for v in
value]
elif isinstance(value, dict) and 'designate_object.name' in value:
setattr(instance, field, DesignateObject.from_primitive(value))
else:
setattr(instance, field, value)
instance._obj_changes = set(primitive['designate_object.changes'])
instance._obj_original_values = \
primitive['designate_object.original_values']
return instance
@classmethod
def from_list(cls, _list):
instance = cls()
for item in _list:
instance.append(cls.LIST_ITEM_TYPE.from_dict(item))
return instance
def to_list(self):
list_ = []
for item in self.objects:
if isinstance(item, ListObjectMixin):
list_.append(item.to_list())
elif isinstance(item, DesignateObject):
list_.append(item.to_dict())
else:
list_.append(item)
return list_
def __init__(self, *args, **kwargs):
super(ListObjectMixin, self).__init__(*args, **kwargs)
if 'objects' not in kwargs:
self.objects = []
self.obj_reset_changes(['objects'])
def to_primitive(self):
data = {}
for field in six.iterkeys(self.FIELDS):
if self.obj_attr_is_set(field):
if field == 'objects':
data[field] = [o.to_primitive() for o in self.objects]
elif isinstance(getattr(self, field), DesignateObject):
data[field] = getattr(self, field).to_primitive()
else:
data[field] = getattr(self, field)
return {
'designate_object.name': self.obj_name(),
'designate_object.data': data,
'designate_object.changes': list(self._obj_changes),
'designate_object.original_values': dict(self._obj_original_values)
}
def __iter__(self):
"""List iterator interface"""
return iter(self.objects)
def __len__(self):
"""List length"""
return len(self.objects)
def __getitem__(self, index):
"""List index access"""
if isinstance(index, slice):
new_obj = self.__class__()
new_obj.objects = self.objects[index]
new_obj.obj_reset_changes()
return new_obj
return self.objects[index]
def __setitem__(self, index, value):
"""Set list index value"""
self.objects[index] = value
def __contains__(self, value):
"""List membership test"""
return value in self.objects
def append(self, value):
"""Append a value to the list"""
return self.objects.append(value)
def extend(self, values):
"""Extend the list by appending all the items in the given list"""
return self.objects.extend(values)
def pop(self, index):
"""Pop a value from the list"""
return self.objects.pop(index)
def insert(self, index, value):
"""Insert a value into the list at the given index"""
return self.objects.insert(index, value)
def remove(self, value):
"""Remove a value from the list"""
return self.objects.remove(value)
def index(self, value):
"""List index of value"""
return self.objects.index(value)
def count(self, value):
"""List count of value occurrences"""
return self.objects.count(value)
def sort(self, cmp=None, key=None, reverse=False):
self.objects.sort(cmp=cmp, key=key, reverse=reverse)
def obj_what_changed(self):
changes = set(self._obj_changes)
for item in self.objects:
if item.obj_what_changed():
changes.add('objects')
return changes
class PersistentObjectMixin(object):
"""
Mixin class for Persistent objects.
This adds the fields that we use in common for all persistent objects.
"""
FIELDS = {
'id': {
'schema': {
'type': 'string',
'format': 'uuid',
},
'read_only': True
},
'created_at': {
'schema': {
'type': 'string',
'format': 'date-time',
},
'read_only': True
},
'updated_at': {
'schema': {
'type': ['string', 'null'],
'format': 'date-time',
},
'read_only': True
},
'version': {
'schema': {
'type': 'integer',
},
'read_only': True
}
}
class SoftDeleteObjectMixin(object):
"""
Mixin class for Soft-Deleted objects.
This adds the fields that we use in common for all soft-deleted objects.
"""
FIELDS = {
'deleted': {
'schema': {
'type': ['string', 'integer'],
},
'read_only': True
},
'deleted_at': {
'schema': {
'type': ['string', 'null'],
'format': 'date-time',
},
'read_only': True
}
}
class PagedListObjectMixin(object):
"""
Mixin class for List objects.
This adds fields that would populate API metadata for collections.
"""
FIELDS = {
'total_count': {
'schema': {
'type': ['integer'],
}
}
}
|
|
from device import Device
from packet import RoutingPacket
from event import Event
from blackwidow.network.rate_graph import Rate_Graph
ROUTING_PKT_ID = 'Routing Packet'
class Router(Device):
"""Class for routers.
Routers are responsible for initializing and updating their
routing table, and sending packets based on their routing table.
Parameters
----------
router_id : string
A unique id for the router.
Attributes
----------
network_id : string
A unique id of the device in the network.
links : list
A list of links that the router is connected to.
routing_table : dict
A dictionary representing the router's routing table.
new_routing_table : dict
A dictionary representing the router's new routing table.
env : `Network`
The network that the router belongs to.
bw : `Blackwidow`
BlackWidow simulation object containing simulation settings.
send_rate : Rate_Graph object
Send rate graphing object.
receive_rate : Rate_Graph object
Receive rate graphing object.
Methods
-------
add_link(link)
Adds a link to the router.
send(packet)
Sends a packet to a link.
receive(packet)
Receives a packet from a link.
start_new_routing()
Starts a new routing round.
send_routing()
Sends a routing packet to all neighbors.
update_route()
Update the new_routing_table based on routing packets.
_distance(link)
Gets the distance of a link.
"""
def __init__(self, router_id, env, bw):
"""Constructor for Router class."""
super(Router, self).__init__(router_id)
self.env = env
self.bw = bw
self._routing_table = {}
self._new_routing_table = {}
self._send_rate = Rate_Graph(router_id,
"router {0} send rate".format(router_id),
self.env,
self.bw)
self._receive_rate = Rate_Graph(router_id,
"router {0} receive"
" rate".format(router_id),
self.env,
self.bw)
self.env.add_event(Event("{} sent routing"
" packet".format(self._network_id),
self._network_id,
self.start_new_routing),
0)
def add_link(self, link):
"""Overrides Device.add_link() to add to routing table.
Parameters
----------
link : Link
The link to add to the router.
"""
self._links.append(link)
network_id = link._device_a.network_id
if (network_id == self._network_id):
network_id = link._device_b.network_id
self._routing_table[network_id] = {'link': link,
'distance': self._distance(link)}
self._new_routing_table[network_id] = \
{'link': link, 'distance': self._distance(link)}
def send(self, packet):
"""Send packet to appropriate link.
First looks in the new routing table to see if we know how to reach
it there. Otherwise uses the old routing table.
Parameters
----------
packet : Packet
Packet to send through the router.
"""
route = None
self._send_rate.add_point(packet, self.env.time)
if packet.dest.network_id in self._new_routing_table:
route = self._new_routing_table[packet.dest.network_id]
elif packet.dest.network_id in self._routing_table:
route = self._routing_table[packet.dest.network_id]
if route is not None and 'link' in route:
route['link'].receive(packet, self._network_id)
def receive(self, packet):
"""Process packet by sending it out.
If the packet is routing, calls update_route to update the
new_routing_table.
Parameters
----------
packet : Packet
Received packet.
"""
self._receive_rate.add_point(packet, self.env.time)
if packet.is_routing:
self.update_route(packet)
print("{} received routing packet from {}".format(self._network_id, packet.src))
else:
self.send(packet)
def start_new_routing(self):
"""Start a new routing round.
If there is dynamic routing, updates the routing table to the new
routing table built up by dynamic routing and measures the distance
for each link.
"""
# Reset routing table if dynamic routing.
if not self.bw.static_routing:
self._new_routing_table = {}
for link in self._links:
link.measure_distance()
network_id = link._device_a.network_id
if (network_id == self._network_id):
network_id = link._device_b.network_id
self._new_routing_table[network_id] = \
{'link': link, 'distance': self._distance(link)}
self._routing_table = self._new_routing_table
if self.env.time < 500:
self.env.add_event(Event("{} reset its routing"
" table.".format(self._network_id),
self._network_id,
self.start_new_routing),
10)
else:
self.env.add_event(Event("{} reset its routing"
" table.".format(self._network_id),
self._network_id,
self.start_new_routing),
5000)
self.send_routing()
def send_routing(self):
"""Send routing packets to all neighbors."""
for link in self._links:
other_device = link._device_a
if (other_device.network_id == self._network_id):
other_device = link._device_b
if type(other_device) is Router:
packet = RoutingPacket(ROUTING_PKT_ID, self._network_id,
other_device.network_id, None,
self._new_routing_table,
self.bw.routing_packet_size)
link.receive(packet, self._network_id)
print("Sent routing packet from {}".format(self._network_id))
def update_route(self, packet):
"""Update routing table.
Goes through the routing table contained in the routing packet and
determines if it contains a better way to get to each destination.
This uses a distributed version of the Bellman-Ford algorithm.
Parameters
----------
packet : Packet
Routing packet to update the route.
"""
link = None
if packet.src in self._new_routing_table:
route = self._new_routing_table[packet.src]
if 'link' in route:
link = route['link']
else:
raise ValueError('{} not found in {} \'s routing table.'.format(
packet.src, self._network_id))
route_changed = False
for dest, route in packet.routing_table.items():
distance = route['distance'] + link.distance
if dest not in self._new_routing_table:
self._new_routing_table[dest] = {'link': link,
'distance': distance}
route_changed = True
elif distance < self._new_routing_table[dest]['distance']:
self._new_routing_table[dest] = {'link': link,
'distance': distance}
route_changed = True
if route_changed:
self.send_routing()
def _distance(self, link):
"""Get the distance of the link.
Parameters
----------
link : Link
Link to get distance of.
"""
distance = link.delay + link.get_buffer_size() / float(link.rate)
if self.bw.static_routing:
distance = link.delay
return distance
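# Illustrative sketch (standalone function, not part of the simulator): the
# Bellman-Ford relaxation step update_route performs for each routing packet,
# using plain dicts in place of Router/Link objects. link_distance stands for
# the distance of the link the packet arrived on.
def _demo_relax(my_table, neighbor_table, link, link_distance):
    changed = False
    for dest, route in neighbor_table.items():
        distance = route['distance'] + link_distance
        if dest not in my_table or distance < my_table[dest]['distance']:
            my_table[dest] = {'link': link, 'distance': distance}
            changed = True
    return changed
# table = {'H1': {'link': 'L0', 'distance': 1.0}}
# _demo_relax(table, {'H2': {'link': 'L9', 'distance': 2.0}}, 'L0', 1.0)
# -> True, and table now reaches H2 via L0 with distance 3.0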
|
|
"""
Manage Dell DRAC from the Master
The login credentials need to be configured in the Salt master
configuration file.
.. code-block:: yaml
drac:
username: admin
password: secret
"""
import logging
try:
import paramiko
HAS_PARAMIKO = True
except ImportError:
HAS_PARAMIKO = False
log = logging.getLogger(__name__)
def __virtual__():
if HAS_PARAMIKO:
return True
return (
False,
"The drac runner module cannot be loaded: paramiko package is not installed.",
)
def __connect(hostname, timeout=20, username=None, password=None):
"""
Connect to the DRAC
"""
drac_cred = __opts__.get("drac")
err_msg = (
"No drac login credentials found. Please add the 'username' and 'password' "
"fields beneath a 'drac' key in the master configuration file. Or you can "
"pass in a username and password as kwargs at the CLI."
)
if not username:
if drac_cred is None:
log.error(err_msg)
return False
username = drac_cred.get("username", None)
if not password:
if drac_cred is None:
log.error(err_msg)
return False
password = drac_cred.get("password", None)
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
try:
client.connect(hostname, username=username, password=password, timeout=timeout)
except Exception as e: # pylint: disable=broad-except
log.error("Unable to connect to %s: %s", hostname, e)
return False
return client
def __version(client):
"""
Grab DRAC version
"""
versions = {
9: "CMC",
8: "iDRAC6",
10: "iDRAC6",
11: "iDRAC6",
16: "iDRAC7",
17: "iDRAC7",
}
if isinstance(client, paramiko.SSHClient):
(stdin, stdout, stderr) = client.exec_command("racadm getconfig -g idRacInfo")
for i in stdout.readlines():
if i[2:].startswith("idRacType"):
return versions.get(int(i[2:].split("=")[1]), None)
return None
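# Illustrative sketch (not a Salt runner function): the idRacType parsing used by
# __version above, applied to one assumed line of "racadm getconfig -g idRacInfo"
# output. racadm prefixes each returned line, hence the [2:] slicing used above.
def _demo_parse_rac_type(line, versions):
    if line[2:].startswith("idRacType"):
        return versions.get(int(line[2:].split("=")[1]), None)
    return None
# _demo_parse_rac_type("# idRacType=16", {16: "iDRAC7"})  -> "iDRAC7"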
def pxe(hostname, timeout=20, username=None, password=None):
"""
Connect to the Dell DRAC and have the boot order set to PXE
and power cycle the system to PXE boot
CLI Example:
.. code-block:: bash
salt-run drac.pxe example.com
"""
_cmds = [
"racadm config -g cfgServerInfo -o cfgServerFirstBootDevice pxe",
"racadm config -g cfgServerInfo -o cfgServerBootOnce 1",
"racadm serveraction powercycle",
]
client = __connect(hostname, timeout, username, password)
if isinstance(client, paramiko.SSHClient):
for i, cmd in enumerate(_cmds, 1):
log.info("Executing command %s", i)
(stdin, stdout, stderr) = client.exec_command(cmd)
if "successful" in stdout.readline():
log.info("Executing command: %s", cmd)
else:
log.error("Unable to execute: %s", cmd)
return False
return True
def reboot(hostname, timeout=20, username=None, password=None):
"""
Reboot a server using the Dell DRAC
CLI Example:
.. code-block:: bash
salt-run drac.reboot example.com
"""
client = __connect(hostname, timeout, username, password)
if isinstance(client, paramiko.SSHClient):
(stdin, stdout, stderr) = client.exec_command("racadm serveraction powercycle")
if "successful" in stdout.readline():
log.info("powercycle successful")
else:
log.error("powercycle racadm command failed")
return False
else:
log.error("client was not of type paramiko.SSHClient")
return False
return True
def poweroff(hostname, timeout=20, username=None, password=None):
"""
Power server off
CLI Example:
.. code-block:: bash
salt-run drac.poweroff example.com
"""
client = __connect(hostname, timeout, username, password)
if isinstance(client, paramiko.SSHClient):
(stdin, stdout, stderr) = client.exec_command("racadm serveraction powerdown")
if "successful" in stdout.readline():
log.info("powerdown successful")
else:
log.error("powerdown racadm command failed")
return False
else:
log.error("client was not of type paramiko.SSHClient")
return False
return True
def poweron(hostname, timeout=20, username=None, password=None):
"""
Power server on
CLI Example:
.. code-block:: bash
salt-run drac.poweron example.com
"""
client = __connect(hostname, timeout, username, password)
if isinstance(client, paramiko.SSHClient):
(stdin, stdout, stderr) = client.exec_command("racadm serveraction powerup")
if "successful" in stdout.readline():
log.info("powerup successful")
else:
log.error("powerup racadm command failed")
return False
else:
log.error("client was not of type paramiko.SSHClient")
return False
return True
def version(hostname, timeout=20, username=None, password=None):
"""
Display the version of DRAC
CLI Example:
.. code-block:: bash
salt-run drac.version example.com
"""
return __version(__connect(hostname, timeout, username, password))
|
|
import pytest
pytest.importorskip("numpy")
import numpy as np
import dask
import dask.array as da
from dask.array.core import getter, getter_nofancy
from dask.array.optimization import (
fuse_slice,
getitem,
optimize,
optimize_blockwise,
optimize_slices,
)
from dask.array.utils import assert_eq
from dask.highlevelgraph import HighLevelGraph
from dask.optimization import fuse
from dask.utils import SerializableLock
def test_fuse_getitem():
pairs = [
(
(getter, (getter, "x", slice(1000, 2000)), slice(15, 20)),
(getter, "x", slice(1015, 1020)),
),
(
(
getitem,
(getter, "x", (slice(1000, 2000), slice(100, 200))),
(slice(15, 20), slice(50, 60)),
),
(getter, "x", (slice(1015, 1020), slice(150, 160))),
),
(
(
getitem,
(getter_nofancy, "x", (slice(1000, 2000), slice(100, 200))),
(slice(15, 20), slice(50, 60)),
),
(getter_nofancy, "x", (slice(1015, 1020), slice(150, 160))),
),
((getter, (getter, "x", slice(1000, 2000)), 10), (getter, "x", 1010)),
(
(getitem, (getter, "x", (slice(1000, 2000), 10)), (slice(15, 20),)),
(getter, "x", (slice(1015, 1020), 10)),
),
(
(getitem, (getter_nofancy, "x", (slice(1000, 2000), 10)), (slice(15, 20),)),
(getter_nofancy, "x", (slice(1015, 1020), 10)),
),
(
(getter, (getter, "x", (10, slice(1000, 2000))), (slice(15, 20),)),
(getter, "x", (10, slice(1015, 1020))),
),
(
(
getter,
(getter, "x", (slice(1000, 2000), slice(100, 200))),
(slice(None, None), slice(50, 60)),
),
(getter, "x", (slice(1000, 2000), slice(150, 160))),
),
(
(getter, (getter, "x", (None, slice(None, None))), (slice(None, None), 5)),
(getter, "x", (None, 5)),
),
(
(
getter,
(getter, "x", (slice(1000, 2000), slice(10, 20))),
(slice(5, 10),),
),
(getter, "x", (slice(1005, 1010), slice(10, 20))),
),
(
(
getitem,
(getitem, "x", (slice(1000, 2000),)),
(slice(5, 10), slice(10, 20)),
),
(getitem, "x", (slice(1005, 1010), slice(10, 20))),
),
(
(getter, (getter, "x", slice(1000, 2000), False, False), slice(15, 20)),
(getter, "x", slice(1015, 1020)),
),
(
(getter, (getter, "x", slice(1000, 2000)), slice(15, 20), False, False),
(getter, "x", slice(1015, 1020)),
),
(
(
getter,
(getter_nofancy, "x", slice(1000, 2000), False, False),
slice(15, 20),
False,
False,
),
(getter_nofancy, "x", slice(1015, 1020), False, False),
),
]
for inp, expected in pairs:
result = optimize_slices({"y": inp})
assert result == {"y": expected}
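# Quick numeric check (illustrative, not part of the dask test suite): the identity
# behind the simple positive-step pairs above, namely that chained slicing
# x[1000:2000][15:20] selects the same elements as the single fused slice
# x[1015:1020], and x[1000:2000][10] is the same element as x[1010].
def _demo_slice_fusion_identity():
    x = list(range(5000))
    assert x[1000:2000][15:20] == x[1015:1020]
    assert x[1000:2000][10] == x[1010]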
def test_fuse_getitem_lock():
lock1 = SerializableLock()
lock2 = SerializableLock()
pairs = [
(
(getter, (getter, "x", slice(1000, 2000), True, lock1), slice(15, 20)),
(getter, "x", slice(1015, 1020), True, lock1),
),
(
(
getitem,
(getter, "x", (slice(1000, 2000), slice(100, 200)), True, lock1),
(slice(15, 20), slice(50, 60)),
),
(getter, "x", (slice(1015, 1020), slice(150, 160)), True, lock1),
),
(
(
getitem,
(
getter_nofancy,
"x",
(slice(1000, 2000), slice(100, 200)),
True,
lock1,
),
(slice(15, 20), slice(50, 60)),
),
(getter_nofancy, "x", (slice(1015, 1020), slice(150, 160)), True, lock1),
),
(
(
getter,
(getter, "x", slice(1000, 2000), True, lock1),
slice(15, 20),
True,
lock2,
),
(
getter,
(getter, "x", slice(1000, 2000), True, lock1),
slice(15, 20),
True,
lock2,
),
),
]
for inp, expected in pairs:
result = optimize_slices({"y": inp})
assert result == {"y": expected}
def test_optimize_with_getitem_fusion():
dsk = {
"a": "some-array",
"b": (getter, "a", (slice(10, 20), slice(100, 200))),
"c": (getter, "b", (5, slice(50, 60))),
}
result = optimize(dsk, ["c"])
expected_task = (getter, "some-array", (15, slice(150, 160)))
assert any(v == expected_task for v in result.values())
assert len(result) < len(dsk)
def test_optimize_slicing():
dsk = {
"a": (range, 10),
"b": (getter, "a", (slice(None, None, None),)),
"c": (getter, "b", (slice(None, None, None),)),
"d": (getter, "c", (slice(0, 5, None),)),
"e": (getter, "d", (slice(None, None, None),)),
}
expected = {"e": (getter, (range, 10), (slice(0, 5, None),))}
result = optimize_slices(fuse(dsk, [], rename_keys=False)[0])
assert result == expected
# protect output keys
expected = {
"c": (getter, (range, 10), (slice(0, None, None),)),
"d": (getter, "c", (slice(0, 5, None),)),
"e": (getter, "d", (slice(None, None, None),)),
}
result = optimize_slices(fuse(dsk, ["c", "d", "e"], rename_keys=False)[0])
assert result == expected
def test_fuse_slice():
assert fuse_slice(slice(10, 15), slice(0, 5, 2)) == slice(10, 15, 2)
assert fuse_slice((slice(100, 200),), (None, slice(10, 20))) == (
None,
slice(110, 120),
)
assert fuse_slice((slice(100, 200),), (slice(10, 20), None)) == (
slice(110, 120),
None,
)
assert fuse_slice((1,), (None,)) == (1, None)
assert fuse_slice((1, slice(10, 20)), (None, None, 3, None)) == (
1,
None,
None,
13,
None,
)
with pytest.raises(NotImplementedError):
fuse_slice(slice(10, 15, 2), -1)
# Regression test for #3076
with pytest.raises(NotImplementedError):
fuse_slice(None, np.array([0, 0]))
def test_fuse_slice_with_lists():
assert fuse_slice(slice(10, 20, 2), [1, 2, 3]) == [12, 14, 16]
assert fuse_slice([10, 20, 30, 40, 50], [3, 1, 2]) == [40, 20, 30]
assert fuse_slice([10, 20, 30, 40, 50], 3) == 40
assert fuse_slice([10, 20, 30, 40, 50], -1) == 50
assert fuse_slice([10, 20, 30, 40, 50], slice(1, None, 2)) == [20, 40]
assert fuse_slice(
(slice(None), slice(0, 10), [1, 2, 3]), (slice(None), slice(1, 5), slice(None))
) == (slice(0, None), slice(1, 5), [1, 2, 3])
assert fuse_slice(
(slice(None), slice(None), [1, 2, 3]), (slice(None), slice(1, 5), 1)
) == (slice(0, None), slice(1, 5), 2)
def test_nonfusible_fancy_indexing():
nil = slice(None)
cases = [ # x[:, list, :][int, :, :]
((nil, [1, 2, 3], nil), (0, nil, nil)),
# x[int, :, :][:, list, :]
((0, nil, nil), (nil, [1, 2, 3], nil)),
# x[:, list, :, :][:, :, :, int]
((nil, [1, 2], nil, nil), (nil, nil, nil, 0)),
]
for a, b in cases:
with pytest.raises(NotImplementedError):
fuse_slice(a, b)
def test_hard_fuse_slice_cases():
dsk = {
"x": (getter, (getter, "x", (None, slice(None, None))), (slice(None, None), 5))
}
assert optimize_slices(dsk) == {"x": (getter, "x", (None, 5))}
def test_dont_fuse_numpy_arrays():
x = np.ones(10)
for chunks in [(5,), (10,)]:
y = da.from_array(x, chunks=(10,))
dsk = y.__dask_optimize__(y.dask, y.__dask_keys__())
assert sum(isinstance(v, np.ndarray) for v in dsk.values()) == 1
def test_minimize_data_transfer():
zarr = pytest.importorskip("zarr")
x = zarr.ones((100,))
y = da.from_array(x, chunks=25)
z = y + 1
dsk = z.__dask_optimize__(z.dask, z.__dask_keys__())
keys = list(dsk)
results = dask.get(dsk, keys)
big_key = [k for k, r in zip(keys, results) if r is x][0]
dependencies, dependents = dask.core.get_deps(dsk)
deps = dependents[big_key]
assert len(deps) == 4
for dep in deps:
assert dsk[dep][0] in (getitem, getter)
assert dsk[dep][1] == big_key
def test_fuse_slices_with_alias():
dsk = {
"x": np.arange(16).reshape((4, 4)),
("dx", 0, 0): (getter, "x", (slice(0, 4), slice(0, 4))),
("alias", 0, 0): ("dx", 0, 0),
("dx2", 0): (getitem, ("alias", 0, 0), (slice(None), 0)),
}
keys = [("dx2", 0)]
dsk2 = optimize(dsk, keys)
assert len(dsk2) == 3
fused_key = (dsk2.keys() - {"x", ("dx2", 0)}).pop()
assert dsk2[fused_key] == (getter, "x", (slice(0, 4), 0))
def test_dont_fuse_fancy_indexing_in_getter_nofancy():
dsk = {
"a": (
getitem,
(getter_nofancy, "x", (slice(10, 20, None), slice(100, 200, None))),
([1, 3], slice(50, 60, None)),
)
}
assert optimize_slices(dsk) == dsk
dsk = {"a": (getitem, (getter_nofancy, "x", [1, 2, 3]), 0)}
assert optimize_slices(dsk) == dsk
@pytest.mark.parametrize("chunks", [10, 5, 3])
def test_fuse_getter_with_asarray(chunks):
x = np.ones(10) * 1234567890
y = da.ones(10, chunks=chunks)
z = x + y
dsk = z.__dask_optimize__(z.dask, z.__dask_keys__())
assert any(v is x for v in dsk.values())
for v in dsk.values():
s = str(v)
assert s.count("getitem") + s.count("getter") <= 1
if v is not x:
assert "1234567890" not in s
n_getters = len([v for v in dsk.values() if v[0] in (getitem, getter)])
if y.npartitions > 1:
assert n_getters == y.npartitions
else:
assert n_getters == 0
assert_eq(z, x + 1)
@pytest.mark.parametrize(
"get,remove", [(getter, False), (getter_nofancy, False), (getitem, True)]
)
def test_remove_no_op_slices_if_get_is_not_getter_or_getter_nofancy(get, remove):
# Test that no-op slices are removed as long as get is not getter or
# getter_nofancy. This ensures that `get` calls are always made in all
# tasks created by `from_array`, even after optimization
null = slice(0, None)
opts = [
(
(get, "x", null, False, False),
"x" if remove else (get, "x", null, False, False),
),
(
(getitem, (get, "x", null, False, False), null),
"x" if remove else (get, "x", null, False, False),
),
(
(getitem, (get, "x", (null, null), False, False), ()),
"x" if remove else (get, "x", (null, null), False, False),
),
]
for orig, final in opts:
assert optimize_slices({"a": orig}) == {"a": final}
@pytest.mark.xfail(reason="blockwise fusion does not respect this, which is ok")
def test_turn_off_fusion():
x = da.ones(10, chunks=(5,))
y = da.sum(x + 1 + 2 + 3)
a = y.__dask_optimize__(y.dask, y.__dask_keys__())
with dask.config.set({"optimization.fuse.ave-width": 0}):
b = y.__dask_optimize__(y.dask, y.__dask_keys__())
assert dask.get(a, y.__dask_keys__()) == dask.get(b, y.__dask_keys__())
assert len(a) < len(b)
def test_disable_lowlevel_fusion():
"""Check that by disabling fusion, the HLG survives through optimizations"""
with dask.config.set({"optimization.fuse.active": False}):
y = da.ones(3, chunks=(3,), dtype="int")
optimize = y.__dask_optimize__
dsk1 = y.__dask_graph__()
dsk2 = optimize(dsk1, y.__dask_keys__())
assert isinstance(dsk1, HighLevelGraph)
assert isinstance(dsk2, HighLevelGraph)
assert dsk1 == dsk2
y = y.persist()
assert isinstance(y.__dask_graph__(), HighLevelGraph)
assert_eq(y, [1] * 3)
def test_array_creation_blockwise_fusion():
"""
Check that certain array creation routines work with blockwise and can be
fused with other blockwise operations.
"""
x = da.ones(3, chunks=(3,))
y = da.zeros(3, chunks=(3,))
z = da.full(3, fill_value=2, chunks=(3,))
a = x + y + z
dsk1 = a.__dask_graph__()
assert len(dsk1) == 5
dsk2 = optimize_blockwise(dsk1)
assert len(dsk2) == 1
assert_eq(a, np.full(3, 3.0))
def test_gh3937():
# test for github issue #3937
x = da.from_array([1, 2, 3.0], (2,))
x = da.concatenate((x, [x[-1]]))
y = x.rechunk((2,))
# This will produce Integral type indices that are not ints (np.int64), failing
# the optimizer
y = da.coarsen(np.sum, y, {0: 2})
# How to trigger the optimizer explicitly?
y.compute()
def test_double_dependencies():
x = np.arange(56).reshape((7, 8))
d = da.from_array(x, chunks=(4, 4))
X = d + 1
X = da.dot(X, X.T)
assert_eq(X.compute(optimize_graph=False), X)
def test_fuse_roots():
x = da.ones(10, chunks=(2,))
y = da.zeros(10, chunks=(2,))
z = (x + 1) + (2 * y ** 2)
(zz,) = dask.optimize(z)
# assert len(zz.dask) == 5
assert sum(map(dask.istask, zz.dask.values())) == 5 # there are some aliases
assert_eq(zz, z)
def test_fuse_roots_annotations():
x = da.ones(10, chunks=(2,))
y = da.zeros(10, chunks=(2,))
with dask.annotate(foo="bar"):
y = y ** 2
z = (x + 1) + (2 * y)
hlg = dask.blockwise.optimize_blockwise(z.dask)
assert len(hlg.layers) == 3
assert {"foo": "bar"} in [l.annotations for l in hlg.layers.values()]
za = da.Array(hlg, z.name, z.chunks, z.dtype)
assert_eq(za, z)
|
|
"""SPSSbench module for timing command sets
Note that it requires the Pywin32 extensions, which can be found here
http://sourceforge.net/projects/pywin32/"""
#Licensed Materials - Property of IBM
#IBM SPSS Products: Statistics General
#(c) Copyright IBM Corp. 2011, 2020
#US Government Users Restricted Rights - Use, duplication or disclosure
#restricted by GSA ADP Schedule Contract with IBM Corp.
# Copyright (C) 2005 by SPSS Inc.
helptext = r"""STATS BENCHMRK CMDSET1=filespec [CMDSET2=filespec]
NUMREP=n [PROCESSES=process name list]
/OUTPUT OUTFILE=csv-filespec [STATISTICS=list of statistics]
[/HELP].
Benchmark one or two sets of syntax and report selected resource usage
statistics.
Example:
STATS BENCHMRK CMDSET1="C:\jobs\insert1.sps"
NUMREP=3 PROCESSES=stats spssengine startx startx32
/OUTPUT OUTFILE="c:\temp\bench.csv"
STATISTICS=UserTime KernelTime PageFaultCount PeakWorkingSetSize WorkingSetSize.
This command requires the Python for Windows extensions by Mark Hammond
available from
http://sourceforge.net/projects/pywin32/
Be sure to get the version appropriate for the version of Python required by
Statistics.
CMDSET1 and CMDSET2 specify syntax files to be run repeatedly in
alternation. CMDSET2 is optional. Using two command sets is
useful when you have two alternative versions of syntax for the same
task and want to determine which is more efficient.
NUMREP specifies how many times to run the cmdsets. For each rep, CMDSET1
is run and then, if given, CMDSET2 is run.
PROCESSES specifies one or more processes to monitor.
Statistics are recorded for each specified process associated with the Statistics session.
The process names may vary with the Statistics version. For V20, they are
stats - the Statistics frontend
spssengine - the Statistics backend
startx - the Python process
startx32 - the R process.
The R process must be started before running this command if it is to be
monitored.
OUTFILE names a csv file to contain the benchmark statistics. Each case
includes process, command set, and repetition identifiers along with each
statistic measured at the start and end of the syntax execution.
STATISTICS lists the statistics to be collected. Use ALL to get every one.
Otherwise select from this list. See the Task Manager process help for
definitions.
times:
CreationTime, UserTime, KernelTime
Creation time seems often not to be meaningful.
memory:
QuotaPagedPoolUsage, QuotaPeakPagedPoolUsage, QuotaNonPagedPoolUsage,
PageFaultCount, PeakWorkingSetSize, PeakPagefileUsage, QuotaPeakNonPagedPoolUsage,
PagefileUsage, WorkingSetSize
i/o:
WriteOperationCount,WriteTransferCount, OtherOperationCount,
OtherTransferCount, ReadOperationCount, ReadTransferCount
/HELP displays this text and does nothing else.
"""
__author__ = 'spss'
__version__= '2.0.1'
# for recent versions of Statistics
from extension import Template, Syntax, processcmd
import spss
import time, re
try:
from win32process import GetCurrentProcess, GetProcessMemoryInfo, GetProcessTimes, GetProcessIoCounters
import win32api, win32pdhutil, win32con
except ImportError:
raise SystemError(_("This module requires the Python for Windows extensions. It can be downloaded from http://sourceforge.net/projects/pywin32/"))
# process handle can be found with
# hdl=win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS, 0, pid)
# win32pdhutil.FindPerformanceAttributesByName("spssengine") returns pid
# i/o: win32process.GetProcessIoCounters(hdl) returns dict of io information
# memory: win32process.GetProcessMemoryInfo(hdl)
# win32process.GetProcessWorkingSetSize(hdl) (returns duple)
# win32api.CloseHandle(hdl)
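# Illustrative sketch (added, not part of the original module): how the calls
# listed above combine into a one-off snapshot of a single process. It assumes
# the pywin32 extensions are installed and that an "spssengine" process exists.
def _example_snapshot_spssengine():
    pids = win32pdhutil.FindPerformanceAttributesByName("spssengine")
    if not pids:
        return None
    hdl = win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS, 0, pids[0])
    try:
        mem = GetProcessMemoryInfo(hdl)    # dict of memory counters
        times = GetProcessTimes(hdl)       # dict of time counters
        io = GetProcessIoCounters(hdl)     # dict of i/o counters
        return mem["WorkingSetSize"], times["UserTime"], io["ReadOperationCount"]
    finally:
        win32api.CloseHandle(hdl)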
class benchstats(object):
"""This class handles benchmark measures and snapshots. It is Windows specific.
It monitors the selected Statistics processes, so any overhead from the
monitoring will be included in the measures.
"""
memory = [
'QuotaPagedPoolUsage', 'QuotaPeakPagedPoolUsage', 'QuotaNonPagedPoolUsage',
'PageFaultCount', 'PeakWorkingSetSize', 'PeakPagefileUsage', 'QuotaPeakNonPagedPoolUsage',
'PagefileUsage', 'WorkingSetSize']
time = [
'CreationTime', 'UserTime', 'KernelTime']
io = [
'WriteOperationCount','WriteTransferCount', 'OtherOperationCount',
'OtherTransferCount', 'ReadOperationCount', 'ReadTransferCount']
apinames = [GetProcessMemoryInfo, GetProcessTimes,GetProcessIoCounters]
apilist = [memory, time, io] # list of lists
def __init__(self, processes, stats=None):
if stats is None or "all" in stats:
self.stats = benchstats.time + benchstats.memory + benchstats.io
else:
self.stats = stats
# readings will be a list of lists of readings: one list for each process
self.readings = []
self.measures = ["time"]
self.apis = [time]
# build list of calls for specified measures
# must match ignoring case but set the cased version of the statistic name
for s in self.stats:
for i, api in enumerate(benchstats.apilist):
cased_s = caselessin(s, api)
if cased_s:
self.measures.append(cased_s)
self.apis.append(benchstats.apinames[i])
break
else:
raise ValueError(_("Invalid measure: %s") % s)
# find the processes to monitor - they must already exist
self.handles = []
self.procfound = []
self.processes = []
for p in processes:
# this api is slow
pnum = win32pdhutil.FindPerformanceAttributesByName(p)
if len(pnum) > 1 and str(p).lower() in ["stats", "spssengine", "statisticsb"]:
raise SystemError(_("""There are multiple instances of Statistics running. Only one can be running when monitoring: %s""" % p))
for instance in pnum:
self.handles.append(win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS, 0, instance))
pid = p + "-" + str(instance)
self.processes.append(pid)
self.readings.append([])
if len(self.processes) == 0:
raise ValueError(_("""No instances of any of the specified processes were found"""))
else:
print("Processes to be monitored: " + ",".join(self.processes))
def snap(self, rep, group):
"""record new set of statistics measures for designated group and selected processes
One record for each process monitored
rep is the repetition number
group is the cmdset number (1 or 2)
"""
for j, p in enumerate(self.processes):
r = [group, rep]
for i in range(len(self.measures)):
if self.measures[i] == "time":
r.append(time.time()) # current time in seconds
else:
r.append(float(self.apis[i](self.handles[j])[self.measures[i]]))
self.readings[j].append((p, r))
def getrow(self, mergecount=1):
"""generator for sets of snap measures. It yields the next complete row of snaps.
mergecount specifies the number of
snaps to be combined to make a complete observation. If snapping before and after some
events, for example, mergecount should be 2."""
for pn in range(len(self.processes)):
for rows in range(0, len(self.readings[pn]), mergecount):
row = str(self.readings[pn][rows][0])+ " " \
+ str([self.readings[pn][rows+i][1:] for i in range(mergecount)])
yield re.sub(u"[][(),L]", "", row) + "\n"
def save(self, filespec, sets=None):
"""write measures to specified file in a form suitable for reading into Statistics.
sets is a list of strings to append to the generated variable names. Its length
determines how readings are paired. If the length is two, for example, there will
be 2 times the number of statistics in each row with the names suffixed by the
strings given. The size of sets determines how many (sequential) snaps are
concatenated to give each output record.
For example:
bstat.save(file, sets=["Start", "Stop"])
would save a set of before and after measures for the selected statistics with
Start and Stop appended to the pairs of variable names.
"""
if sets is None:
sets = [""]
for p in self.handles:
win32api.CloseHandle(p)
f = open(filespec, "w")
# construct variable names heading
namelist = ["Process"]
for g, s in enumerate(sets):
namelist.append("Cmdset" + str(g))
namelist.append("Repetition" + str(g))
for i in range(len(self.measures)):
namelist.append(self.measures[i] + s)
f.write(" ".join(namelist)+"\n")
for row in self.getrow(mergecount=len(sets)):
f.write(row)
f.close()
def caselessin(needle, haystack):
"""Find needle in haystack ignoring case and return haystack item or None
needle is the item to find
haystack is a list of matches"""
needle = str(needle).lower()
for item in haystack:
if needle == str(item).lower():
return item
else:
return None
def benchmark(outfile, cmdset1, cmdset2=None, stats=None, processes=None, numrep=1):
"""Benchmark repetitions of one or more commands against, optionally, an alternative set.
When there are two sets, the repetitions are interleaved.
numrep is the repetition count.
cmdset1 and cmdset2 are filespecs of the syntax files to be timed. Remember to include
an EXECUTE if you are timing transformation commands that would not otherwise
generate a data pass.
stats is a list of statistics to be collected or ALL.
processes is a list of the Statistics processes to monitor. All are assumed by
default, but only one Statistics product session can be running. Process names
vary some by Statistics version.
outfile specifies a file to get the individual results for the specified measures in a csv format
suitable for reading into Statistics. Variable names are written on the first line.
"""
# debugging
# makes debug apply only to the current thread
#try:
#import wingdbstub
#if wingdbstub.debugger != None:
#import time
#wingdbstub.debugger.StopDebug()
#time.sleep(2)
#wingdbstub.debugger.StartDebug()
#import thread
#wingdbstub.debugger.SetDebugThreads({thread.get_ident(): 1}, default_policy=0)
## for V19 use
## ###SpssClient._heartBeat(False)
#except:
#pass
numsets = cmdset2 and 2 or 1
processnames = set(["spssengine", "stats", "startx", "startx32"])
if processes is None:
processes = processnames
# bstat is a benchstats object containing the statistics to be collected.
bstat = benchstats(processes, stats)
cmd1 = """INSERT FILE="%(cmdset1)s".""" % locals()
cmd2 = """INSERT FILE="%(cmdset2)s".""" % locals() # run only if cmdset2 not null
# Run one or two command files repeatedly generating before and after resource
# records per process for each repetition
for i in range(numrep):
bstat.snap(i, group=1) #start
spss.Submit(cmd1)
bstat.snap(i, group=1) #stop
if cmdset2:
bstat.snap(i, group=2) #start
spss.Submit(cmd2)
bstat.snap(i, group=2) #stop
bstat.save(outfile, sets=["Start", "Stop"])
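# Illustrative sketch (added): calling benchmark() directly from a Python
# program block rather than through the STATS BENCHMRK command. The file
# paths are hypothetical placeholders; the listed processes must already be running.
def _example_benchmark_call():
    benchmark(
        outfile="c:/temp/bench.csv",
        cmdset1="c:/jobs/insert1.sps",
        numrep=3,
        processes=["stats", "spssengine"],
        stats=["UserTime", "KernelTime", "WorkingSetSize"])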
def Run(args):
"""Execute the STATS BENCHMRK extension command"""
args = args[list(args.keys())[0]]
oobj = Syntax([
Template("CMDSET1", subc="", ktype="literal", var="cmdset1"),
Template("CMDSET2", subc="", ktype="literal", var="cmdset2"),
Template("OUTFILE", subc="OUTPUT", ktype="literal", var="outfile"),
Template("NUMREP", subc="", ktype="int", var="numrep",
vallist=[1]),
Template("PROCESSES", subc="", ktype="str", var="processes", islist=True,
vallist=["spssengine", "stats", "startx", "startx32", "statisticsb"]),
Template("STATISTICS", subc="OUTPUT", ktype="str", var="stats", islist=True),
Template("HELP", subc="", ktype="bool")])
#enable localization
global _
try:
(_("---"))
except:
def _(msg):
return msg
# A HELP subcommand overrides all else
if "HELP" in args:
#print helptext
helper()
else:
processcmd(oobj, args, benchmark)
def helper():
"""open html help in default browser window
The location is computed from the current module name"""
import webbrowser, os.path
path = os.path.splitext(__file__)[0]
helpspec = "file://" + path + os.path.sep + \
"markdown.html"
# webbrowser.open seems not to work well
browser = webbrowser.get()
if not browser.open_new(helpspec):
print(("Help file not found:" + helpspec))
try: #override
from extension import helper
except:
pass
|
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable=
"""Parallelization utility optimizer."""
from __future__ import absolute_import
__all__ = ['split_data', 'split_and_load', 'clip_global_norm',
'check_sha1', 'download']
import os
import sys
import hashlib
import uuid
import warnings
import collections
import weakref
import requests
import numpy as np
from .. import ndarray
from ..util import is_np_shape, is_np_array
from .. import numpy as _mx_np # pylint: disable=reimported
def split_data(data, num_slice, batch_axis=0, even_split=True):
"""Splits an NDArray into `num_slice` slices along `batch_axis`.
Usually used for data parallelism where each slices is sent
to one device (i.e. GPU).
Parameters
----------
data : NDArray
A batch of data.
num_slice : int
Number of desired slices.
batch_axis : int, default 0
The axis along which to slice.
even_split : bool, default True
Whether to force all slices to have the same number of elements.
If `True`, an error will be raised when `num_slice` does not evenly
divide `data.shape[batch_axis]`.
Returns
-------
list of NDArray
Return value is a list even if `num_slice` is 1.
"""
size = data.shape[batch_axis]
if even_split and size % num_slice != 0:
raise ValueError(
"data with shape %s cannot be evenly split into %d slices along axis %d. " \
"Use a batch size that's multiple of %d or set even_split=False to allow " \
"uneven partitioning of data."%(
str(data.shape), num_slice, batch_axis, num_slice))
step = size // num_slice
# If size < num_slice, make fewer slices
if not even_split and size < num_slice:
step = 1
num_slice = size
if batch_axis == 0:
slices = [data[i*step:(i+1)*step] if i < num_slice - 1 else data[i*step:size]
for i in range(num_slice)]
elif even_split:
if is_np_array():
slices = _mx_np.split(data, indices_or_sections=num_slice, axis=batch_axis)
else:
slices = ndarray.split(data, num_outputs=num_slice, axis=batch_axis)
else:
if is_np_array():
indices = [step * i for i in range(1, num_slice)]
slices = _mx_np.split(data, indices_or_sections=indices, axis=batch_axis)
else:
slices = [ndarray.slice_axis(data, batch_axis, i*step, (i+1)*step)
if i < num_slice - 1 else
ndarray.slice_axis(data, batch_axis, i*step, size)
for i in range(num_slice)]
return slices
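# Illustrative usage sketch (added, not part of the original module): split a
# batch of 6 samples into 2 slices along the batch axis. Assumes MXNet is
# importable as `mx`.
def _example_split_data():
    import mxnet as mx
    data = mx.nd.arange(12).reshape((6, 2))   # batch of 6 samples
    slices = split_data(data, num_slice=2)    # 2 slices of 3 samples each
    assert len(slices) == 2
    assert slices[0].shape == (3, 2)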
def split_and_load(data, ctx_list, batch_axis=0, even_split=True):
"""Splits an NDArray into `len(ctx_list)` slices along `batch_axis` and loads
each slice to one context in `ctx_list`.
Parameters
----------
data : NDArray or ndarray
A batch of data.
ctx_list : list of Context
A list of Contexts.
batch_axis : int, default 0
The axis along which to slice.
even_split : bool, default True
Whether to force all slices to have the same number of elements.
Returns
-------
list of NDArrays or ndarrays
Each corresponds to a context in `ctx_list`.
"""
array_fn = _mx_np.array if is_np_array() else ndarray.array
if not isinstance(data, ndarray.NDArray):
data = array_fn(data, ctx=ctx_list[0])
if len(ctx_list) == 1:
return [data.as_in_context(ctx_list[0])]
slices = split_data(data, len(ctx_list), batch_axis, even_split)
return [i.as_in_context(ctx) for i, ctx in zip(slices, ctx_list)]
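# Illustrative usage sketch (added): split a batch and place each slice on a
# context. Two CPU contexts are used so the sketch runs without GPUs; assumes
# MXNet is importable as `mx`.
def _example_split_and_load():
    import mxnet as mx
    data = mx.nd.ones((4, 3))
    parts = split_and_load(data, ctx_list=[mx.cpu(0), mx.cpu(1)])
    assert [p.shape[0] for p in parts] == [2, 2]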
def clip_global_norm(arrays, max_norm, check_isfinite=True):
"""Rescales NDArrays so that the sum of their 2-norm is smaller than `max_norm`.
Parameters
----------
arrays : list of NDArray
max_norm : float
check_isfinite : bool, default True
If True, check that the total_norm is finite (not nan or inf). This
requires a blocking .asscalar() call.
Returns
-------
NDArray or float
Total norm. Return type is NDArray of shape (1,) if check_isfinite is
False. Otherwise a float is returned.
"""
def _norm(array):
if array.stype == 'default':
x = array.reshape((-1,))
return ndarray.dot(x, x)
return array.norm().square()
assert len(arrays) > 0
ctx = arrays[0].context
total_norm = ndarray.add_n(*[_norm(arr).as_in_context(ctx) for arr in arrays])
total_norm = ndarray.sqrt(total_norm)
if check_isfinite:
if not np.isfinite(total_norm.asscalar()):
warnings.warn(
UserWarning('nan or inf is detected. '
'Clipping results will be undefined.'), stacklevel=2)
scale = max_norm / (total_norm + 1e-8)
scale = ndarray.min(ndarray.concat(scale, ndarray.ones(1, ctx=ctx), dim=0))
for arr in arrays:
arr *= scale.as_in_context(arr.context)
if check_isfinite:
return total_norm.asscalar()
else:
return total_norm
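# Illustrative usage sketch (added): rescale two gradient arrays in place so
# that their combined 2-norm fits within max_norm. Assumes MXNet is importable
# as `mx`; the values are arbitrary.
def _example_clip_global_norm():
    import mxnet as mx
    grads = [mx.nd.ones((2, 2)) * 10, mx.nd.ones((3,)) * 10]
    pre_clip_norm = clip_global_norm(grads, max_norm=1.0)
    assert pre_clip_norm > 1.0
    post_clip_norm = sum(float((g ** 2).sum().asscalar()) for g in grads) ** 0.5
    assert post_clip_norm <= 1.0 + 1e-5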
def _indent(s_, numSpaces):
"""Indent string
"""
s = s_.split('\n')
if len(s) == 1:
return s_
first = s.pop(0)
s = [first] + [(numSpaces * ' ') + line for line in s]
s = '\n'.join(s)
return s
def check_sha1(filename, sha1_hash):
"""Check whether the sha1 hash of the file content matches the expected hash.
Parameters
----------
filename : str
Path to the file.
sha1_hash : str
Expected sha1 hash in hexadecimal digits.
Returns
-------
bool
Whether the file content matches the expected hash.
"""
sha1 = hashlib.sha1()
with open(filename, 'rb') as f:
while True:
data = f.read(1048576)
if not data:
break
sha1.update(data)
return sha1.hexdigest() == sha1_hash
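# Illustrative usage sketch (added): write a small file and verify it against
# its sha1 digest; standard library only.
def _example_check_sha1():
    import tempfile
    payload = b"hello world"
    expected = hashlib.sha1(payload).hexdigest()
    with tempfile.NamedTemporaryFile(delete=False) as f:
        f.write(payload)
        name = f.name
    try:
        assert check_sha1(name, expected)
        assert not check_sha1(name, "0" * 40)
    finally:
        os.remove(name)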
if not sys.platform.startswith('win32'):
# refer to https://github.com/untitaker/python-atomicwrites
def _replace_atomic(src, dst):
"""Implement atomic os.replace with linux and OSX. Internal use only"""
try:
os.rename(src, dst)
except OSError:
try:
os.remove(src)
except OSError:
pass
finally:
raise OSError(
'Moving downloaded temp file - {}, to {} failed. '
'Please retry the download.'.format(src, dst))
else:
import ctypes
_MOVEFILE_REPLACE_EXISTING = 0x1
# Setting this value guarantees that a move performed as a copy
# and delete operation is flushed to disk before the function returns.
# The flush occurs at the end of the copy operation.
_MOVEFILE_WRITE_THROUGH = 0x8
_windows_default_flags = _MOVEFILE_WRITE_THROUGH
text_type = unicode if sys.version_info[0] == 2 else str # pylint: disable=undefined-variable
def _str_to_unicode(x):
"""Handle text decoding. Internal use only"""
if not isinstance(x, text_type):
return x.decode(sys.getfilesystemencoding())
return x
def _handle_errors(rv, src):
"""Handle WinError. Internal use only"""
if not rv:
msg = ctypes.FormatError(ctypes.GetLastError())
# if MoveFileExW fails (e.g. it cannot acquire a file lock), remove the temp file
try:
os.remove(src)
except OSError:
pass
finally:
raise OSError(msg)
def _replace_atomic(src, dst):
"""Implement atomic os.replace with windows.
refer to https://docs.microsoft.com/en-us/windows/desktop/api/winbase/nf-winbase-movefileexw
The function fails when one of the process(copy, flush, delete) fails.
Internal use only"""
_handle_errors(ctypes.windll.kernel32.MoveFileExW(
_str_to_unicode(src), _str_to_unicode(dst),
_windows_default_flags | _MOVEFILE_REPLACE_EXISTING
), src)
def download(url, path=None, overwrite=False, sha1_hash=None, retries=5, verify_ssl=True):
"""Download an given URL
Parameters
----------
url : str
URL to download
path : str, optional
Destination path to store the downloaded file. By default the file is stored
in the current directory with the same name as in the url.
overwrite : bool, optional
Whether to overwrite destination file if already exists.
sha1_hash : str, optional
Expected sha1 hash in hexadecimal digits. Will ignore existing file when hash is specified
but doesn't match.
retries : integer, default 5
The number of times to attempt the download in case of failure or non-200 return codes.
verify_ssl : bool, default True
Verify SSL certificates.
Returns
-------
str
The file path of the downloaded file.
"""
if path is None:
fname = url.split('/')[-1]
# Empty filenames are invalid
assert fname, 'Can\'t construct file-name from this URL. ' \
'Please set the `path` option manually.'
else:
path = os.path.expanduser(path)
if os.path.isdir(path):
fname = os.path.join(path, url.split('/')[-1])
else:
fname = path
assert retries >= 0, "Number of retries should be at least 0, currently it's {}".format(
retries)
if not verify_ssl:
warnings.warn(
'Unverified HTTPS request is being made (verify_ssl=False). '
'Adding certificate verification is strongly advised.')
if overwrite or not os.path.exists(fname) or (sha1_hash and not check_sha1(fname, sha1_hash)):
dirname = os.path.dirname(os.path.abspath(os.path.expanduser(fname)))
if not os.path.exists(dirname):
os.makedirs(dirname)
while retries + 1 > 0:
# Disable pylint too-broad-exception warning
# pylint: disable=W0703
try:
print('Downloading {} from {}...'.format(fname, url))
r = requests.get(url, stream=True, verify=verify_ssl)
if r.status_code != 200:
raise RuntimeError('Failed downloading url {}'.format(url))
# create uuid for temporary files
random_uuid = str(uuid.uuid4())
with open('{}.{}'.format(fname, random_uuid), 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
# if the target file already exists (created by another process)
# and, when a hash is given, matches it,
# delete the temporary file instead of replacing the target
if not os.path.exists(fname) or (sha1_hash and not check_sha1(fname, sha1_hash)):
# atomic operation within the same file system
_replace_atomic('{}.{}'.format(fname, random_uuid), fname)
else:
try:
os.remove('{}.{}'.format(fname, random_uuid))
except OSError:
pass
finally:
warnings.warn(
'File {} exists in file system so the downloaded file is deleted'.format(fname))
if sha1_hash and not check_sha1(fname, sha1_hash):
raise UserWarning(
'File {} is downloaded but the content hash does not match.'
' The repo may be outdated or download may be incomplete. '
'If the "repo_url" is overridden, consider switching to '
'the default repo.'.format(fname))
break
except Exception as e:
retries -= 1
if retries <= 0:
raise e
print('download failed due to {}, retrying, {} attempt{} left'
.format(repr(e), retries, 's' if retries > 1 else ''))
return fname
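# Illustrative usage sketch (added): fetch a file and store it under a chosen
# name with a couple of retries. The URL below is a hypothetical placeholder,
# not a real artifact.
def _example_download():
    url = "https://example.org/files/data.zip"   # placeholder URL
    path = download(url, path="data.zip", retries=2)
    print("saved to", path)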
def _get_repo_url():
"""Return the base URL for Gluon dataset and model repository."""
default_repo = 'https://apache-mxnet.s3-accelerate.dualstack.amazonaws.com/'
repo_url = os.environ.get('MXNET_GLUON_REPO', default_repo)
if repo_url[-1] != '/':
repo_url = repo_url+'/'
return repo_url
def _get_repo_file_url(namespace, filename):
"""Return the URL for hosted file in Gluon repository.
Parameters
----------
namespace : str
Namespace of the file.
filename : str
Name of the file
"""
return '{base_url}{namespace}/{filename}'.format(base_url=_get_repo_url(),
namespace=namespace,
filename=filename)
def _brief_print_list(lst, limit=7):
"""Print at most `limit` elements of list."""
lst = list(lst)
if len(lst) > limit:
return _brief_print_list(lst[:limit//2], limit) + ', ..., ' + \
_brief_print_list(lst[-limit//2:], limit)
return ', '.join(["'%s'"%str(i) for i in lst])
class HookHandle(object):
"""A handle that can attach/detach a hook."""
def __init__(self):
self._hooks_dict_ref = None
self._id = None
def attach(self, hooks_dict, hook):
assert not self._hooks_dict_ref, 'The same handle cannot be attached twice.'
self._id = id(hook)
hooks_dict[self._id] = hook
self._hooks_dict_ref = weakref.ref(hooks_dict)
def detach(self):
hooks_dict = self._hooks_dict_ref()
if hooks_dict is not None and self._id in hooks_dict:
del hooks_dict[self._id]
def __getstate__(self):
return (self._hooks_dict_ref(), self._id)
def __setstate__(self, state):
if state[0] is None:
self._hooks_dict_ref = weakref.ref(collections.OrderedDict())
else:
self._hooks_dict_ref = weakref.ref(state[0])
self._id = state[1]
def __enter__(self):
return self
def __exit__(self, ptype, value, trace):
self.detach()
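# Illustrative usage sketch (added): register a hook in an ordered dict and use
# the handle as a context manager so the hook is detached automatically.
def _example_hook_handle():
    hooks = collections.OrderedDict()
    def my_hook(*args):
        print("hook called with", args)
    with HookHandle() as handle:
        handle.attach(hooks, my_hook)
        assert len(hooks) == 1
    assert len(hooks) == 0   # detached when the with-block exits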
def shape_is_known(shape):
"""Check whether a shape is completely known with or without np semantics.
Please see the doc of is_np_shape for more details.
"""
if shape is None:
return False
unknown_dim_size = -1 if is_np_shape() else 0
if len(shape) == 0:
return unknown_dim_size == -1
for dim_size in shape:
if dim_size == unknown_dim_size:
return False
assert dim_size > unknown_dim_size, "shape dimension size cannot be less than {}, while " \
"received {}".format(unknown_dim_size, dim_size)
return True
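# Illustrative usage sketch (added): -1 (np semantics) or 0 (classic semantics)
# marks an unknown dimension, so such shapes are reported as not fully known.
def _example_shape_is_known():
    unknown = -1 if is_np_shape() else 0
    assert shape_is_known((2, 3))
    assert not shape_is_known((2, unknown))
    assert not shape_is_known(None)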
def _check_same_symbol_type(symbols):
"""Check whether all the symbols in the list are of the same type.
Raise type error if the types are different. Return the class of
the symbols."""
from ..symbol.numpy import _Symbol as np_symbol
from ..symbol import Symbol as nd_symbol
is_np_sym = isinstance(symbols[0], np_symbol)
for s in symbols[1:]:
if is_np_sym != isinstance(s, np_symbol):
raise TypeError('Found both classic symbol (mx.sym.Symbol) and numpy symbol '
'(mx.sym.np._Symbol) in outputs. This will prevent you from building '
'a computation graph by grouping them since different types of symbols '
'are not allowed to be grouped in Gluon to form a computation graph. '
'You will need to convert them to the same type of symbols, either '
'classic or numpy following this rule: if you want numpy ndarray '
'output(s) from the computation graph, please convert all the classic '
'symbols in the list to numpy symbols by calling `as_np_ndarray()` '
'on each of them; if you want classic ndarray output(s) from the '
'computation graph, please convert all the numpy symbols in the list '
'to classic symbols by calling `as_nd_ndarray()` on each of them.')
return np_symbol if is_np_sym else nd_symbol
def _check_all_np_ndarrays(out):
"""Check if ndarrays/symbols in out are all np.ndarray/np._Symbol."""
from ..numpy import ndarray as np_ndarray
from ..symbol.numpy import _Symbol as np_symbol
from ..symbol import Symbol as nd_symbol
from ..ndarray import NDArray as nd_ndarray
# pylint: disable=no-else-raise
if isinstance(out, (nd_ndarray, nd_symbol)) and not isinstance(out, (np_ndarray, np_symbol)):
raise TypeError("Block's output ndarrays/symbols must be of type `mxnet.numpy.ndarray`"
" or `mxnet.symbol.numpy._Symbol`, while got output type {}"
.format(str(type(out))))
elif isinstance(out, (list, tuple)):
for i in out:
_check_all_np_ndarrays(i)
# pylint: enable=no-else-raise
|
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2016 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
import uuid
import pytest
from dci.common.exceptions import DCIException
from dci.common.schemas import (
check_json_is_valid,
create_user_schema,
update_user_schema,
)
def test_create_users(admin, team_id):
pu = admin.post(
"/api/v1/users",
data={
"name": "pname",
"password": "ppass",
"fullname": "P Name",
"email": "pname@example.org",
},
)
assert pu.status_code == 201
pu = pu.data
pu_id = pu["user"]["id"]
gu = admin.get("/api/v1/users/%s" % pu_id).data
assert gu["user"]["name"] == "pname"
assert gu["user"]["timezone"] == "UTC"
def test_create_user_without_a_team(admin):
pu = admin.post(
"/api/v1/users",
data={
"name": "pname",
"password": "ppass",
"fullname": "P Name",
"email": "pname@example.org",
},
)
assert pu.status_code == 201
pu = pu.data
pu_id = pu["user"]["id"]
gu = admin.get("/api/v1/users/%s" % pu_id).data
assert gu["user"]["name"] == "pname"
assert gu["user"]["timezone"] == "UTC"
def test_create_users_already_exist(admin, team_id):
pstatus_code = admin.post(
"/api/v1/users",
data={
"name": "pname",
"password": "ppass",
"fullname": "P Name",
"email": "pname@example.org",
},
).status_code
assert pstatus_code == 201
pstatus_code = admin.post(
"/api/v1/users",
data={
"name": "pname",
"password": "ppass",
"fullname": "P Name",
"email": "pname@example.org",
},
).status_code
assert pstatus_code == 409
def test_get_teams_of_user(admin, user_id, team_id, team_user_id):
res = admin.post("/api/v1/teams/%s/users/%s" % (team_id, user_id))
assert res.status_code == 201
res = admin.post("/api/v1/teams/%s/users/%s" % (team_user_id, user_id))
assert res.status_code == 201
uteams = admin.get("/api/v1/users/%s/teams" % user_id)
assert uteams.status_code == 200
assert len(uteams.data["teams"]) == 2
team_ids = {t["id"] for t in uteams.data["teams"]}
assert team_ids == set([team_id, team_user_id])
def test_get_all_users(admin, team_id):
# TODO(yassine): Currently there are already 3 users created in the DB;
# this will be fixed later.
db_users = admin.get("/api/v1/users?sort=created_at").data
db_users = db_users["users"]
db_users_ids = [db_t["id"] for db_t in db_users]
user_1 = admin.post(
"/api/v1/users",
data={
"name": "pname1",
"password": "ppass",
"fullname": "P Name",
"email": "pname@example.org",
},
).data
user_2 = admin.post(
"/api/v1/users",
data={
"name": "pname2",
"password": "ppass",
"fullname": "Q Name",
"email": "qname@example.org",
},
).data
db_users_ids.extend([user_1["user"]["id"], user_2["user"]["id"]])
db_all_users = admin.get("/api/v1/users?sort=created_at").data
db_all_users = db_all_users["users"]
db_all_users_ids = [db_t["id"] for db_t in db_all_users]
assert db_all_users_ids == db_users_ids
def test_where_invalid(admin):
err = admin.get("/api/v1/users?where=id")
assert err.status_code == 400
assert err.data["message"] == "Request malformed"
assert err.data["payload"]["error"] == "where: 'id' is not a 'key value csv'"
def test_get_all_users_with_team(admin):
# TODO(yassine): Currently there are already 3 users created in the DB;
# this will be fixed later.
db_users = admin.get("/api/v1/users?embed=team&where=name:admin").data
assert "users" in db_users
db_users = db_users["users"]
assert "team" in db_users[0]
def test_get_all_users_with_where(admin, team_id):
pu = admin.post(
"/api/v1/users",
data={
"name": "pname1",
"password": "ppass",
"fullname": "P Name",
"email": "pname@example.org",
},
).data
pu_id = pu["user"]["id"]
db_u = admin.get("/api/v1/users?where=id:%s" % pu_id).data
db_u_id = db_u["users"][0]["id"]
assert db_u_id == pu_id
db_u = admin.get("/api/v1/users?where=name:pname1").data
db_u_id = db_u["users"][0]["id"]
assert db_u_id == pu_id
def test_get_all_users_with_pagination(admin, team_id):
users = admin.get("/api/v1/users").data
current_users = users["_meta"]["count"]
admin.post(
"/api/v1/users",
data={
"name": "pname1",
"password": "ppass",
"fullname": "P Name",
"email": "pname@example.org",
},
)
admin.post(
"/api/v1/users",
data={
"name": "pname2",
"password": "ppass",
"fullname": "Q Name",
"email": "qname@example.org",
},
)
admin.post(
"/api/v1/users",
data={
"name": "pname3",
"password": "ppass",
"fullname": "R Name",
"email": "rname@example.org",
},
)
admin.post(
"/api/v1/users",
data={
"name": "pname4",
"password": "ppass",
"fullname": "S Name",
"email": "sname@example.org",
},
)
users = admin.get("/api/v1/users").data
assert users["_meta"]["count"] == current_users + 4
# verify limit and offset are working well
users = admin.get("/api/v1/users?limit=2&offset=0").data
assert len(users["users"]) == 2
users = admin.get("/api/v1/users?limit=2&offset=2").data
assert len(users["users"]) == 2
# if offset is out of bound, the api returns an empty list
users = admin.get("/api/v1/users?limit=5&offset=300")
assert users.status_code == 200
assert users.data["users"] == []
def test_get_all_users_with_sort(admin, team_id):
# TODO(yassine): Currently there are already 3 users created in the DB;
# this will be fixed later.
db_users = admin.get("/api/v1/users?sort=created_at").data
db_users = db_users["users"]
# create 2 users ordered by created time
user_1 = admin.post(
"/api/v1/users",
data={
"name": "pname1",
"password": "ppass",
"fullname": "P Name",
"email": "pname@example.org",
},
).data["user"]
user_2 = admin.post(
"/api/v1/users",
data={
"name": "pname2",
"password": "ppass",
"fullname": "Q Name",
"email": "qname@example.org",
},
).data["user"]
gusers = admin.get("/api/v1/users?sort=created_at").data
db_users.extend([user_1, user_2])
assert gusers["users"][0]["id"] == db_users[0]["id"]
assert gusers["users"][1]["id"] == db_users[1]["id"]
# test in reverse order
db_users.reverse()
gusers = admin.get("/api/v1/users?sort=-created_at").data
assert gusers["users"][0]["id"] == db_users[0]["id"]
assert gusers["users"][1]["id"] == db_users[1]["id"]
def test_get_user_by_id(admin, team_id):
puser = admin.post(
"/api/v1/users",
data={
"name": "pname",
"password": "ppass",
"fullname": "P Name",
"email": "pname@example.org",
},
).data
puser_id = puser["user"]["id"]
# get by uuid
created_user = admin.get("/api/v1/users/%s" % puser_id)
assert created_user.status_code == 200
created_user = created_user.data
assert created_user["user"]["id"] == puser_id
def test_get_user_not_found(admin):
result = admin.get("/api/v1/users/%s" % uuid.uuid4())
assert result.status_code == 404
def test_put_users(admin, team_id):
pu = admin.post(
"/api/v1/users",
data={
"name": "pname",
"password": "ppass",
"fullname": "P Name",
"timezone": "Europe/Paris",
"email": "pname@example.org",
},
)
assert pu.status_code == 201
pu_etag = pu.headers.get("ETag")
gu = admin.get("/api/v1/users/%s" % pu.data["user"]["id"])
assert gu.status_code == 200
assert gu.data["user"]["timezone"] == "Europe/Paris"
ppu = admin.put(
"/api/v1/users/%s" % gu.data["user"]["id"],
data={"name": "nname"},
headers={"If-match": pu_etag},
)
assert ppu.status_code == 200
assert ppu.data["user"]["name"] == "nname"
def test_change_user_state(admin, team_id):
pu = admin.post(
"/api/v1/users",
data={
"name": "pname",
"password": "ppass",
"fullname": "P Name",
"email": "pname@example.org",
},
)
assert pu.status_code == 201
pu_etag = pu.headers.get("ETag")
gu = admin.get("/api/v1/users/%s" % pu.data["user"]["id"])
assert gu.status_code == 200
ppu = admin.put(
"/api/v1/users/%s" % gu.data["user"]["id"],
data={"state": "inactive"},
headers={"If-match": pu_etag},
)
assert ppu.status_code == 200
assert ppu.data["user"]["state"] == "inactive"
def test_change_user_to_invalid_state(admin, team_id):
pu = admin.post(
"/api/v1/users",
data={
"name": "pname",
"password": "ppass",
"fullname": "P Name",
"email": "pname@example.org",
},
)
assert pu.status_code == 201
pu_etag = pu.headers.get("ETag")
gu = admin.get("/api/v1/users/%s" % pu.data["user"]["id"])
assert gu.status_code == 200
ppu = admin.put(
"/api/v1/users/%s" % gu.data["user"]["id"],
data={"state": "kikoolol"},
headers={"If-match": pu_etag},
)
assert ppu.status_code == 400
gu = admin.get("/api/v1/users/%s" % pu.data["user"]["id"])
assert gu.status_code == 200
assert gu.data["user"]["state"] == "active"
def test_delete_user_by_id(admin, team_id):
pu = admin.post(
"/api/v1/users",
data={
"name": "pname",
"password": "ppass",
"fullname": "P Name",
"email": "pname@example.org",
},
)
pu_etag = pu.headers.get("ETag")
pu_id = pu.data["user"]["id"]
assert pu.status_code == 201
created_user = admin.get("/api/v1/users/%s" % pu_id)
assert created_user.status_code == 200
deleted_user = admin.delete(
"/api/v1/users/%s" % pu_id, headers={"If-match": pu_etag}
)
assert deleted_user.status_code == 204
gu = admin.get("/api/v1/users/%s" % pu_id)
assert gu.status_code == 404
def test_delete_user_with_no_team(admin, user_no_team):
deleted_user = admin.delete(
"/api/v1/users/%s" % user_no_team["id"],
headers={"If-match": user_no_team["etag"]},
)
assert deleted_user.status_code == 204
def test_delete_user_not_found(admin):
result = admin.delete(
"/api/v1/users/%s" % uuid.uuid4(), headers={"If-match": "mdr"}
)
assert result.status_code == 404
# Tests for the isolation
def test_create_user_as_user(user):
# simple user cannot add a new user to its team
pu = user.post(
"/api/v1/users",
data={
"name": "pname",
"password": "ppass",
"fullname": "P Name",
"email": "pname@example.org",
},
)
assert pu.status_code == 401
def test_get_all_users_as_user(user):
users = user.get("/api/v1/users")
assert users.status_code == 401
def test_get_user_as_user(user, admin):
# admin does not belong to this user's team
padmin = admin.get("/api/v1/users?where=name:admin")
padmin = admin.get("/api/v1/users/%s" % padmin.data["users"][0]["id"])
guser = user.get("/api/v1/users/%s" % padmin.data["user"]["id"])
assert guser.status_code == 401
def get_user(flask_user, name):
get = flask_user.get("/api/v1/users?where=name:%s" % name)
get2 = flask_user.get("/api/v1/users/%s" % get.data["users"][0]["id"])
return get2.data["user"], get2.headers.get("ETag")
def test_admin_can_update_another_user(admin):
user, etag = get_user(admin, "user")
assert (
admin.put(
"/api/v1/users/%s" % user["id"],
data={"name": "new_name"},
headers={"If-match": etag},
).status_code
== 200
)
def test_user_cant_update_itself(admin, user):
user_data, user_etag = get_user(admin, "user")
assert (
user.put(
"/api/v1/users/%s" % user_data["id"],
data={"name": "new_name"},
headers={"If-match": user_etag},
).status_code
== 401
)
def test_delete_as_user_epm(user, epm, admin):
puser = epm.get("/api/v1/users?where=name:user")
puser = epm.get("/api/v1/users/%s" % puser.data["users"][0]["id"])
user_etag = puser.headers.get("ETag")
user_delete = user.delete(
"/api/v1/users/%s" % puser.data["user"]["id"], headers={"If-match": user_etag}
)
assert user_delete.status_code == 401
user_delete = epm.delete(
"/api/v1/users/%s" % puser.data["user"]["id"], headers={"If-match": user_etag}
)
assert user_delete.status_code == 401
user_delete = admin.delete(
"/api/v1/users/%s" % puser.data["user"]["id"], headers={"If-match": user_etag}
)
assert user_delete.status_code == 204
def test_success_update_field_by_field(admin, team_id):
user = admin.post(
"/api/v1/users",
data={
"name": "pname",
"password": "ppass",
"fullname": "P Name",
"email": "pname@example.org",
},
).data["user"]
t = admin.get("/api/v1/users/%s" % user["id"]).data["user"]
admin.put(
"/api/v1/users/%s" % user["id"],
data={"state": "inactive"},
headers={"If-match": t["etag"]},
)
t = admin.get("/api/v1/users/%s" % user["id"]).data["user"]
assert t["name"] == "pname"
assert t["state"] == "inactive"
admin.put(
"/api/v1/users/%s" % user["id"],
data={"name": "newuser"},
headers={"If-match": t["etag"]},
)
t = admin.get("/api/v1/users/%s" % user["id"]).data["user"]
assert t["name"] == "newuser"
assert t["state"] == "inactive"
def test_get_current_user(user):
user_me = user.get("/api/v1/users/me")
assert user_me.status_code == 200
assert user_me.data["user"]["name"] == "user"
def test_update_current_user_password(admin, user):
user_data, user_etag = get_user(admin, "user")
assert user.get("/api/v1/users/me").status_code == 200
assert (
user.put(
"/api/v1/users/me",
data={"current_password": "user", "new_password": "password"},
headers={"If-match": user_etag},
).status_code
== 200
)
assert user.get("/api/v1/users/me").status_code == 401
user_data, user_etag = get_user(admin, "user")
assert (
admin.put(
"/api/v1/users/%s" % user_data["id"],
data={"password": "user"},
headers={"If-match": user_etag},
).status_code
== 200
)
assert user.get("/api/v1/users/me").status_code == 200
def test_update_current_user_current_password_wrong(admin, user):
user_data, user_etag = get_user(admin, "user")
assert user.get("/api/v1/users/me").status_code == 200
assert (
user.put(
"/api/v1/users/me",
data={"current_password": "wrong_password", "new_password": ""},
headers={"If-match": user_etag},
).status_code
== 400
)
assert user.get("/api/v1/users/me").status_code == 200
def test_update_current_user_new_password_empty(admin, user):
user_data, user_etag = get_user(admin, "user")
assert user.get("/api/v1/users/me").status_code == 200
assert (
user.put(
"/api/v1/users/me",
data={"current_password": "user", "new_password": ""},
headers={"If-match": user_etag},
).status_code
== 200
)
assert user.get("/api/v1/users/me").status_code == 200
def test_update_current_user(admin, user):
user_data, user_etag = get_user(admin, "user")
assert user.get("/api/v1/users/me").status_code == 200
me = user.put(
"/api/v1/users/me",
data={
"current_password": "user",
"new_password": "",
"email": "new_email@example.org",
"fullname": "New Name",
"timezone": "Europe/Paris",
},
headers={"If-match": user_etag},
)
assert me.status_code == 200
assert me.data["user"]["email"] == "new_email@example.org"
assert me.data["user"]["fullname"] == "New Name"
assert me.data["user"]["timezone"] == "Europe/Paris"
def test_update_current_user_sso(rh_employee, app, admin):
assert rh_employee.get("/api/v1/users/me").status_code == 200
user_data, user_etag = get_user(admin, "rh_employee")
me = rh_employee.put(
"/api/v1/users/me",
data={
"email": "new_email@example.org",
"fullname": "New Name",
"timezone": "Europe/Paris",
},
headers={"If-match": user_etag},
)
assert me.status_code == 200
assert me.data["user"]["email"] == "new_email@example.org"
assert me.data["user"]["fullname"] == "New Name"
assert me.data["user"]["timezone"] == "Europe/Paris"
def test_get_embed_remotecis(user, remoteci_user_id, user_id):
r = user.post("/api/v1/remotecis/%s/users" % remoteci_user_id)
assert r.status_code == 201
me = user.get("/api/v1/users/me?embed=remotecis").data["user"]
assert me["remotecis"][0]["id"] == remoteci_user_id
def test_success_ensure_put_me_api_secret_is_not_leaked(admin, user):
"""Test to ensure API secret is not leaked during update."""
user_data, user_etag = get_user(admin, "user")
res = user.put(
"/api/v1/users/me",
data={"current_password": "user", "new_password": "password"},
headers={"If-match": user_etag},
)
assert res.status_code == 200
assert "password" not in res.data["user"]
def test_success_ensure_put_api_secret_is_not_leaked(admin, team_id):
pu = admin.post(
"/api/v1/users",
data={
"name": "pname",
"password": "ppass",
"fullname": "P Name",
"timezone": "Europe/Paris",
"email": "pname@example.org",
},
)
pu_etag = pu.headers.get("ETag")
ppu = admin.put(
"/api/v1/users/%s" % pu.data["user"]["id"],
data={"name": "nname"},
headers={"If-match": pu_etag},
)
assert ppu.status_code == 200
assert "password" not in ppu.data["user"]
@pytest.fixture
def user_json():
return {
"name": "jdoe",
"fullname": "John Doe",
"email": "jdoe@example.org",
}
def test_create_user_schema(user_json):
try:
check_json_is_valid(create_user_schema, user_json)
except DCIException:
pytest.fail("create_user_schema is invalid")
def test_create_user_schema_required_value(user_json):
with pytest.raises(DCIException) as e:
check_json_is_valid(create_user_schema, {})
result = e.value
assert result.status_code == 400
assert len(result.payload["errors"]) == len(user_json.keys())
errors = "\n".join(result.payload["errors"])
for key in user_json.keys():
assert "'%s' is a required property" % key in errors
def test_create_user_schema_optional_value(user_json):
try:
user_json["timezone"] = "Europe/Paris"
check_json_is_valid(create_user_schema, user_json)
except DCIException:
pytest.fail("create_user_schema is invalid")
def test_create_user_schema_no_extra_field(user_json):
with pytest.raises(DCIException):
user_json["extra_field"] = "extra field"
check_json_is_valid(create_user_schema, user_json)
def test_create_user_schema_team_id_type(user_json):
with pytest.raises(DCIException):
user_json["team_id"] = "not an uuid"
check_json_is_valid(create_user_schema, user_json)
def test_create_user_schema_email_format(user_json):
with pytest.raises(DCIException):
user_json["email"] = "not an email"
check_json_is_valid(create_user_schema, user_json)
def test_update_user_schema():
try:
check_json_is_valid(
update_user_schema,
{
"id": "909b4ad1-1c38-4fc3-9454-57dc6d80b44d",
"etag": "8407cdbf-04d1-4453-8d35-19e4425c535b",
"name": "jdoe",
"fullname": "John Doe",
"email": "jdoe@example.org",
},
)
except DCIException:
pytest.fail("update_user_schema is invalid")
def test_get_user_then_update_user_doesnt_raise_error_500(admin, team_id):
request = admin.post(
"/api/v1/users",
data={
"name": "user1",
"password": "password for user1",
"fullname": "Mr Uesr 1",
"email": "user1@example.org",
},
)
user = request.data["user"]
admin.post("/api/v1/teams/%s/users/%s" % (team_id, user["id"]))
user = admin.get("/api/v1/users/%s" % request.data["user"]["id"]).data["user"]
user["fullname"] = "Mr User 1"
request = admin.put(
"/api/v1/users/%s" % user["id"], data=user, headers={"If-match": user["etag"]}
)
assert request.status_code == 200
assert request.data["user"]["fullname"] == "Mr User 1"
|
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import importlib
import os
import shutil
import sys
import sphinx_rtd_theme
# -- Mock necessary classes -----------------------------------------------
from unittest.mock import MagicMock
sys.path.insert(0, os.path.abspath(".")) # noqa: F402
sys.path.insert(0, os.path.abspath("..")) # noqa: F402
MOCK_MODULES = ["pywayland._ffi"] # noqa: F402
sys.modules.update((mod_name, MagicMock()) for mod_name in MOCK_MODULES) # noqa: F402
from pywayland import __version__
# -- Build pywayland.protocol w/docs --------------------------------------
from protocol_build import wayland_version, protocols_version, protocols_build
protocol_build_dir = "../pywayland/protocol/"
protocol_doc_dir = "module/protocol"
index_header = """\
.. _protocol:
Protocol Modules
================
Wayland protocols built against Wayland {} and Wayland Protocols {}.
.. toctree::
:maxdepth: 2
""".format(
wayland_version, protocols_version
)
protocol_header = """\
.. module:: pywayland.protocol.{module}
{module} Module
{empty:=^{len}}======="""
protocol_rst = """\
{protocol}
{empty:-^{len}}
.. wl_protocol:: pywayland.protocol.{module} {protocol}"""
# There is probably a better way to do this in Sphinx, templating or something
# ... but this works
def protocol_doc(input_dir, output_dir):
modules = os.listdir(input_dir)
modules = [
module
for module in modules
if os.path.isdir(os.path.join(input_dir, module)) and module != "__pycache__"
]
existing_files = [
filename for filename in os.listdir(output_dir) if filename != "index.rst"
]
rm_files = [
filename
for filename in existing_files
if os.path.splitext(filename)[0] not in modules
]
for rm_file in rm_files:
if os.path.isdir(rm_file):
shutil.rmtree(os.path.join(output_dir, rm_file))
else:
os.remove(os.path.join(output_dir, rm_file))
# Write out the index file
index_file = os.path.join(output_dir, "index.rst")
if os.path.exists(index_file):
with open(index_file) as f:
existing_index = f.read()
else:
existing_index = ""
generated_index = index_header + "".join(
" {}\n".format(m) for m in sorted(modules)
)
if existing_index != generated_index:
with open(index_file, "w") as f:
f.write(generated_index)
for module in modules:
output = [protocol_header.format(module=module, len=len(module), empty="")]
# get all the python files that we want to document
doc_files = os.listdir(os.path.join(input_dir, module))
doc_files = [
os.path.splitext(doc_file)[0]
for doc_file in doc_files
if doc_file != "__init__.py" and os.path.splitext(doc_file)[1] == ".py"
]
# build the rst for each protocol
for doc_file in doc_files:
mod = importlib.import_module(
"pywayland.protocol.{}.{}".format(module, doc_file)
)
# Get out the name of the class in the module
class_name = "".join(x.capitalize() for x in doc_file.split("_"))
for mod_upper in dir(mod):
if mod_upper == class_name:
break
else:
raise RuntimeError(
"Unable to find module: {}, {}".format(doc_file, mod)
)
output.append(
protocol_rst.format(
module=module, protocol=mod_upper, len=len(mod_upper), empty=""
)
)
# build the index.rst for the module
module_file = os.path.join(output_dir, "{}.rst".format(module))
protocol_output = "\n\n".join(output)
# if file exists and is unchanged, skip
if os.path.exists(module_file):
with open(module_file) as f:
existing_output = f.read()
if existing_output == protocol_output:
continue
with open(module_file, "w") as f:
f.write("\n\n".join(output))
# Build the protocol directory on RTD
if os.environ.get("READTHEDOCS", None):
protocols_build(protocol_build_dir)
# Re-build the protocol documentation directory
if not os.path.exists(protocol_doc_dir):
os.makedirs(protocol_doc_dir)
protocol_doc(protocol_build_dir, protocol_doc_dir)
# -- General configuration ------------------------------------------------
extensions = ["sphinx.ext.autodoc", "sphinx_wl_protocol"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = ".rst"
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "pywayland"
copyright = "2016, Sean Vig"
# The short X.Y version.
version = __version__.split("a")[0]
# The full version, including alpha/beta/rc tags.
release = __version__
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["_build"]
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# -- Options for HTML output ----------------------------------------------
# Set the html_theme when building locally
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Output file base name for HTML help builder.
htmlhelp_basename = "pywaylanddoc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
("index", "pywayland.tex", "pywayland Documentation", "Sean Vig", "manual"),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [("index", "pywayland", "pywayland Documentation", ["Sean Vig"], 1)]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
"index",
"pywayland",
"pywayland Documentation",
"Sean Vig",
"pywayland",
"Python bindings for the libwayland library",
"Miscellaneous",
),
]
|
|
from django.contrib import admin
from calaccess_raw import models
from .base import BaseAdmin
class AcronymsCdAdmin(BaseAdmin):
list_display = ("acronym", "stands_for", "effect_dt", "a_desc")
date_hierarchy = 'effect_dt'
search_fields = ("acronym", "a_desc")
class AddressCdAdmin(BaseAdmin):
pass
class BallotMeasuresCdAdmin(BaseAdmin):
pass
class EfsFilingLogCdAdmin(BaseAdmin):
pass
class FilersCdAdmin(BaseAdmin):
pass
class FilerAcronymsCdAdmin(BaseAdmin):
pass
class FilerAddressCdAdmin(BaseAdmin):
pass
class FilerEthicsClassCdAdmin(BaseAdmin):
pass
class FilerInterestsCdAdmin(BaseAdmin):
pass
class FilerLinksCdAdmin(BaseAdmin):
pass
class FilerStatusTypesCdAdmin(BaseAdmin):
list_display = (
"status_type",
"status_desc"
)
class FilerToFilerTypeCdAdmin(BaseAdmin):
list_display = (
"filer_id",
"filer_type",
"effect_dt",
"active",
"session_id",
"race",
"district_cd",
"party_cd"
)
list_filter = (
"active",
"filer_type",
"category",
"sub_category",
"category_type",
"party_cd",
"session_id"
)
date_hierarchy = "effect_dt"
search_fields = (
"filer_id",
)
class FilerTypesCdAdmin(BaseAdmin):
list_display = (
"filer_type",
"description",
"grp_type",
"calc_use",
"grace_period",
)
class FilerXrefCdAdmin(BaseAdmin):
pass
class FilingPeriodCdAdmin(BaseAdmin):
list_display = (
"period_id", "start_date", "end_date", "period_desc",
)
search_fields = (
"period_id",
)
class GroupTypesCdAdmin(BaseAdmin):
pass
class HeaderCdAdmin(BaseAdmin):
pass
class HdrCdAdmin(BaseAdmin):
pass
class ImageLinksCdAdmin(BaseAdmin):
pass
class LegislativeSessionsCdAdmin(BaseAdmin):
pass
class LobbyingChgLogCdAdmin(BaseAdmin):
pass
class LobbyistContributions1CdAdmin(BaseAdmin):
pass
class LobbyistContributions2CdAdmin(BaseAdmin):
pass
class LobbyistContributions3CdAdmin(BaseAdmin):
pass
class LobbyistEmployer1CdAdmin(BaseAdmin):
pass
class LobbyistEmployer2CdAdmin(BaseAdmin):
pass
class LobbyistEmployer3CdAdmin(BaseAdmin):
pass
class LobbyistEmployerFirms1CdAdmin(BaseAdmin):
pass
class LobbyistEmployerFirms2CdAdmin(BaseAdmin):
pass
class LobbyistEmpLobbyist1CdAdmin(BaseAdmin):
pass
class LobbyistEmpLobbyist2CdAdmin(BaseAdmin):
pass
class LobbyistFirm1CdAdmin(BaseAdmin):
pass
class LobbyistFirm2CdAdmin(BaseAdmin):
pass
class LobbyistFirm3CdAdmin(BaseAdmin):
pass
class LobbyistFirmEmployer1CdAdmin(BaseAdmin):
pass
class LobbyistFirmEmployer2CdAdmin(BaseAdmin):
pass
class LobbyistFirmLobbyist1CdAdmin(BaseAdmin):
pass
class LobbyistFirmLobbyist2CdAdmin(BaseAdmin):
pass
class LookupCodeAdmin(BaseAdmin):
list_display = (
"code_type",
"code_id",
"code_desc",
)
list_filter = (
"code_type",
)
search_fields = (
"code_type",
"code_id",
"code_desc",
)
class NamesCdAdmin(BaseAdmin):
pass
class ReceivedFilingsCdAdmin(BaseAdmin):
pass
class ReportsCdAdmin(BaseAdmin):
pass
admin.site.register(models.AcronymsCd, AcronymsCdAdmin)
admin.site.register(models.AddressCd, AddressCdAdmin)
admin.site.register(models.BallotMeasuresCd, BallotMeasuresCdAdmin)
admin.site.register(models.EfsFilingLogCd, BaseAdmin)
admin.site.register(models.FilersCd, FilersCdAdmin)
admin.site.register(models.FilerAcronymsCd, FilerAcronymsCdAdmin)
admin.site.register(models.FilerAddressCd, FilerAddressCdAdmin)
admin.site.register(models.FilerEthicsClassCd, FilerEthicsClassCdAdmin)
admin.site.register(models.FilerInterestsCd, FilerInterestsCdAdmin)
admin.site.register(models.FilerLinksCd, FilerLinksCdAdmin)
admin.site.register(models.FilerStatusTypesCd, FilerStatusTypesCdAdmin)
admin.site.register(models.FilerToFilerTypeCd, FilerToFilerTypeCdAdmin)
admin.site.register(models.FilerTypesCd, FilerTypesCdAdmin)
admin.site.register(models.FilerXrefCd, FilerXrefCdAdmin)
admin.site.register(models.FilingPeriodCd, FilingPeriodCdAdmin)
admin.site.register(models.GroupTypesCd, GroupTypesCdAdmin)
admin.site.register(models.HeaderCd, HeaderCdAdmin)
admin.site.register(models.HdrCd, HdrCdAdmin)
admin.site.register(models.ImageLinksCd, ImageLinksCdAdmin)
admin.site.register(models.LegislativeSessionsCd, LegislativeSessionsCdAdmin)
admin.site.register(models.LobbyingChgLogCd, LobbyingChgLogCdAdmin)
admin.site.register(
models.LobbyistContributions1Cd,
LobbyistContributions1CdAdmin
)
admin.site.register(
models.LobbyistContributions2Cd,
LobbyistContributions2CdAdmin
)
admin.site.register(
models.LobbyistContributions3Cd,
LobbyistContributions3CdAdmin
)
admin.site.register(models.LobbyistEmployer1Cd, LobbyistEmployer1CdAdmin)
admin.site.register(models.LobbyistEmployer2Cd, LobbyistEmployer2CdAdmin)
admin.site.register(models.LobbyistEmployer3Cd, LobbyistEmployer3CdAdmin)
admin.site.register(
models.LobbyistEmployerFirms1Cd,
LobbyistEmployerFirms1CdAdmin
)
admin.site.register(
models.LobbyistEmployerFirms2Cd,
LobbyistEmployerFirms2CdAdmin
)
admin.site.register(
models.LobbyistEmpLobbyist1Cd,
LobbyistEmpLobbyist1CdAdmin
)
admin.site.register(
models.LobbyistEmpLobbyist2Cd,
LobbyistEmpLobbyist2CdAdmin
)
admin.site.register(models.LobbyistFirm1Cd, LobbyistFirm1CdAdmin)
admin.site.register(models.LobbyistFirm2Cd, LobbyistFirm2CdAdmin)
admin.site.register(models.LobbyistFirm3Cd, LobbyistFirm3CdAdmin)
admin.site.register(
models.LobbyistFirmEmployer1Cd,
LobbyistFirmEmployer1CdAdmin
)
admin.site.register(
models.LobbyistFirmEmployer2Cd,
LobbyistFirmEmployer2CdAdmin
)
admin.site.register(
models.LobbyistFirmLobbyist1Cd,
LobbyistFirmLobbyist1CdAdmin
)
admin.site.register(
models.LobbyistFirmLobbyist2Cd,
LobbyistFirmLobbyist2CdAdmin
)
admin.site.register(models.LookupCode, LookupCodeAdmin)
admin.site.register(models.NamesCd, NamesCdAdmin)
admin.site.register(models.ReceivedFilingsCd, ReceivedFilingsCdAdmin)
admin.site.register(models.ReportsCd, ReportsCdAdmin)
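# The classes and registrations above all follow the same django.contrib.admin
# pattern: subclass BaseAdmin, optionally set list_display/list_filter/search_fields,
# then register the ModelAdmin against its model. A hedged sketch for a hypothetical
# new model (names below are illustrative, not part of this app):
#
#     class ExampleCdAdmin(BaseAdmin):
#         list_display = ("example_id", "description")
#         search_fields = ("example_id",)
#
#     admin.site.register(models.ExampleCd, ExampleCdAdmin)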
|
|
#!python
"""Bootstrap distribute installation
If you want to use setuptools in your package's setup.py, just include this
file in the same directory with it, and add this to the top of your setup.py::
from distribute_setup import use_setuptools
use_setuptools()
If you want to require a specific version of setuptools, set a download
mirror, or use an alternate download directory, you can do so by supplying
the appropriate options to ``use_setuptools()``.
This file can also be run as a script to install or upgrade setuptools.
"""
import os
import shutil
import sys
import time
import fnmatch
import tempfile
import tarfile
import optparse
from distutils import log
try:
from site import USER_SITE
except ImportError:
USER_SITE = None
try:
import subprocess
def _python_cmd(*args):
args = (sys.executable,) + args
return subprocess.call(args) == 0
except ImportError:
# will be used for python 2.3
def _python_cmd(*args):
args = (sys.executable,) + args
# quoting arguments if windows
if sys.platform == 'win32':
def quote(arg):
if ' ' in arg:
return '"%s"' % arg
return arg
args = [quote(arg) for arg in args]
return os.spawnl(os.P_WAIT, sys.executable, *args) == 0
DEFAULT_VERSION = "0.6.35"
DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/"
SETUPTOOLS_FAKED_VERSION = "0.6c11"
SETUPTOOLS_PKG_INFO = """\
Metadata-Version: 1.0
Name: setuptools
Version: %s
Summary: xxxx
Home-page: xxx
Author: xxx
Author-email: xxx
License: xxx
Description: xxx
""" % SETUPTOOLS_FAKED_VERSION
def _install(tarball, install_args=()):
# extracting the tarball
tmpdir = tempfile.mkdtemp()
log.warn('Extracting in %s', tmpdir)
old_wd = os.getcwd()
try:
os.chdir(tmpdir)
tar = tarfile.open(tarball)
_extractall(tar)
tar.close()
# going in the directory
subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
os.chdir(subdir)
log.warn('Now working in %s', subdir)
# installing
log.warn('Installing Distribute')
if not _python_cmd('setup.py', 'install', *install_args):
log.warn('Something went wrong during the installation.')
log.warn('See the error message above.')
# exitcode will be 2
return 2
finally:
os.chdir(old_wd)
shutil.rmtree(tmpdir)
def _build_egg(egg, tarball, to_dir):
# extracting the tarball
tmpdir = tempfile.mkdtemp()
log.warn('Extracting in %s', tmpdir)
old_wd = os.getcwd()
try:
os.chdir(tmpdir)
tar = tarfile.open(tarball)
_extractall(tar)
tar.close()
# going in the directory
subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
os.chdir(subdir)
log.warn('Now working in %s', subdir)
# building an egg
log.warn('Building a Distribute egg in %s', to_dir)
_python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
finally:
os.chdir(old_wd)
shutil.rmtree(tmpdir)
# returning the result
log.warn(egg)
if not os.path.exists(egg):
raise IOError('Could not build the egg.')
def _do_download(version, download_base, to_dir, download_delay):
egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg'
% (version, sys.version_info[0], sys.version_info[1]))
if not os.path.exists(egg):
tarball = download_setuptools(version, download_base,
to_dir, download_delay)
_build_egg(egg, tarball, to_dir)
sys.path.insert(0, egg)
import setuptools
setuptools.bootstrap_install_from = egg
def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
to_dir=os.curdir, download_delay=15, no_fake=True):
# making sure we use the absolute path
to_dir = os.path.abspath(to_dir)
was_imported = 'pkg_resources' in sys.modules or \
'setuptools' in sys.modules
try:
try:
import pkg_resources
if not hasattr(pkg_resources, '_distribute'):
if not no_fake:
_fake_setuptools()
raise ImportError
except ImportError:
return _do_download(version, download_base, to_dir, download_delay)
try:
pkg_resources.require("distribute>=" + version)
return
except pkg_resources.VersionConflict:
e = sys.exc_info()[1]
if was_imported:
sys.stderr.write(
"The required version of distribute (>=%s) is not available,\n"
"and can't be installed while this script is running. Please\n"
"install a more recent version first, using\n"
"'easy_install -U distribute'."
"\n\n(Currently using %r)\n" % (version, e.args[0]))
sys.exit(2)
else:
del pkg_resources, sys.modules['pkg_resources'] # reload ok
return _do_download(version, download_base, to_dir,
download_delay)
except pkg_resources.DistributionNotFound:
return _do_download(version, download_base, to_dir,
download_delay)
finally:
if not no_fake:
_create_fake_setuptools_pkg_info(to_dir)
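# use_setuptools() is the public entry point; a setup.py can pin a version or
# redirect the download (sketch; the mirror URL is a placeholder, the version
# matches DEFAULT_VERSION above):
#
#     use_setuptools(version="0.6.35",
#                    download_base="https://mirror.example/d/",
#                    download_delay=0)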
def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
to_dir=os.curdir, delay=15):
"""Download distribute from a specified location and return its filename
`version` should be a valid distribute version number that is available
as an egg for download under the `download_base` URL (which should end
with a '/'). `to_dir` is the directory where the egg will be downloaded.
`delay` is the number of seconds to pause before an actual download
attempt.
"""
# making sure we use the absolute path
to_dir = os.path.abspath(to_dir)
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
tgz_name = "distribute-%s.tar.gz" % version
url = download_base + tgz_name
saveto = os.path.join(to_dir, tgz_name)
src = dst = None
if not os.path.exists(saveto): # Avoid repeated downloads
try:
log.warn("Downloading %s", url)
src = urlopen(url)
# Read/write all in one block, so we don't create a corrupt file
# if the download is interrupted.
data = src.read()
dst = open(saveto, "wb")
dst.write(data)
finally:
if src:
src.close()
if dst:
dst.close()
return os.path.realpath(saveto)
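# Standalone use of the downloader (sketch): fetch the tarball into a scratch
# directory without the usual 15 second delay.
#
#     tarball = download_setuptools(to_dir="/tmp", delay=0)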
def _no_sandbox(function):
def __no_sandbox(*args, **kw):
try:
from setuptools.sandbox import DirectorySandbox
if not hasattr(DirectorySandbox, '_old'):
def violation(*args):
pass
DirectorySandbox._old = DirectorySandbox._violation
DirectorySandbox._violation = violation
patched = True
else:
patched = False
except ImportError:
patched = False
try:
return function(*args, **kw)
finally:
if patched:
DirectorySandbox._violation = DirectorySandbox._old #@UndefinedVariable
del DirectorySandbox._old
return __no_sandbox
def _patch_file(path, content):
"""Will backup the file then patch it"""
f = open(path)
existing_content = f.read()
f.close()
if existing_content == content:
# already patched
log.warn('Already patched.')
return False
log.warn('Patching...')
_rename_path(path)
f = open(path, 'w')
try:
f.write(content)
finally:
f.close()
return True
_patch_file = _no_sandbox(_patch_file)
def _same_content(path, content):
f = open(path)
existing_content = f.read()
f.close()
return existing_content == content
def _rename_path(path):
new_name = path + '.OLD.%s' % time.time()
log.warn('Renaming %s to %s', path, new_name)
os.rename(path, new_name)
return new_name
def _remove_flat_installation(placeholder):
if not os.path.isdir(placeholder):
        log.warn('Unknown installation at %s', placeholder)
return False
found = False
for file in os.listdir(placeholder):
if fnmatch.fnmatch(file, 'setuptools*.egg-info'):
found = True
break
if not found:
log.warn('Could not locate setuptools*.egg-info')
return
log.warn('Moving elements out of the way...')
pkg_info = os.path.join(placeholder, file)
if os.path.isdir(pkg_info):
patched = _patch_egg_dir(pkg_info)
else:
patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO)
if not patched:
log.warn('%s already patched.', pkg_info)
return False
# now let's move the files out of the way
for element in ('setuptools', 'pkg_resources.py', 'site.py'):
element = os.path.join(placeholder, element)
if os.path.exists(element):
_rename_path(element)
else:
log.warn('Could not find the %s element of the '
'Setuptools distribution', element)
return True
_remove_flat_installation = _no_sandbox(_remove_flat_installation)
def _after_install(dist):
log.warn('After install bootstrap.')
placeholder = dist.get_command_obj('install').install_purelib
_create_fake_setuptools_pkg_info(placeholder)
def _create_fake_setuptools_pkg_info(placeholder):
if not placeholder or not os.path.exists(placeholder):
log.warn('Could not find the install location')
return
pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1])
setuptools_file = 'setuptools-%s-py%s.egg-info' % \
(SETUPTOOLS_FAKED_VERSION, pyver)
pkg_info = os.path.join(placeholder, setuptools_file)
if os.path.exists(pkg_info):
log.warn('%s already exists', pkg_info)
return
log.warn('Creating %s', pkg_info)
try:
f = open(pkg_info, 'w')
except EnvironmentError:
log.warn("Don't have permissions to write %s, skipping", pkg_info)
return
try:
f.write(SETUPTOOLS_PKG_INFO)
finally:
f.close()
pth_file = os.path.join(placeholder, 'setuptools.pth')
log.warn('Creating %s', pth_file)
f = open(pth_file, 'w')
try:
f.write(os.path.join(os.curdir, setuptools_file))
finally:
f.close()
_create_fake_setuptools_pkg_info = _no_sandbox(
_create_fake_setuptools_pkg_info
)
def _patch_egg_dir(path):
# let's check if it's already patched
pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
if os.path.exists(pkg_info):
if _same_content(pkg_info, SETUPTOOLS_PKG_INFO):
log.warn('%s already patched.', pkg_info)
return False
_rename_path(path)
os.mkdir(path)
os.mkdir(os.path.join(path, 'EGG-INFO'))
pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
f = open(pkg_info, 'w')
try:
f.write(SETUPTOOLS_PKG_INFO)
finally:
f.close()
return True
_patch_egg_dir = _no_sandbox(_patch_egg_dir)
def _before_install():
log.warn('Before install bootstrap.')
_fake_setuptools()
def _under_prefix(location):
if 'install' not in sys.argv:
return True
args = sys.argv[sys.argv.index('install') + 1:]
for index, arg in enumerate(args):
for option in ('--root', '--prefix'):
if arg.startswith('%s=' % option):
                top_dir = arg.split('=', 1)[-1]
return location.startswith(top_dir)
elif arg == option:
                if len(args) > index + 1:
top_dir = args[index + 1]
return location.startswith(top_dir)
if arg == '--user' and USER_SITE is not None:
return location.startswith(USER_SITE)
return True
def _fake_setuptools():
log.warn('Scanning installed packages')
try:
import pkg_resources
except ImportError:
# we're cool
log.warn('Setuptools or Distribute does not seem to be installed.')
return
ws = pkg_resources.working_set
try:
setuptools_dist = ws.find(
pkg_resources.Requirement.parse('setuptools', replacement=False)
)
except TypeError:
# old distribute API
setuptools_dist = ws.find(
pkg_resources.Requirement.parse('setuptools')
)
if setuptools_dist is None:
log.warn('No setuptools distribution found')
return
# detecting if it was already faked
setuptools_location = setuptools_dist.location
log.warn('Setuptools installation detected at %s', setuptools_location)
    # if --root or --prefix was provided, and if
# setuptools is not located in them, we don't patch it
if not _under_prefix(setuptools_location):
log.warn('Not patching, --root or --prefix is installing Distribute'
' in another location')
return
    # let's see if it's an egg
if not setuptools_location.endswith('.egg'):
log.warn('Non-egg installation')
res = _remove_flat_installation(setuptools_location)
if not res:
return
else:
log.warn('Egg installation')
pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO')
if (os.path.exists(pkg_info) and
_same_content(pkg_info, SETUPTOOLS_PKG_INFO)):
log.warn('Already patched.')
return
log.warn('Patching...')
# let's create a fake egg replacing setuptools one
res = _patch_egg_dir(setuptools_location)
if not res:
return
log.warn('Patching complete.')
_relaunch()
def _relaunch():
log.warn('Relaunching...')
# we have to relaunch the process
# pip marker to avoid a relaunch bug
_cmd1 = ['-c', 'install', '--single-version-externally-managed']
_cmd2 = ['-c', 'install', '--record']
if sys.argv[:3] == _cmd1 or sys.argv[:3] == _cmd2:
sys.argv[0] = 'setup.py'
args = [sys.executable] + sys.argv
sys.exit(subprocess.call(args))
def _extractall(self, path=".", members=None):
"""Extract all members from the archive to the current working
directory and set owner, modification time and permissions on
directories afterwards. `path' specifies a different directory
to extract to. `members' is optional and must be a subset of the
list returned by getmembers().
"""
import copy
import operator
from tarfile import ExtractError
directories = []
if members is None:
members = self
for tarinfo in members:
if tarinfo.isdir():
# Extract directories with a safe mode.
directories.append(tarinfo)
tarinfo = copy.copy(tarinfo)
tarinfo.mode = 448 # decimal for oct 0700
self.extract(tarinfo, path)
# Reverse sort directories.
if sys.version_info < (2, 4):
def sorter(dir1, dir2):
return cmp(dir1.name, dir2.name)
directories.sort(sorter)
directories.reverse()
else:
directories.sort(key=operator.attrgetter('name'), reverse=True)
# Set correct owner, mtime and filemode on directories.
for tarinfo in directories:
dirpath = os.path.join(path, tarinfo.name)
try:
self.chown(tarinfo, dirpath)
self.utime(tarinfo, dirpath)
self.chmod(tarinfo, dirpath)
except ExtractError:
e = sys.exc_info()[1]
if self.errorlevel > 1:
raise
else:
self._dbg(1, "tarfile: %s" % e)
def _build_install_args(options):
"""
Build the arguments to 'python setup.py install' on the distribute package
"""
install_args = []
if options.user_install:
if sys.version_info < (2, 6):
log.warn("--user requires Python 2.6 or later")
raise SystemExit(1)
install_args.append('--user')
return install_args
def _parse_args():
"""
Parse the command line for options
"""
parser = optparse.OptionParser()
parser.add_option(
'--user', dest='user_install', action='store_true', default=False,
help='install in user site package (requires Python 2.6 or later)')
parser.add_option(
'--download-base', dest='download_base', metavar="URL",
default=DEFAULT_URL,
help='alternative URL from where to download the distribute package')
options, args = parser.parse_args()
# positional arguments are ignored
return options
def main(version=DEFAULT_VERSION):
"""Install or upgrade setuptools and EasyInstall"""
options = _parse_args()
tarball = download_setuptools(download_base=options.download_base)
return _install(tarball, _build_install_args(options))
if __name__ == '__main__':
sys.exit(main())
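# When run as a script, the module installs or upgrades distribute itself.
# The two options below come from _parse_args(); the mirror URL is a placeholder:
#
#     python distribute_setup.py
#     python distribute_setup.py --user
#     python distribute_setup.py --download-base=https://mirror.example/packages/source/d/distribute/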
|
|
# Copyright (c) 2012 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests For CellsManager
"""
import copy
import datetime
import mock
from oslo_config import cfg
from oslo_utils import timeutils
from nova.cells import messaging
from nova.cells import utils as cells_utils
from nova import context
from nova import objects
from nova import test
from nova.tests.unit.cells import fakes
from nova.tests.unit import fake_instance
from nova.tests.unit import fake_server_actions
from nova.tests.unit.objects import test_flavor
CONF = cfg.CONF
CONF.import_opt('compute_topic', 'nova.compute.rpcapi')
FAKE_COMPUTE_NODES = [dict(id=1, host='host1'), dict(id=2, host='host2')]
FAKE_SERVICES = [dict(id=1, host='host1',
compute_node=[FAKE_COMPUTE_NODES[0]]),
dict(id=2, host='host2',
compute_node=[FAKE_COMPUTE_NODES[1]]),
dict(id=3, host='host3', compute_node=[])]
FAKE_TASK_LOGS = [dict(id=1, host='host1'),
dict(id=2, host='host2')]
class CellsManagerClassTestCase(test.NoDBTestCase):
"""Test case for CellsManager class."""
def setUp(self):
super(CellsManagerClassTestCase, self).setUp()
fakes.init(self)
# pick a child cell to use for tests.
self.our_cell = 'grandchild-cell1'
self.cells_manager = fakes.get_cells_manager(self.our_cell)
self.msg_runner = self.cells_manager.msg_runner
self.state_manager = fakes.get_state_manager(self.our_cell)
self.driver = self.cells_manager.driver
self.ctxt = 'fake_context'
def _get_fake_response(self, raw_response=None, exc=False):
if exc:
return messaging.Response('fake', test.TestingException(),
True)
if raw_response is None:
raw_response = 'fake-response'
return messaging.Response('fake', raw_response, False)
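    # The mox-based tests below share one record/replay shape (sketch):
    #
    #     self.mox.StubOutWithMock(obj, 'method')    # swap in a mock
    #     obj.method(args).AndReturn(value)          # record the expected call
    #     self.mox.ReplayAll()                       # switch to replay mode
    #     <call the CellsManager method under test>  # expectations verified at teardown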
def test_get_cell_info_for_neighbors(self):
self.mox.StubOutWithMock(self.cells_manager.state_manager,
'get_cell_info_for_neighbors')
self.cells_manager.state_manager.get_cell_info_for_neighbors()
self.mox.ReplayAll()
self.cells_manager.get_cell_info_for_neighbors(self.ctxt)
def test_post_start_hook_child_cell(self):
self.mox.StubOutWithMock(self.driver, 'start_servers')
self.mox.StubOutWithMock(context, 'get_admin_context')
self.mox.StubOutWithMock(self.cells_manager, '_update_our_parents')
self.driver.start_servers(self.msg_runner)
context.get_admin_context().AndReturn(self.ctxt)
self.cells_manager._update_our_parents(self.ctxt)
self.mox.ReplayAll()
self.cells_manager.post_start_hook()
def test_post_start_hook_middle_cell(self):
cells_manager = fakes.get_cells_manager('child-cell2')
msg_runner = cells_manager.msg_runner
driver = cells_manager.driver
self.mox.StubOutWithMock(driver, 'start_servers')
self.mox.StubOutWithMock(context, 'get_admin_context')
self.mox.StubOutWithMock(msg_runner,
'ask_children_for_capabilities')
self.mox.StubOutWithMock(msg_runner,
'ask_children_for_capacities')
driver.start_servers(msg_runner)
context.get_admin_context().AndReturn(self.ctxt)
msg_runner.ask_children_for_capabilities(self.ctxt)
msg_runner.ask_children_for_capacities(self.ctxt)
self.mox.ReplayAll()
cells_manager.post_start_hook()
def test_update_our_parents(self):
self.mox.StubOutWithMock(self.msg_runner,
'tell_parents_our_capabilities')
self.mox.StubOutWithMock(self.msg_runner,
'tell_parents_our_capacities')
self.msg_runner.tell_parents_our_capabilities(self.ctxt)
self.msg_runner.tell_parents_our_capacities(self.ctxt)
self.mox.ReplayAll()
self.cells_manager._update_our_parents(self.ctxt)
def test_build_instances(self):
build_inst_kwargs = {'instances': [objects.Instance(),
objects.Instance()]}
self.mox.StubOutWithMock(self.msg_runner, 'build_instances')
our_cell = self.msg_runner.state_manager.get_my_state()
self.msg_runner.build_instances(self.ctxt, our_cell, build_inst_kwargs)
self.mox.ReplayAll()
self.cells_manager.build_instances(self.ctxt,
build_inst_kwargs=build_inst_kwargs)
def test_build_instances_old_flavor(self):
flavor_dict = test_flavor.fake_flavor
args = {'filter_properties': {'instance_type': flavor_dict},
'instances': [objects.Instance()]}
with mock.patch.object(self.msg_runner, 'build_instances') as mock_bi:
self.cells_manager.build_instances(self.ctxt,
build_inst_kwargs=args)
filter_properties = mock_bi.call_args[0][2]['filter_properties']
self.assertIsInstance(filter_properties['instance_type'],
objects.Flavor)
def test_build_instances_old_instances(self):
args = {'instances': [fake_instance.fake_db_instance()]}
with mock.patch.object(self.msg_runner, 'build_instances') as mock_bi:
self.cells_manager.build_instances(self.ctxt,
build_inst_kwargs=args)
self.assertIsInstance(mock_bi.call_args[0][2]['instances'][0],
objects.Instance)
def test_run_compute_api_method(self):
# Args should just be silently passed through
cell_name = 'fake-cell-name'
method_info = 'fake-method-info'
self.mox.StubOutWithMock(self.msg_runner,
'run_compute_api_method')
fake_response = self._get_fake_response()
self.msg_runner.run_compute_api_method(self.ctxt,
cell_name,
method_info,
True).AndReturn(fake_response)
self.mox.ReplayAll()
response = self.cells_manager.run_compute_api_method(
self.ctxt, cell_name=cell_name, method_info=method_info,
call=True)
self.assertEqual('fake-response', response)
def test_instance_update_at_top(self):
self.mox.StubOutWithMock(self.msg_runner, 'instance_update_at_top')
self.msg_runner.instance_update_at_top(self.ctxt, 'fake-instance')
self.mox.ReplayAll()
self.cells_manager.instance_update_at_top(self.ctxt,
instance='fake-instance')
def test_instance_destroy_at_top(self):
self.mox.StubOutWithMock(self.msg_runner, 'instance_destroy_at_top')
self.msg_runner.instance_destroy_at_top(self.ctxt, 'fake-instance')
self.mox.ReplayAll()
self.cells_manager.instance_destroy_at_top(self.ctxt,
instance='fake-instance')
def test_instance_delete_everywhere(self):
self.mox.StubOutWithMock(self.msg_runner,
'instance_delete_everywhere')
self.msg_runner.instance_delete_everywhere(self.ctxt,
'fake-instance',
'fake-type')
self.mox.ReplayAll()
self.cells_manager.instance_delete_everywhere(
self.ctxt, instance='fake-instance',
delete_type='fake-type')
def test_instance_fault_create_at_top(self):
self.mox.StubOutWithMock(self.msg_runner,
'instance_fault_create_at_top')
self.msg_runner.instance_fault_create_at_top(self.ctxt,
'fake-fault')
self.mox.ReplayAll()
self.cells_manager.instance_fault_create_at_top(
self.ctxt, instance_fault='fake-fault')
def test_bw_usage_update_at_top(self):
self.mox.StubOutWithMock(self.msg_runner,
'bw_usage_update_at_top')
self.msg_runner.bw_usage_update_at_top(self.ctxt,
'fake-bw-info')
self.mox.ReplayAll()
self.cells_manager.bw_usage_update_at_top(
self.ctxt, bw_update_info='fake-bw-info')
def test_heal_instances(self):
self.flags(instance_updated_at_threshold=1000,
instance_update_num_instances=2,
group='cells')
fake_context = context.RequestContext('fake', 'fake')
stalled_time = timeutils.utcnow()
updated_since = stalled_time - datetime.timedelta(seconds=1000)
def utcnow():
return stalled_time
call_info = {'get_instances': 0, 'sync_instances': []}
instances = ['instance1', 'instance2', 'instance3']
def get_instances_to_sync(context, **kwargs):
self.assertEqual(context, fake_context)
call_info['shuffle'] = kwargs.get('shuffle')
call_info['project_id'] = kwargs.get('project_id')
call_info['updated_since'] = kwargs.get('updated_since')
call_info['get_instances'] += 1
return iter(instances)
def instance_get_by_uuid(context, uuid):
return instances[int(uuid[-1]) - 1]
def sync_instance(context, instance):
self.assertEqual(context, fake_context)
call_info['sync_instances'].append(instance)
self.stubs.Set(cells_utils, 'get_instances_to_sync',
get_instances_to_sync)
self.stubs.Set(self.cells_manager.db, 'instance_get_by_uuid',
instance_get_by_uuid)
self.stubs.Set(self.cells_manager, '_sync_instance',
sync_instance)
self.stubs.Set(timeutils, 'utcnow', utcnow)
self.cells_manager._heal_instances(fake_context)
self.assertEqual(call_info['shuffle'], True)
self.assertIsNone(call_info['project_id'])
self.assertEqual(call_info['updated_since'], updated_since)
self.assertEqual(call_info['get_instances'], 1)
# Only first 2
self.assertEqual(call_info['sync_instances'],
instances[:2])
call_info['sync_instances'] = []
self.cells_manager._heal_instances(fake_context)
self.assertEqual(call_info['shuffle'], True)
self.assertIsNone(call_info['project_id'])
self.assertEqual(call_info['updated_since'], updated_since)
self.assertEqual(call_info['get_instances'], 2)
# Now the last 1 and the first 1
self.assertEqual(call_info['sync_instances'],
[instances[-1], instances[0]])
def test_sync_instances(self):
self.mox.StubOutWithMock(self.msg_runner,
'sync_instances')
self.msg_runner.sync_instances(self.ctxt, 'fake-project',
'fake-time', 'fake-deleted')
self.mox.ReplayAll()
self.cells_manager.sync_instances(self.ctxt,
project_id='fake-project',
updated_since='fake-time',
deleted='fake-deleted')
def test_service_get_all(self):
responses = []
expected_response = []
# 3 cells... so 3 responses. Each response is a list of services.
# Manager should turn these into a single list of responses.
for i in xrange(3):
cell_name = 'path!to!cell%i' % i
services = []
for service in FAKE_SERVICES:
services.append(copy.deepcopy(service))
expected_service = copy.deepcopy(service)
cells_utils.add_cell_to_service(expected_service, cell_name)
expected_response.append(expected_service)
response = messaging.Response(cell_name, services, False)
responses.append(response)
self.mox.StubOutWithMock(self.msg_runner,
'service_get_all')
self.msg_runner.service_get_all(self.ctxt,
'fake-filters').AndReturn(responses)
self.mox.ReplayAll()
response = self.cells_manager.service_get_all(self.ctxt,
filters='fake-filters')
self.assertEqual(expected_response, response)
def test_service_get_by_compute_host(self):
self.mox.StubOutWithMock(self.msg_runner,
'service_get_by_compute_host')
fake_cell = 'fake-cell'
fake_response = messaging.Response(fake_cell, FAKE_SERVICES[0],
False)
expected_response = copy.deepcopy(FAKE_SERVICES[0])
cells_utils.add_cell_to_service(expected_response, fake_cell)
cell_and_host = cells_utils.cell_with_item('fake-cell', 'fake-host')
self.msg_runner.service_get_by_compute_host(self.ctxt,
fake_cell, 'fake-host').AndReturn(fake_response)
self.mox.ReplayAll()
response = self.cells_manager.service_get_by_compute_host(self.ctxt,
host_name=cell_and_host)
self.assertEqual(expected_response, response)
def test_get_host_uptime(self):
fake_cell = 'parent!fake-cell'
fake_host = 'fake-host'
fake_cell_and_host = cells_utils.cell_with_item(fake_cell, fake_host)
host_uptime = (" 08:32:11 up 93 days, 18:25, 12 users, load average:"
" 0.20, 0.12, 0.14")
fake_response = messaging.Response(fake_cell, host_uptime, False)
self.mox.StubOutWithMock(self.msg_runner,
'get_host_uptime')
self.msg_runner.get_host_uptime(self.ctxt, fake_cell, fake_host).\
AndReturn(fake_response)
self.mox.ReplayAll()
response = self.cells_manager.get_host_uptime(self.ctxt,
fake_cell_and_host)
self.assertEqual(host_uptime, response)
def test_service_update(self):
fake_cell = 'fake-cell'
fake_response = messaging.Response(
fake_cell, FAKE_SERVICES[0], False)
expected_response = copy.deepcopy(FAKE_SERVICES[0])
cells_utils.add_cell_to_service(expected_response, fake_cell)
cell_and_host = cells_utils.cell_with_item('fake-cell', 'fake-host')
params_to_update = {'disabled': True}
self.mox.StubOutWithMock(self.msg_runner, 'service_update')
self.msg_runner.service_update(self.ctxt,
fake_cell, 'fake-host', 'nova-api',
params_to_update).AndReturn(fake_response)
self.mox.ReplayAll()
response = self.cells_manager.service_update(
self.ctxt, host_name=cell_and_host, binary='nova-api',
params_to_update=params_to_update)
self.assertEqual(expected_response, response)
def test_service_delete(self):
fake_cell = 'fake-cell'
service_id = '1'
cell_service_id = cells_utils.cell_with_item(fake_cell, service_id)
with mock.patch.object(self.msg_runner,
'service_delete') as service_delete:
self.cells_manager.service_delete(self.ctxt, cell_service_id)
service_delete.assert_called_once_with(
self.ctxt, fake_cell, service_id)
def test_proxy_rpc_to_manager(self):
self.mox.StubOutWithMock(self.msg_runner,
'proxy_rpc_to_manager')
fake_response = self._get_fake_response()
cell_and_host = cells_utils.cell_with_item('fake-cell', 'fake-host')
topic = "%s.%s" % (CONF.compute_topic, cell_and_host)
self.msg_runner.proxy_rpc_to_manager(self.ctxt, 'fake-cell',
'fake-host', topic, 'fake-rpc-msg',
True, -1).AndReturn(fake_response)
self.mox.ReplayAll()
response = self.cells_manager.proxy_rpc_to_manager(self.ctxt,
topic=topic, rpc_message='fake-rpc-msg', call=True,
timeout=-1)
self.assertEqual('fake-response', response)
def _build_task_log_responses(self, num):
responses = []
expected_response = []
        # `num` cells... so `num` responses. Each response is a list of
        # task log entries. Manager should turn these into a single list
        # of task log entries.
for i in xrange(num):
cell_name = 'path!to!cell%i' % i
task_logs = []
for task_log in FAKE_TASK_LOGS:
task_logs.append(copy.deepcopy(task_log))
expected_task_log = copy.deepcopy(task_log)
cells_utils.add_cell_to_task_log(expected_task_log,
cell_name)
expected_response.append(expected_task_log)
response = messaging.Response(cell_name, task_logs, False)
responses.append(response)
return expected_response, responses
def test_task_log_get_all(self):
expected_response, responses = self._build_task_log_responses(3)
self.mox.StubOutWithMock(self.msg_runner,
'task_log_get_all')
self.msg_runner.task_log_get_all(self.ctxt, None,
'fake-name', 'fake-begin',
'fake-end', host=None, state=None).AndReturn(responses)
self.mox.ReplayAll()
response = self.cells_manager.task_log_get_all(self.ctxt,
task_name='fake-name',
period_beginning='fake-begin', period_ending='fake-end')
self.assertEqual(expected_response, response)
def test_task_log_get_all_with_filters(self):
expected_response, responses = self._build_task_log_responses(1)
cell_and_host = cells_utils.cell_with_item('fake-cell', 'fake-host')
self.mox.StubOutWithMock(self.msg_runner,
'task_log_get_all')
self.msg_runner.task_log_get_all(self.ctxt, 'fake-cell',
'fake-name', 'fake-begin', 'fake-end', host='fake-host',
state='fake-state').AndReturn(responses)
self.mox.ReplayAll()
response = self.cells_manager.task_log_get_all(self.ctxt,
task_name='fake-name',
period_beginning='fake-begin', period_ending='fake-end',
host=cell_and_host, state='fake-state')
self.assertEqual(expected_response, response)
def test_task_log_get_all_with_cell_but_no_host_filters(self):
expected_response, responses = self._build_task_log_responses(1)
# Host filter only has cell name.
cell_and_host = 'fake-cell'
self.mox.StubOutWithMock(self.msg_runner,
'task_log_get_all')
self.msg_runner.task_log_get_all(self.ctxt, 'fake-cell',
'fake-name', 'fake-begin', 'fake-end', host=None,
state='fake-state').AndReturn(responses)
self.mox.ReplayAll()
response = self.cells_manager.task_log_get_all(self.ctxt,
task_name='fake-name',
period_beginning='fake-begin', period_ending='fake-end',
host=cell_and_host, state='fake-state')
self.assertEqual(expected_response, response)
def test_compute_node_get_all(self):
responses = []
expected_response = []
# 3 cells... so 3 responses. Each response is a list of computes.
# Manager should turn these into a single list of responses.
for i in xrange(3):
cell_name = 'path!to!cell%i' % i
compute_nodes = []
for compute_node in FAKE_COMPUTE_NODES:
compute_nodes.append(copy.deepcopy(compute_node))
expected_compute_node = copy.deepcopy(compute_node)
cells_utils.add_cell_to_compute_node(expected_compute_node,
cell_name)
expected_response.append(expected_compute_node)
response = messaging.Response(cell_name, compute_nodes, False)
responses.append(response)
self.mox.StubOutWithMock(self.msg_runner,
'compute_node_get_all')
self.msg_runner.compute_node_get_all(self.ctxt,
hypervisor_match='fake-match').AndReturn(responses)
self.mox.ReplayAll()
response = self.cells_manager.compute_node_get_all(self.ctxt,
hypervisor_match='fake-match')
self.assertEqual(expected_response, response)
def test_compute_node_stats(self):
raw_resp1 = {'key1': 1, 'key2': 2}
raw_resp2 = {'key2': 1, 'key3': 2}
raw_resp3 = {'key3': 1, 'key4': 2}
responses = [messaging.Response('cell1', raw_resp1, False),
messaging.Response('cell2', raw_resp2, False),
                     messaging.Response('cell3', raw_resp3, False)]
expected_resp = {'key1': 1, 'key2': 3, 'key3': 3, 'key4': 2}
self.mox.StubOutWithMock(self.msg_runner,
'compute_node_stats')
self.msg_runner.compute_node_stats(self.ctxt).AndReturn(responses)
self.mox.ReplayAll()
response = self.cells_manager.compute_node_stats(self.ctxt)
self.assertEqual(expected_resp, response)
def test_compute_node_get(self):
fake_cell = 'fake-cell'
fake_response = messaging.Response(fake_cell,
FAKE_COMPUTE_NODES[0],
False)
expected_response = copy.deepcopy(FAKE_COMPUTE_NODES[0])
cells_utils.add_cell_to_compute_node(expected_response, fake_cell)
cell_and_id = cells_utils.cell_with_item(fake_cell, 'fake-id')
self.mox.StubOutWithMock(self.msg_runner,
'compute_node_get')
self.msg_runner.compute_node_get(self.ctxt,
'fake-cell', 'fake-id').AndReturn(fake_response)
self.mox.ReplayAll()
response = self.cells_manager.compute_node_get(self.ctxt,
compute_id=cell_and_id)
self.assertEqual(expected_response, response)
def test_actions_get(self):
fake_uuid = fake_server_actions.FAKE_UUID
fake_req_id = fake_server_actions.FAKE_REQUEST_ID1
fake_act = fake_server_actions.FAKE_ACTIONS[fake_uuid][fake_req_id]
fake_response = messaging.Response('fake-cell', [fake_act], False)
expected_response = [fake_act]
self.mox.StubOutWithMock(self.msg_runner, 'actions_get')
self.msg_runner.actions_get(self.ctxt, 'fake-cell',
'fake-uuid').AndReturn(fake_response)
self.mox.ReplayAll()
response = self.cells_manager.actions_get(self.ctxt, 'fake-cell',
'fake-uuid')
self.assertEqual(expected_response, response)
def test_action_get_by_request_id(self):
fake_uuid = fake_server_actions.FAKE_UUID
fake_req_id = fake_server_actions.FAKE_REQUEST_ID1
fake_act = fake_server_actions.FAKE_ACTIONS[fake_uuid][fake_req_id]
fake_response = messaging.Response('fake-cell', fake_act, False)
expected_response = fake_act
self.mox.StubOutWithMock(self.msg_runner, 'action_get_by_request_id')
self.msg_runner.action_get_by_request_id(self.ctxt, 'fake-cell',
'fake-uuid', 'req-fake').AndReturn(fake_response)
self.mox.ReplayAll()
response = self.cells_manager.action_get_by_request_id(self.ctxt,
'fake-cell',
'fake-uuid',
'req-fake')
self.assertEqual(expected_response, response)
def test_action_events_get(self):
fake_action_id = fake_server_actions.FAKE_ACTION_ID1
fake_events = fake_server_actions.FAKE_EVENTS[fake_action_id]
fake_response = messaging.Response('fake-cell', fake_events, False)
expected_response = fake_events
self.mox.StubOutWithMock(self.msg_runner, 'action_events_get')
self.msg_runner.action_events_get(self.ctxt, 'fake-cell',
'fake-action').AndReturn(fake_response)
self.mox.ReplayAll()
response = self.cells_manager.action_events_get(self.ctxt, 'fake-cell',
'fake-action')
self.assertEqual(expected_response, response)
def test_consoleauth_delete_tokens(self):
instance_uuid = 'fake-instance-uuid'
self.mox.StubOutWithMock(self.msg_runner,
'consoleauth_delete_tokens')
self.msg_runner.consoleauth_delete_tokens(self.ctxt, instance_uuid)
self.mox.ReplayAll()
self.cells_manager.consoleauth_delete_tokens(self.ctxt,
instance_uuid=instance_uuid)
def test_get_capacities(self):
cell_name = 'cell_name'
response = {"ram_free":
{"units_by_mb": {"64": 20, "128": 10}, "total_mb": 1491}}
self.mox.StubOutWithMock(self.state_manager,
'get_capacities')
self.state_manager.get_capacities(cell_name).AndReturn(response)
self.mox.ReplayAll()
self.assertEqual(response,
self.cells_manager.get_capacities(self.ctxt, cell_name))
def test_validate_console_port(self):
instance_uuid = 'fake-instance-uuid'
cell_name = 'fake-cell-name'
instance = {'cell_name': cell_name}
console_port = 'fake-console-port'
console_type = 'fake-console-type'
self.mox.StubOutWithMock(self.msg_runner,
'validate_console_port')
self.mox.StubOutWithMock(self.cells_manager.db,
'instance_get_by_uuid')
fake_response = self._get_fake_response()
self.cells_manager.db.instance_get_by_uuid(self.ctxt,
instance_uuid).AndReturn(instance)
self.msg_runner.validate_console_port(self.ctxt, cell_name,
instance_uuid, console_port,
console_type).AndReturn(fake_response)
self.mox.ReplayAll()
response = self.cells_manager.validate_console_port(self.ctxt,
instance_uuid=instance_uuid, console_port=console_port,
console_type=console_type)
self.assertEqual('fake-response', response)
def test_bdm_update_or_create_at_top(self):
self.mox.StubOutWithMock(self.msg_runner,
'bdm_update_or_create_at_top')
self.msg_runner.bdm_update_or_create_at_top(self.ctxt,
'fake-bdm',
create='foo')
self.mox.ReplayAll()
self.cells_manager.bdm_update_or_create_at_top(self.ctxt,
'fake-bdm',
create='foo')
def test_bdm_destroy_at_top(self):
self.mox.StubOutWithMock(self.msg_runner, 'bdm_destroy_at_top')
self.msg_runner.bdm_destroy_at_top(self.ctxt,
'fake_instance_uuid',
device_name='fake_device_name',
volume_id='fake_volume_id')
self.mox.ReplayAll()
self.cells_manager.bdm_destroy_at_top(self.ctxt,
'fake_instance_uuid',
device_name='fake_device_name',
volume_id='fake_volume_id')
def test_get_migrations(self):
filters = {'status': 'confirmed'}
cell1_migrations = [{'id': 123}]
cell2_migrations = [{'id': 456}]
fake_responses = [self._get_fake_response(cell1_migrations),
self._get_fake_response(cell2_migrations)]
self.mox.StubOutWithMock(self.msg_runner,
'get_migrations')
self.msg_runner.get_migrations(self.ctxt, None, False, filters).\
AndReturn(fake_responses)
self.mox.ReplayAll()
response = self.cells_manager.get_migrations(self.ctxt, filters)
self.assertEqual([cell1_migrations[0], cell2_migrations[0]], response)
def test_get_migrations_for_a_given_cell(self):
filters = {'status': 'confirmed', 'cell_name': 'ChildCell1'}
target_cell = '%s%s%s' % (CONF.cells.name, '!', filters['cell_name'])
migrations = [{'id': 123}]
fake_responses = [self._get_fake_response(migrations)]
self.mox.StubOutWithMock(self.msg_runner,
'get_migrations')
self.msg_runner.get_migrations(self.ctxt, target_cell, False,
filters).AndReturn(fake_responses)
self.mox.ReplayAll()
response = self.cells_manager.get_migrations(self.ctxt, filters)
self.assertEqual(migrations, response)
def test_instance_update_from_api(self):
self.mox.StubOutWithMock(self.msg_runner,
'instance_update_from_api')
self.msg_runner.instance_update_from_api(self.ctxt,
'fake-instance',
'exp_vm', 'exp_task',
'admin_reset')
self.mox.ReplayAll()
self.cells_manager.instance_update_from_api(
self.ctxt, instance='fake-instance',
expected_vm_state='exp_vm',
expected_task_state='exp_task',
admin_state_reset='admin_reset')
def test_start_instance(self):
self.mox.StubOutWithMock(self.msg_runner, 'start_instance')
self.msg_runner.start_instance(self.ctxt, 'fake-instance')
self.mox.ReplayAll()
self.cells_manager.start_instance(self.ctxt, instance='fake-instance')
def test_stop_instance(self):
self.mox.StubOutWithMock(self.msg_runner, 'stop_instance')
self.msg_runner.stop_instance(self.ctxt, 'fake-instance',
do_cast='meow',
clean_shutdown='purr')
self.mox.ReplayAll()
self.cells_manager.stop_instance(self.ctxt,
instance='fake-instance',
do_cast='meow',
clean_shutdown='purr')
def test_cell_create(self):
values = 'values'
response = 'created_cell'
self.mox.StubOutWithMock(self.state_manager,
'cell_create')
self.state_manager.cell_create(self.ctxt, values).\
AndReturn(response)
self.mox.ReplayAll()
self.assertEqual(response,
self.cells_manager.cell_create(self.ctxt, values))
def test_cell_update(self):
cell_name = 'cell_name'
values = 'values'
response = 'updated_cell'
self.mox.StubOutWithMock(self.state_manager,
'cell_update')
self.state_manager.cell_update(self.ctxt, cell_name, values).\
AndReturn(response)
self.mox.ReplayAll()
self.assertEqual(response,
self.cells_manager.cell_update(self.ctxt, cell_name,
values))
def test_cell_delete(self):
cell_name = 'cell_name'
response = 1
self.mox.StubOutWithMock(self.state_manager,
'cell_delete')
self.state_manager.cell_delete(self.ctxt, cell_name).\
AndReturn(response)
self.mox.ReplayAll()
self.assertEqual(response,
self.cells_manager.cell_delete(self.ctxt, cell_name))
def test_cell_get(self):
cell_name = 'cell_name'
response = 'cell_info'
self.mox.StubOutWithMock(self.state_manager,
'cell_get')
self.state_manager.cell_get(self.ctxt, cell_name).\
AndReturn(response)
self.mox.ReplayAll()
self.assertEqual(response,
self.cells_manager.cell_get(self.ctxt, cell_name))
def test_reboot_instance(self):
self.mox.StubOutWithMock(self.msg_runner, 'reboot_instance')
self.msg_runner.reboot_instance(self.ctxt, 'fake-instance',
'HARD')
self.mox.ReplayAll()
self.cells_manager.reboot_instance(self.ctxt,
instance='fake-instance',
reboot_type='HARD')
def test_suspend_instance(self):
self.mox.StubOutWithMock(self.msg_runner, 'suspend_instance')
self.msg_runner.suspend_instance(self.ctxt, 'fake-instance')
self.mox.ReplayAll()
self.cells_manager.suspend_instance(self.ctxt,
instance='fake-instance')
def test_resume_instance(self):
self.mox.StubOutWithMock(self.msg_runner, 'resume_instance')
self.msg_runner.resume_instance(self.ctxt, 'fake-instance')
self.mox.ReplayAll()
self.cells_manager.resume_instance(self.ctxt,
instance='fake-instance')
def test_terminate_instance(self):
self.mox.StubOutWithMock(self.msg_runner, 'terminate_instance')
self.msg_runner.terminate_instance(self.ctxt, 'fake-instance')
self.mox.ReplayAll()
self.cells_manager.terminate_instance(self.ctxt,
instance='fake-instance')
def test_soft_delete_instance(self):
self.mox.StubOutWithMock(self.msg_runner, 'soft_delete_instance')
self.msg_runner.soft_delete_instance(self.ctxt, 'fake-instance')
self.mox.ReplayAll()
self.cells_manager.soft_delete_instance(self.ctxt,
instance='fake-instance')
def _test_resize_instance(self, clean_shutdown=True):
self.mox.StubOutWithMock(self.msg_runner, 'resize_instance')
self.msg_runner.resize_instance(self.ctxt, 'fake-instance',
'fake-flavor', 'fake-updates',
clean_shutdown=clean_shutdown)
self.mox.ReplayAll()
self.cells_manager.resize_instance(
self.ctxt, instance='fake-instance', flavor='fake-flavor',
extra_instance_updates='fake-updates',
clean_shutdown=clean_shutdown)
def test_resize_instance(self):
self._test_resize_instance()
def test_resize_instance_forced_shutdown(self):
self._test_resize_instance(clean_shutdown=False)
def test_live_migrate_instance(self):
self.mox.StubOutWithMock(self.msg_runner, 'live_migrate_instance')
self.msg_runner.live_migrate_instance(self.ctxt, 'fake-instance',
'fake-block', 'fake-commit',
'fake-host')
self.mox.ReplayAll()
self.cells_manager.live_migrate_instance(
self.ctxt, instance='fake-instance',
block_migration='fake-block', disk_over_commit='fake-commit',
host_name='fake-host')
def test_revert_resize(self):
self.mox.StubOutWithMock(self.msg_runner, 'revert_resize')
self.msg_runner.revert_resize(self.ctxt, 'fake-instance')
self.mox.ReplayAll()
self.cells_manager.revert_resize(self.ctxt, instance='fake-instance')
def test_confirm_resize(self):
self.mox.StubOutWithMock(self.msg_runner, 'confirm_resize')
self.msg_runner.confirm_resize(self.ctxt, 'fake-instance')
self.mox.ReplayAll()
self.cells_manager.confirm_resize(self.ctxt, instance='fake-instance')
def test_reset_network(self):
self.mox.StubOutWithMock(self.msg_runner, 'reset_network')
self.msg_runner.reset_network(self.ctxt, 'fake-instance')
self.mox.ReplayAll()
self.cells_manager.reset_network(self.ctxt, instance='fake-instance')
def test_inject_network_info(self):
self.mox.StubOutWithMock(self.msg_runner, 'inject_network_info')
self.msg_runner.inject_network_info(self.ctxt, 'fake-instance')
self.mox.ReplayAll()
self.cells_manager.inject_network_info(self.ctxt,
instance='fake-instance')
def test_snapshot_instance(self):
self.mox.StubOutWithMock(self.msg_runner, 'snapshot_instance')
self.msg_runner.snapshot_instance(self.ctxt, 'fake-instance',
'fake-id')
self.mox.ReplayAll()
self.cells_manager.snapshot_instance(self.ctxt,
instance='fake-instance',
image_id='fake-id')
def test_backup_instance(self):
self.mox.StubOutWithMock(self.msg_runner, 'backup_instance')
self.msg_runner.backup_instance(self.ctxt, 'fake-instance',
'fake-id', 'backup-type',
'rotation')
self.mox.ReplayAll()
self.cells_manager.backup_instance(self.ctxt,
instance='fake-instance',
image_id='fake-id',
backup_type='backup-type',
rotation='rotation')
def test_set_admin_password(self):
with mock.patch.object(self.msg_runner,
'set_admin_password') as set_admin_password:
self.cells_manager.set_admin_password(self.ctxt,
instance='fake-instance', new_pass='fake-password')
set_admin_password.assert_called_once_with(self.ctxt,
'fake-instance', 'fake-password')
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class ExpressRouteCircuitPeeringsOperations(object):
"""ExpressRouteCircuitPeeringsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
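    # Callers normally reach this operation group through the generated client
    # rather than constructing it by hand (sketch; the credential and subscription
    # values are placeholders):
    #
    #     from azure.identity import DefaultAzureCredential
    #     from azure.mgmt.network import NetworkManagementClient
    #
    #     client = NetworkManagementClient(DefaultAzureCredential(), "<subscription-id>")
    #     peering = client.express_route_circuit_peerings.get(
    #         "my-rg", "my-circuit", "AzurePrivatePeering")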
def _delete_initial(
self,
resource_group_name, # type: str
circuit_name, # type: str
peering_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
circuit_name, # type: str
peering_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified peering from the specified express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}'} # type: ignore
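    # begin_delete returns an LROPoller; a caller typically blocks on it
    # (sketch; resource names are placeholders):
    #
    #     poller = client.express_route_circuit_peerings.begin_delete(
    #         "my-rg", "my-circuit", "AzurePrivatePeering")
    #     poller.result()  # waits for the long-running delete to finish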
def get(
self,
resource_group_name, # type: str
circuit_name, # type: str
peering_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.ExpressRouteCircuitPeering"
"""Gets the specified peering for the express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRouteCircuitPeering, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_06_01.models.ExpressRouteCircuitPeering
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitPeering"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteCircuitPeering', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
circuit_name, # type: str
peering_name, # type: str
peering_parameters, # type: "_models.ExpressRouteCircuitPeering"
**kwargs # type: Any
):
# type: (...) -> "_models.ExpressRouteCircuitPeering"
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitPeering"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(peering_parameters, 'ExpressRouteCircuitPeering')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitPeering', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ExpressRouteCircuitPeering', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
circuit_name, # type: str
peering_name, # type: str
peering_parameters, # type: "_models.ExpressRouteCircuitPeering"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.ExpressRouteCircuitPeering"]
"""Creates or updates a peering in the specified express route circuits.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param peering_parameters: Parameters supplied to the create or update express route circuit
peering operation.
:type peering_parameters: ~azure.mgmt.network.v2020_06_01.models.ExpressRouteCircuitPeering
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ExpressRouteCircuitPeering or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_06_01.models.ExpressRouteCircuitPeering]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitPeering"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
peering_parameters=peering_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuitPeering', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}'} # type: ignore
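    # Illustrative usage sketch (not part of the generated module). Assuming a
    # NetworkManagementClient built from azure-identity credentials, the long-running
    # operation above is typically driven like this; the peering field values below are
    # placeholders, not values taken from this file:
    #
    #   from azure.identity import DefaultAzureCredential
    #   from azure.mgmt.network import NetworkManagementClient
    #
    #   client = NetworkManagementClient(DefaultAzureCredential(), "<subscription-id>")
    #   poller = client.express_route_circuit_peerings.begin_create_or_update(
    #       "my-rg", "my-circuit", "AzurePrivatePeering",
    #       {"peering_type": "AzurePrivatePeering", "vlan_id": 200, "peer_asn": 65001,
    #        "primary_peer_address_prefix": "10.0.0.0/30",
    #        "secondary_peer_address_prefix": "10.0.0.4/30"})
    #   peering = poller.result()  # blocks until the LRO completes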
def list(
self,
resource_group_name, # type: str
circuit_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.ExpressRouteCircuitPeeringListResult"]
"""Gets all peerings in a specified express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ExpressRouteCircuitPeeringListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_06_01.models.ExpressRouteCircuitPeeringListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitPeeringListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuitPeeringListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings'} # type: ignore
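    # Illustrative usage sketch (not part of the generated module): list() returns an
    # ItemPaged that fetches pages lazily, so iterating it yields ExpressRouteCircuitPeering
    # objects across all pages, e.g.
    #
    #   for peering in client.express_route_circuit_peerings.list("my-rg", "my-circuit"):
    #       print(peering.name, peering.peering_type)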
|
|
#!/usr/bin/env python3
# encoding: utf-8
#
# This file is part of ckanext-doi
# Created by the Natural History Museum in London, UK
import xml.etree.ElementTree as ET
import pytest
from datacite.errors import DataCiteError, DataCiteNotFoundError
from unittest.mock import patch, MagicMock
from . import constants
from ckanext.doi.lib.api import DataciteClient
def first_then(first, then):
'''
    Convenience generator which yields the first parameter once and then yields the then parameter
    forever. Handy for MagicMock side_effects where the first call should return the first
    parameter and every subsequent call should return the then parameter.
:param first: the first value to yield, once
:param then: the parameter to yield on subsequent next calls, forever
'''
yield first
while True:
yield then
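# Quick illustration (not a test): the generator yields `first` exactly once and `then`
# on every subsequent call, which is exactly what a MagicMock side_effect needs here.
#
#   gen = first_then('a', 'b')
#   next(gen)  # -> 'a'
#   next(gen)  # -> 'b'
#   next(gen)  # -> 'b', and so on forever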
@pytest.mark.ckan_config('ckanext.doi.prefix', 'testing')
class TestGenerateNewDOI:
'''
    In each of these tests we could assert the number of calls to the db and datacite client mocks,
    but that feels like too much of a reach into the internal logic of the function. We care
    about the result for each scenario, not how it gets there (unless we decide we should test
    that the function checks the db before checking datacite for performance reasons, which
    doesn't really seem necessary).
'''
def test_no_existing_dois(self):
# no dois in datacite
mock_client = MagicMock(metadata_get=MagicMock(side_effect=DataCiteNotFoundError()))
# no dois in the database
mock_read_doi = MagicMock(return_value=None)
with patch('ckanext.doi.lib.api.DataCiteMDSClient', MagicMock(return_value=mock_client)):
with patch('ckanext.doi.lib.api.DOIQuery.read_doi', mock_read_doi):
api = DataciteClient()
doi = api.generate_doi()
assert isinstance(doi, str)
# both the client and the database should be called once and only once (yes this
# goes against the comment at the start of this class but it felt relevant here to
# check that this was the case)
assert mock_client.metadata_get.call_count == 1
assert mock_read_doi.call_count == 1
def test_one_existing_db_doi(self):
# no dois in datacite
mock_client = MagicMock(metadata_get=MagicMock(side_effect=DataCiteNotFoundError()))
        # one doi already in the database hits the first call, but the next call finds nothing
mock_read_doi = MagicMock(side_effect=first_then(MagicMock(), None))
with patch('ckanext.doi.lib.api.DataCiteMDSClient', MagicMock(return_value=mock_client)):
with patch('ckanext.doi.lib.api.DOIQuery.read_doi', mock_read_doi):
api = DataciteClient()
doi = api.generate_doi()
assert isinstance(doi, str)
def test_one_existing_on_datacite(self):
        # the first call to the datacite client finds an existing (mock) doi, but the next call
        # raises not found, i.e. the doi is free
mock_client = MagicMock(
metadata_get=MagicMock(side_effect=first_then(MagicMock(), DataCiteNotFoundError())))
# no dois in the db
mock_read_doi = MagicMock(return_value=None)
with patch('ckanext.doi.lib.api.DataCiteMDSClient', MagicMock(return_value=mock_client)):
with patch('ckanext.doi.lib.api.DOIQuery.read_doi', mock_read_doi):
api = DataciteClient()
doi = api.generate_doi()
assert isinstance(doi, str)
def test_one_existing_on_datacite_and_one_in_the_db(self):
        # the first call to the datacite client finds an existing (mock) doi, but the next call
        # raises not found, i.e. the doi is free
mock_client = MagicMock(
metadata_get=MagicMock(side_effect=first_then(MagicMock(), DataCiteNotFoundError())))
# the first call to the db returns a result but then after that we're all good
mock_read_doi = MagicMock(side_effect=first_then(MagicMock(), None))
with patch('ckanext.doi.lib.api.DataCiteMDSClient', MagicMock(return_value=mock_client)):
with patch('ckanext.doi.lib.api.DOIQuery.read_doi', mock_read_doi):
api = DataciteClient()
doi = api.generate_doi()
assert isinstance(doi, str)
def test_it_fails_when_it_cannot_generate_a_unique_doi(self):
# the datacite client returns an existing (mock) doi every time, so unlikely!
mock_client = MagicMock(metadata_get=MagicMock())
# the db returns an existing (mock) doi every time, so unlikely!
mock_read_doi = MagicMock()
with patch('ckanext.doi.lib.api.DataCiteMDSClient', mock_client):
with patch('ckanext.doi.lib.api.DOIQuery.read_doi', mock_read_doi):
api = DataciteClient()
with pytest.raises(Exception, match='Failed to generate a DOI'):
api.generate_doi()
class MockDataciteMDSClient(object):
'''
Mock client so that we can replicate the functionality of the datacite API without actually
calling it. Specifically, you have to post the metadata before you post the doi.
'''
def __init__(self, *args, **kwargs):
self.metadata = set()
self.dois = set()
def doi_post(self, doi, *args, **kwargs):
if doi not in self.metadata:
raise DataCiteError()
self.dois.add(doi)
def metadata_post(self, xml_doc, *args, **kwargs):
tree = ET.fromstring(xml_doc)
doi = tree.findtext('{http://datacite.org/schema/kernel-4}identifier')
self.metadata.add(doi)
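# Sketch of the behaviour the mock enforces (not a test): posting a DOI before its metadata
# raises, posting the metadata first makes the same call succeed. The XML below is a stub;
# a real document would be a full DataCite kernel-4 resource.
#
#   client = MockDataciteMDSClient()
#   client.doi_post('10.1234/abcd', 'http://example.com')      # raises DataCiteError
#   client.metadata_post('<resource xmlns="http://datacite.org/schema/kernel-4">'
#                        '<identifier>10.1234/abcd</identifier></resource>')
#   client.doi_post('10.1234/abcd', 'http://example.com')      # now succeeds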
@pytest.mark.ckan_config('ckanext.doi.prefix', 'testing')
@patch('ckanext.doi.lib.api.DataCiteMDSClient', MockDataciteMDSClient)
@patch('ckanext.doi.lib.api.DOIQuery')
class TestMintNewDOI(object):
def test_datacite_api_order(self, mock_crud):
mock_crud.read_doi = MagicMock(return_value=None)
mock_crud.read_package = MagicMock(return_value=None)
api = DataciteClient()
doi = constants.XML_DICT['identifiers'][0]['identifier']
pkg_id = MagicMock()
with pytest.raises(DataCiteError):
api.mint_doi(doi, pkg_id)
api.set_metadata(doi, constants.XML_DICT)
api.mint_doi(doi, pkg_id)
def test_new_doi(self, mock_crud):
mock_crud.read_doi = MagicMock(return_value=None)
mock_crud.read_package = MagicMock(return_value=None)
api = DataciteClient()
doi = constants.XML_DICT['identifiers'][0]['identifier']
pkg_id = MagicMock()
api.set_metadata(doi, constants.XML_DICT)
api.mint_doi(doi, pkg_id)
assert mock_crud.create.called
assert not mock_crud.update_package.called
assert mock_crud.update_doi.called
def test_existing_doi(self, mock_crud):
mock_crud.read_doi = MagicMock(return_value=MagicMock())
mock_crud.read_package = MagicMock(return_value=None)
api = DataciteClient()
doi = constants.XML_DICT['identifiers'][0]['identifier']
pkg_id = MagicMock()
api.set_metadata(doi, constants.XML_DICT)
api.mint_doi(doi, pkg_id)
assert not mock_crud.create.called
assert not mock_crud.update_package.called
assert mock_crud.update_doi.called
def test_existing_package(self, mock_crud):
mock_crud.read_doi = MagicMock(return_value=None)
mock_crud.read_package = MagicMock(return_value=MagicMock())
api = DataciteClient()
doi = constants.XML_DICT['identifiers'][0]['identifier']
pkg_id = MagicMock()
api.set_metadata(doi, constants.XML_DICT)
api.mint_doi(doi, pkg_id)
assert not mock_crud.create.called
assert mock_crud.update_package.called
assert mock_crud.update_doi.called
def test_both_exist(self, mock_crud):
mock_crud.read_doi = MagicMock(return_value=MagicMock())
mock_crud.read_package = MagicMock(return_value=MagicMock())
api = DataciteClient()
doi = constants.XML_DICT['identifiers'][0]['identifier']
pkg_id = MagicMock()
api.set_metadata(doi, constants.XML_DICT)
api.mint_doi(doi, pkg_id)
assert not mock_crud.create.called
assert not mock_crud.update_package.called
assert mock_crud.update_doi.called
@pytest.mark.ckan_config('ckanext.doi.prefix', 'testing')
@pytest.mark.ckan_config('ckanext.doi.account_name', 'goat!')
@pytest.mark.ckan_config('ckanext.doi.account_password', 'hammocks?')
@patch('ckanext.doi.lib.api.DataCiteMDSClient')
class TestDataciteClientCreation(object):
@pytest.mark.ckan_config('ckanext.doi.test_mode', False)
def test_basics(self, mock_client):
DataciteClient()
        mock_client.assert_called_once_with(username='goat!', password='hammocks?',
                                            prefix='testing', test_mode=False)
assert 'url' not in mock_client.call_args.kwargs
@pytest.mark.ckan_config('ckanext.doi.test_mode', True)
def test_test_mode_true(self, mock_client):
DataciteClient()
        mock_client.assert_called_once_with(username='goat!', password='hammocks?',
                                            prefix='testing', test_mode=True,
                                            url=DataciteClient.test_url)
|
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
'''
JSON related utilities.
This module provides a few things:
1) A handy function for getting an object down to something that can be
JSON serialized. See to_primitive().
2) Wrappers around loads() and dumps(). The dumps() wrapper will
automatically use to_primitive() for you if needed.
3) This sets up anyjson to use the loads() and dumps() wrappers if anyjson
is available.
'''
import codecs
import datetime
import functools
import inspect
import itertools
import sys
is_simplejson = False
if sys.version_info < (2, 7):
# On Python <= 2.6, json module is not C boosted, so try to use
# simplejson module if available
try:
import simplejson as json
        # NOTE(mriedem): Make sure we have a new enough version of simplejson
        # to support the namedtuple_as_object argument. This can be removed
        # in the Kilo release when python 2.6 support is dropped.
if 'namedtuple_as_object' in inspect.getargspec(json.dumps).args:
is_simplejson = True
else:
import json
except ImportError:
import json
else:
import json
import six
import six.moves.xmlrpc_client as xmlrpclib
from keystone.openstack.common import gettextutils
from keystone.openstack.common import importutils
from keystone.openstack.common import strutils
from keystone.openstack.common import timeutils
netaddr = importutils.try_import("netaddr")
_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod,
inspect.isfunction, inspect.isgeneratorfunction,
inspect.isgenerator, inspect.istraceback, inspect.isframe,
inspect.iscode, inspect.isbuiltin, inspect.isroutine,
inspect.isabstract]
_simple_types = (six.string_types + six.integer_types
+ (type(None), bool, float))
def to_primitive(value, convert_instances=False, convert_datetime=True,
level=0, max_depth=3):
"""Convert a complex object into primitives.
Handy for JSON serialization. We can optionally handle instances,
but since this is a recursive function, we could have cyclical
data structures.
To handle cyclical data structures we could track the actual objects
visited in a set, but not all objects are hashable. Instead we just
track the depth of the object inspections and don't go too deep.
Therefore, convert_instances=True is lossy ... be aware.
"""
# handle obvious types first - order of basic types determined by running
# full tests on nova project, resulting in the following counts:
# 572754 <type 'NoneType'>
# 460353 <type 'int'>
# 379632 <type 'unicode'>
# 274610 <type 'str'>
# 199918 <type 'dict'>
# 114200 <type 'datetime.datetime'>
# 51817 <type 'bool'>
# 26164 <type 'list'>
# 6491 <type 'float'>
# 283 <type 'tuple'>
# 19 <type 'long'>
if isinstance(value, _simple_types):
return value
if isinstance(value, datetime.datetime):
if convert_datetime:
return timeutils.strtime(value)
else:
return value
# value of itertools.count doesn't get caught by nasty_type_tests
# and results in infinite loop when list(value) is called.
if type(value) == itertools.count:
return six.text_type(value)
# FIXME(vish): Workaround for LP bug 852095. Without this workaround,
# tests that raise an exception in a mocked method that
# has a @wrap_exception with a notifier will fail. If
# we up the dependency to 0.5.4 (when it is released) we
# can remove this workaround.
if getattr(value, '__module__', None) == 'mox':
return 'mock'
if level > max_depth:
return '?'
# The try block may not be necessary after the class check above,
# but just in case ...
try:
recursive = functools.partial(to_primitive,
convert_instances=convert_instances,
convert_datetime=convert_datetime,
level=level,
max_depth=max_depth)
if isinstance(value, dict):
return dict((k, recursive(v)) for k, v in six.iteritems(value))
elif isinstance(value, (list, tuple)):
return [recursive(lv) for lv in value]
# It's not clear why xmlrpclib created their own DateTime type, but
# for our purposes, make it a datetime type which is explicitly
# handled
if isinstance(value, xmlrpclib.DateTime):
value = datetime.datetime(*tuple(value.timetuple())[:6])
if convert_datetime and isinstance(value, datetime.datetime):
return timeutils.strtime(value)
elif isinstance(value, gettextutils.Message):
return value.data
elif hasattr(value, 'iteritems'):
return recursive(dict(value.iteritems()), level=level + 1)
elif hasattr(value, '__iter__'):
return recursive(list(value))
elif convert_instances and hasattr(value, '__dict__'):
# Likely an instance of something. Watch for cycles.
# Ignore class member vars.
return recursive(value.__dict__, level=level + 1)
elif netaddr and isinstance(value, netaddr.IPAddress):
return six.text_type(value)
else:
if any(test(value) for test in _nasty_type_tests):
return six.text_type(value)
return value
except TypeError:
        # Class objects are tricky since they may have something like __iter__
        # defined, but it isn't callable as list().
return six.text_type(value)
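# Illustrative sketch (not part of the original module): to_primitive() converts nested
# containers and datetimes into JSON-friendly primitives, e.g.
#
#   to_primitive({'when': datetime.datetime(2014, 1, 1), 'ids': (1, 2)})
#   # -> {'when': '2014-01-01T00:00:00.000000', 'ids': [1, 2]}
#
# The datetime string shown assumes timeutils.strtime()'s default
# "%Y-%m-%dT%H:%M:%S.%f" format.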
def dumps(value, default=to_primitive, **kwargs):
if is_simplejson:
kwargs['namedtuple_as_object'] = False
return json.dumps(value, default=default, **kwargs)
def dump(obj, fp, *args, **kwargs):
if is_simplejson:
kwargs['namedtuple_as_object'] = False
return json.dump(obj, fp, *args, **kwargs)
def loads(s, encoding='utf-8', **kwargs):
return json.loads(strutils.safe_decode(s, encoding), **kwargs)
def load(fp, encoding='utf-8', **kwargs):
return json.load(codecs.getreader(encoding)(fp), **kwargs)
try:
import anyjson
except ImportError:
pass
else:
anyjson._modules.append((__name__, 'dumps', TypeError,
'loads', ValueError, 'load'))
anyjson.force_implementation(__name__)
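# Usage sketch (assumed, not part of the original module): the wrappers behave like the
# stdlib json functions but run values through to_primitive() first, so objects such as
# datetimes and tuples serialize without a custom encoder.
#
#   payload = {'created_at': datetime.datetime.utcnow(), 'tags': ('a', 'b')}
#   text = dumps(payload)   # datetimes become strings, tuples become lists
#   data = loads(text)      # plain dict of primitive values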
|
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for training routines."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python import keras
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util as tf_test_util
from tensorflow.python.keras import testing_utils
from tensorflow.python.platform import test
from tensorflow.python.training.rmsprop import RMSPropOptimizer
class TrainingTest(test.TestCase):
def test_fit_on_arrays(self):
a = keras.layers.Input(shape=(3,), name='input_a')
b = keras.layers.Input(shape=(3,), name='input_b')
dense = keras.layers.Dense(4, name='dense')
c = dense(a)
d = dense(b)
e = keras.layers.Dropout(0.5, name='dropout')(c)
model = keras.models.Model([a, b], [d, e])
optimizer = RMSPropOptimizer(learning_rate=0.001)
loss = 'mse'
loss_weights = [1., 0.5]
metrics = ['mae']
model.compile(optimizer, loss, metrics=metrics, loss_weights=loss_weights)
input_a_np = np.random.random((10, 3))
input_b_np = np.random.random((10, 3))
output_d_np = np.random.random((10, 4))
output_e_np = np.random.random((10, 4))
# Test fit at different verbosity
model.fit(
[input_a_np, input_b_np], [output_d_np, output_e_np],
epochs=1,
batch_size=5,
verbose=0)
model.fit(
[input_a_np, input_b_np], [output_d_np, output_e_np],
epochs=1,
batch_size=5,
verbose=1)
model.fit(
[input_a_np, input_b_np], [output_d_np, output_e_np],
epochs=2,
batch_size=5,
verbose=2)
# Test with validation data
model.fit(
[input_a_np, input_b_np], [output_d_np, output_e_np],
validation_data=([input_a_np, input_b_np], [output_d_np,
output_e_np]),
epochs=1,
batch_size=5,
verbose=0)
model.fit(
[input_a_np, input_b_np], [output_d_np, output_e_np],
validation_data=([input_a_np, input_b_np], [output_d_np,
output_e_np]),
epochs=2,
batch_size=5,
verbose=1)
model.fit(
[input_a_np, input_b_np], [output_d_np, output_e_np],
validation_data=([input_a_np, input_b_np], [output_d_np,
output_e_np]),
epochs=2,
batch_size=5,
verbose=2)
model.train_on_batch([input_a_np, input_b_np], [output_d_np, output_e_np])
# Test with validation split
model.fit(
[input_a_np, input_b_np], [output_d_np, output_e_np],
epochs=2,
batch_size=5,
verbose=0,
validation_split=0.2)
# Test with dictionary inputs
model.fit(
{
'input_a': input_a_np,
'input_b': input_b_np
}, {'dense': output_d_np,
'dropout': output_e_np},
epochs=1,
batch_size=5,
verbose=0)
model.fit(
{
'input_a': input_a_np,
'input_b': input_b_np
}, {'dense': output_d_np,
'dropout': output_e_np},
epochs=1,
batch_size=5,
verbose=1)
model.fit(
{
'input_a': input_a_np,
'input_b': input_b_np
}, {'dense': output_d_np,
'dropout': output_e_np},
validation_data=({'input_a': input_a_np,
'input_b': input_b_np
},
{
'dense': output_d_np,
'dropout': output_e_np
}),
epochs=1,
batch_size=5,
verbose=0)
model.train_on_batch({
'input_a': input_a_np,
'input_b': input_b_np
}, {'dense': output_d_np,
'dropout': output_e_np})
# Test with lists for loss, metrics
loss = ['mae', 'mse']
metrics = ['acc', 'mae']
model.compile(optimizer, loss, metrics=metrics)
model.fit(
[input_a_np, input_b_np], [output_d_np, output_e_np],
epochs=1,
batch_size=5,
verbose=0)
# Test with dictionaries for loss, metrics, loss weights
loss = {'dense': 'mse', 'dropout': 'mae'}
loss_weights = {'dense': 1., 'dropout': 0.5}
metrics = {'dense': 'mse', 'dropout': 'mae'}
model.compile(optimizer, loss, metrics=metrics, loss_weights=loss_weights)
model.fit(
[input_a_np, input_b_np], [output_d_np, output_e_np],
epochs=1,
batch_size=5,
verbose=0)
# Invalid use cases
with self.assertRaises(AttributeError):
model.fit(
[input_a_np, input_b_np], [output_d_np, output_e_np],
epochs=1,
validation_data=([input_a_np, input_b_np], 0, 0),
verbose=0)
with self.assertRaises(ValueError):
model.train_on_batch({'input_a': input_a_np},
[output_d_np, output_e_np])
with self.assertRaises(ValueError):
model.train_on_batch([input_a_np], [output_d_np, output_e_np])
with self.assertRaises(AttributeError):
model.train_on_batch(1, [output_d_np, output_e_np])
with self.assertRaises(ValueError):
model.train_on_batch(input_a_np, [output_d_np, output_e_np])
with self.assertRaises(ValueError):
bad_input = np.random.random((11, 3))
model.train_on_batch([bad_input, input_b_np],
[output_d_np, output_e_np])
with self.assertRaises(ValueError):
bad_target = np.random.random((11, 4))
model.train_on_batch([input_a_np, input_b_np],
[bad_target, output_e_np])
# Build single-input model
x = keras.layers.Input(shape=(3,), name='input_a')
y = keras.layers.Dense(4)(x)
model = keras.models.Model(x, y)
model.compile(optimizer=RMSPropOptimizer(learning_rate=0.001), loss='mse')
# This will work
model.fit([input_a_np], output_d_np, epochs=1)
with self.assertRaises(ValueError):
model.fit([input_a_np, input_a_np], output_d_np, epochs=1)
def test_evaluate_predict_on_arrays(self):
a = keras.layers.Input(shape=(3,), name='input_a')
b = keras.layers.Input(shape=(3,), name='input_b')
dense = keras.layers.Dense(4, name='dense')
c = dense(a)
d = dense(b)
e = keras.layers.Dropout(0.5, name='dropout')(c)
model = keras.models.Model([a, b], [d, e])
optimizer = RMSPropOptimizer(learning_rate=0.001)
loss = 'mse'
loss_weights = [1., 0.5]
metrics = ['acc', 'mae']
model.compile(
optimizer,
loss,
metrics=metrics,
loss_weights=loss_weights,
sample_weight_mode=None)
input_a_np = np.random.random((10, 3))
input_b_np = np.random.random((10, 3))
output_d_np = np.random.random((10, 4))
output_e_np = np.random.random((10, 4))
# Test evaluate at different verbosity
out = model.evaluate(
[input_a_np, input_b_np], [output_d_np, output_e_np],
batch_size=5,
verbose=0)
self.assertEqual(len(out), 7)
out = model.evaluate(
[input_a_np, input_b_np], [output_d_np, output_e_np],
batch_size=5,
verbose=1)
self.assertEqual(len(out), 7)
out = model.evaluate(
[input_a_np, input_b_np], [output_d_np, output_e_np],
batch_size=5,
verbose=2)
self.assertEqual(len(out), 7)
out = model.test_on_batch([input_a_np, input_b_np],
[output_d_np, output_e_np])
self.assertEqual(len(out), 7)
# Test evaluate with dictionary inputs
model.evaluate(
{
'input_a': input_a_np,
'input_b': input_b_np
}, {'dense': output_d_np,
'dropout': output_e_np},
batch_size=5,
verbose=0)
model.evaluate(
{
'input_a': input_a_np,
'input_b': input_b_np
}, {'dense': output_d_np,
'dropout': output_e_np},
batch_size=5,
verbose=1)
# Test predict
out = model.predict([input_a_np, input_b_np], batch_size=5)
self.assertEqual(len(out), 2)
out = model.predict({'input_a': input_a_np, 'input_b': input_b_np})
self.assertEqual(len(out), 2)
out = model.predict_on_batch({
'input_a': input_a_np,
'input_b': input_b_np
})
self.assertEqual(len(out), 2)
def test_invalid_loss_or_metrics(self):
num_classes = 5
train_samples = 1000
test_samples = 1000
input_dim = 5
model = keras.models.Sequential()
model.add(keras.layers.Dense(10, input_shape=(input_dim,)))
model.add(keras.layers.Activation('relu'))
model.add(keras.layers.Dense(num_classes))
model.add(keras.layers.Activation('softmax'))
model.compile(loss='categorical_crossentropy',
optimizer=RMSPropOptimizer(learning_rate=0.001))
np.random.seed(1337)
(x_train, y_train), (_, _) = testing_utils.get_test_data(
train_samples=train_samples,
test_samples=test_samples,
input_shape=(input_dim,),
num_classes=num_classes)
with self.assertRaises(ValueError):
model.fit(x_train, np.concatenate([y_train, y_train], axis=-1))
with self.assertRaises(TypeError):
model.compile(loss='categorical_crossentropy',
optimizer=RMSPropOptimizer(learning_rate=0.001),
metrics=set(0))
with self.assertRaises(ValueError):
model.compile(loss=None,
optimizer='rms')
def test_model_methods_with_eager_tensors_multi_io(self):
a = keras.layers.Input(shape=(3,), name='input_a')
b = keras.layers.Input(shape=(3,), name='input_b')
dense = keras.layers.Dense(4, name='dense')
c = dense(a)
d = dense(b)
e = keras.layers.Dropout(0.5, name='dropout')(c)
model = keras.models.Model([a, b], [d, e])
optimizer = RMSPropOptimizer(learning_rate=0.001)
loss = 'mse'
loss_weights = [1., 0.5]
metrics = ['mae']
model.compile(
optimizer,
loss,
metrics=metrics,
loss_weights=loss_weights,
sample_weight_mode=None)
input_a = keras.backend.zeros(shape=(10, 3))
input_b = keras.backend.zeros(shape=(10, 3))
target_d = keras.backend.zeros(shape=(10, 4))
target_e = keras.backend.zeros(shape=(10, 4))
model.fit(
[input_a, input_b], [target_d, target_e],
epochs=1,
batch_size=5,
verbose=0)
# Test: no shuffle.
model.fit(
[input_a, input_b], [target_d, target_e],
epochs=1,
batch_size=5,
verbose=0,
shuffle=False)
# Test: validation data.
model.fit([input_a, input_b], [target_d, target_e],
epochs=1, batch_size=2, verbose=0,
validation_data=([input_a, input_b], [target_d, target_e]))
model.train_on_batch([input_a, input_b], [target_d, target_e])
model.predict([input_a, input_b], batch_size=5)
model.evaluate([input_a, input_b], [target_d, target_e],
batch_size=2, verbose=0)
model.test_on_batch([input_a, input_b], [target_d, target_e])
# Test: mix np and tensors.
input_b = np.zeros(shape=(10, 3)).astype('float32')
target_e = np.zeros(shape=(10, 4)).astype('float32')
model.fit(
[input_a, input_b], [target_d, target_e],
epochs=1,
batch_size=5,
verbose=0)
model.fit([input_a, input_b], [target_d, target_e],
epochs=1, batch_size=2, verbose=0,
validation_data=([input_a, input_b], [target_d, target_e]))
model.fit(
[input_a, input_b], [target_d, target_e],
epochs=1,
batch_size=5,
verbose=0,
shuffle=False)
model.train_on_batch([input_a, input_b], [target_d, target_e])
model.predict([input_a, input_b], batch_size=5)
model.evaluate([input_a, input_b], [target_d, target_e],
batch_size=2, verbose=0)
model.test_on_batch([input_a, input_b], [target_d, target_e])
def test_model_methods_with_eager_tensors_single_io(self):
x = keras.layers.Input(shape=(3,), name='input')
y = keras.layers.Dense(4, name='dense')(x)
model = keras.Model(x, y)
optimizer = RMSPropOptimizer(learning_rate=0.001)
loss = 'mse'
metrics = ['mae']
model.compile(optimizer, loss, metrics=metrics)
inputs = keras.backend.zeros(shape=(10, 3))
targets = keras.backend.zeros(shape=(10, 4))
model.fit(inputs, targets, epochs=1, batch_size=2, verbose=0)
model.fit(inputs, targets, epochs=1, batch_size=3, verbose=0, shuffle=False)
model.fit(inputs, targets, epochs=1, batch_size=4, verbose=0,
validation_data=(inputs, targets))
model.evaluate(inputs, targets, batch_size=2, verbose=0)
model.predict(inputs, batch_size=2)
model.train_on_batch(inputs, targets)
model.test_on_batch(inputs, targets)
def test_generator_methods(self):
model = keras.Sequential()
model.add(keras.layers.Dense(4, input_shape=(3,)))
optimizer = RMSPropOptimizer(learning_rate=0.001)
model.compile(optimizer, 'mse', metrics=['mae'])
x = np.random.random((10, 3))
y = np.random.random((10, 4))
def iterator():
while True:
yield x, y
model.fit_generator(iterator(), steps_per_epoch=3, epochs=1)
model.evaluate_generator(iterator(), steps=3)
out = model.predict_generator(iterator(), steps=3)
self.assertEqual(out.shape, (30, 4))
class LossWeightingTest(test.TestCase):
def test_class_weights(self):
num_classes = 5
batch_size = 5
weighted_class = 3
train_samples = 300
test_samples = 300
input_dim = 5
model = keras.models.Sequential()
model.add(keras.layers.Dense(10, input_shape=(input_dim,)))
model.add(keras.layers.Activation('relu'))
model.add(keras.layers.Dense(num_classes))
model.add(keras.layers.Activation('softmax'))
model.compile(loss='categorical_crossentropy',
optimizer=RMSPropOptimizer(learning_rate=0.001))
np.random.seed(1337)
(x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
train_samples=train_samples,
test_samples=test_samples,
input_shape=(input_dim,),
num_classes=num_classes)
int_y_test = y_test.copy()
int_y_train = y_train.copy()
# convert class vectors to binary class matrices
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
test_ids = np.where(int_y_test == np.array(weighted_class))[0]
class_weight = dict([(i, 1.) for i in range(num_classes)])
class_weight[weighted_class] = 4.
sample_weight = np.ones((y_train.shape[0]))
sample_weight[int_y_train == weighted_class] = 4.
model.fit(
x_train,
y_train,
batch_size=batch_size,
epochs=2,
verbose=0,
class_weight=class_weight,
validation_data=(x_train, y_train, sample_weight))
model.fit(
x_train,
y_train,
batch_size=batch_size,
epochs=2,
verbose=0,
class_weight=class_weight)
model.fit(
x_train,
y_train,
batch_size=batch_size,
epochs=2,
verbose=0,
class_weight=class_weight,
validation_split=0.1)
model.train_on_batch(
x_train[:batch_size], y_train[:batch_size], class_weight=class_weight)
ref_score = model.evaluate(x_test, y_test, verbose=0)
score = model.evaluate(
x_test[test_ids, :], y_test[test_ids, :], verbose=0)
self.assertLess(score, ref_score)
def test_sample_weights(self):
num_classes = 5
batch_size = 5
weighted_class = 3
train_samples = 300
test_samples = 300
input_dim = 5
model = keras.models.Sequential()
model.add(keras.layers.Dense(10, input_shape=(input_dim,)))
model.add(keras.layers.Activation('relu'))
model.add(keras.layers.Dense(num_classes))
model.add(keras.layers.Activation('softmax'))
model.compile(loss='categorical_crossentropy',
optimizer=RMSPropOptimizer(learning_rate=0.001))
np.random.seed(43)
(x_train, y_train), _ = testing_utils.get_test_data(
train_samples=train_samples,
test_samples=test_samples,
input_shape=(input_dim,),
num_classes=num_classes)
int_y_train = y_train.copy()
y_train = keras.utils.to_categorical(y_train, num_classes)
class_weight = dict([(i, 1.) for i in range(num_classes)])
class_weight[weighted_class] = 4.
sample_weight = np.ones((y_train.shape[0]))
sample_weight[int_y_train == weighted_class] = 4.
model.fit(
x_train,
y_train,
batch_size=batch_size,
epochs=2,
verbose=0,
sample_weight=sample_weight)
model.fit(
x_train,
y_train,
batch_size=batch_size,
epochs=2,
verbose=0,
sample_weight=sample_weight,
validation_split=0.1)
model.train_on_batch(
x_train[:batch_size],
y_train[:batch_size],
sample_weight=sample_weight[:batch_size])
model.test_on_batch(
x_train[:batch_size],
y_train[:batch_size],
sample_weight=sample_weight[:batch_size])
def test_temporal_sample_weights(self):
num_classes = 5
weighted_class = 3
train_samples = 1000
test_samples = 1000
input_dim = 5
timesteps = 3
model = keras.models.Sequential()
model.add(
keras.layers.TimeDistributed(
keras.layers.Dense(num_classes),
input_shape=(timesteps, input_dim)))
model.add(keras.layers.Activation('softmax'))
np.random.seed(1337)
(_, y_train), _ = testing_utils.get_test_data(
train_samples=train_samples,
test_samples=test_samples,
input_shape=(input_dim,),
num_classes=num_classes)
int_y_train = y_train.copy()
# convert class vectors to binary class matrices
y_train = keras.utils.to_categorical(y_train, num_classes)
class_weight = dict([(i, 1.) for i in range(num_classes)])
class_weight[weighted_class] = 2.
sample_weight = np.ones((y_train.shape[0]))
sample_weight[int_y_train == weighted_class] = 2.
with self.assertRaises(ValueError):
model.compile(
loss='binary_crossentropy',
optimizer=RMSPropOptimizer(learning_rate=0.001),
sample_weight_mode='temporal')
def test_class_weight_invalid_use_case(self):
num_classes = 5
train_samples = 1000
test_samples = 1000
input_dim = 5
timesteps = 3
model = keras.models.Sequential()
model.add(
keras.layers.TimeDistributed(
keras.layers.Dense(num_classes),
input_shape=(timesteps, input_dim)))
model.add(keras.layers.Activation('softmax'))
model.compile(
loss='binary_crossentropy',
optimizer=RMSPropOptimizer(learning_rate=0.001))
(x_train, y_train), _ = testing_utils.get_test_data(
train_samples=train_samples,
test_samples=test_samples,
input_shape=(input_dim,),
num_classes=num_classes)
# convert class vectors to binary class matrices
y_train = keras.utils.to_categorical(y_train, num_classes)
class_weight = dict([(i, 1.) for i in range(num_classes)])
del class_weight[1]
with self.assertRaises(ValueError):
model.fit(x_train, y_train,
epochs=0, verbose=0, class_weight=class_weight)
with self.assertRaises(ValueError):
model.compile(
loss='binary_crossentropy',
optimizer=RMSPropOptimizer(learning_rate=0.001),
sample_weight_mode=[])
# Build multi-output model
x = keras.Input((3,))
y1 = keras.layers.Dense(4, name='1')(x)
y2 = keras.layers.Dense(4, name='2')(x)
model = keras.models.Model(x, [y1, y2])
model.compile(optimizer=RMSPropOptimizer(learning_rate=0.001), loss='mse')
x_np = np.random.random((10, 3))
y_np = np.random.random((10, 4))
w_np = np.random.random((10,))
# This will work
model.fit(x_np, [y_np, y_np], epochs=1, sample_weight={'1': w_np})
# These will not
with self.assertRaises(ValueError):
model.fit(x_np, [y_np, y_np], epochs=1, sample_weight=[w_np])
with self.assertRaises(TypeError):
model.fit(x_np, [y_np, y_np], epochs=1, sample_weight=w_np)
with self.assertRaises(ValueError):
bad_w_np = np.random.random((11,))
model.fit(x_np, [y_np, y_np], epochs=1, sample_weight={'1': bad_w_np})
with self.assertRaises(ValueError):
bad_w_np = np.random.random((10, 2))
model.fit(x_np, [y_np, y_np], epochs=1, sample_weight={'1': bad_w_np})
with self.assertRaises(ValueError):
bad_w_np = np.random.random((10, 2, 2))
model.fit(x_np, [y_np, y_np], epochs=1, sample_weight={'1': bad_w_np})
class CorrectnessTest(test.TestCase):
@tf_test_util.run_in_graph_and_eager_modes
def test_loss_correctness(self):
# Test that training loss is the same in eager and graph
# (by comparing it to a reference value in a deterministic case)
model = keras.Sequential()
model.add(keras.layers.Dense(3,
activation='relu',
input_dim=4,
kernel_initializer='ones'))
model.add(keras.layers.Dense(2,
activation='softmax',
kernel_initializer='ones'))
model.compile(loss='sparse_categorical_crossentropy',
optimizer=RMSPropOptimizer(learning_rate=0.001))
x = np.ones((100, 4))
np.random.seed(123)
y = np.random.randint(0, 1, size=(100, 1))
history = model.fit(x, y, epochs=1, batch_size=10)
self.assertEqual(
np.around(history.history['loss'][-1], decimals=4), 0.6173)
@tf_test_util.run_in_graph_and_eager_modes
def test_metrics_correctness(self):
model = keras.Sequential()
model.add(keras.layers.Dense(3,
activation='relu',
input_dim=4,
kernel_initializer='ones'))
model.add(keras.layers.Dense(1,
activation='sigmoid',
kernel_initializer='ones'))
model.compile(loss='mae',
metrics=['acc'],
optimizer=RMSPropOptimizer(learning_rate=0.001))
x = np.ones((100, 4))
y = np.ones((100, 1))
outs = model.evaluate(x, y)
self.assertEqual(outs[1], 1.)
y = np.zeros((100, 1))
outs = model.evaluate(x, y)
self.assertEqual(outs[1], 0.)
@tf_test_util.run_in_graph_and_eager_modes
def test_loss_correctness_with_iterator(self):
# Test that training loss is the same in eager and graph
# (by comparing it to a reference value in a deterministic case)
model = keras.Sequential()
model.add(
keras.layers.Dense(
3, activation='relu', input_dim=4, kernel_initializer='ones'))
model.add(
keras.layers.Dense(2, activation='softmax', kernel_initializer='ones'))
model.compile(
loss='sparse_categorical_crossentropy',
optimizer=RMSPropOptimizer(learning_rate=0.001))
x = np.ones((100, 4), dtype=np.float32)
np.random.seed(123)
y = np.random.randint(0, 1, size=(100, 1))
dataset = dataset_ops.Dataset.from_tensor_slices((x, y))
dataset = dataset.repeat(100)
dataset = dataset.batch(10)
iterator = dataset.make_one_shot_iterator()
history = model.fit(iterator, epochs=1, steps_per_epoch=10)
self.assertEqual(np.around(history.history['loss'][-1], decimals=4), 0.6173)
@tf_test_util.run_in_graph_and_eager_modes
def test_metrics_correctness_with_iterator(self):
model = keras.Sequential()
model.add(
keras.layers.Dense(
8, activation='relu', input_dim=4, kernel_initializer='ones'))
model.add(
keras.layers.Dense(1, activation='sigmoid', kernel_initializer='ones'))
model.compile(
loss='binary_crossentropy',
metrics=['accuracy'],
optimizer=RMSPropOptimizer(learning_rate=0.001))
np.random.seed(123)
x = np.random.randint(10, size=(100, 4)).astype(np.float32)
y = np.random.randint(2, size=(100, 1)).astype(np.float32)
dataset = dataset_ops.Dataset.from_tensor_slices((x, y))
dataset = dataset.batch(10)
iterator = dataset.make_one_shot_iterator()
outs = model.evaluate(iterator, steps=10)
self.assertEqual(np.around(outs[1], decimals=1), 0.5)
y = np.zeros((100, 1), dtype=np.float32)
dataset = dataset_ops.Dataset.from_tensor_slices((x, y))
dataset = dataset.repeat(100)
dataset = dataset.batch(10)
iterator = dataset.make_one_shot_iterator()
outs = model.evaluate(iterator, steps=10)
self.assertEqual(outs[1], 0.)
if __name__ == '__main__':
ops.enable_eager_execution()
test.main()
|
|
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from pylib.base import base_test_result
from pylib.results import json_results
class JsonResultsTest(unittest.TestCase):
def testGenerateResultsDict_passedResult(self):
result = base_test_result.BaseTestResult(
'test.package.TestName', base_test_result.ResultType.PASS)
all_results = base_test_result.TestRunResults()
all_results.AddResult(result)
results_dict = json_results.GenerateResultsDict([all_results])
self.assertEquals(
['test.package.TestName'],
results_dict['all_tests'])
self.assertEquals(1, len(results_dict['per_iteration_data']))
iteration_result = results_dict['per_iteration_data'][0]
self.assertTrue('test.package.TestName' in iteration_result)
self.assertEquals(1, len(iteration_result['test.package.TestName']))
test_iteration_result = iteration_result['test.package.TestName'][0]
self.assertTrue('status' in test_iteration_result)
self.assertEquals('SUCCESS', test_iteration_result['status'])
def testGenerateResultsDict_skippedResult(self):
result = base_test_result.BaseTestResult(
'test.package.TestName', base_test_result.ResultType.SKIP)
all_results = base_test_result.TestRunResults()
all_results.AddResult(result)
results_dict = json_results.GenerateResultsDict([all_results])
self.assertEquals(
['test.package.TestName'],
results_dict['all_tests'])
self.assertEquals(1, len(results_dict['per_iteration_data']))
iteration_result = results_dict['per_iteration_data'][0]
self.assertTrue('test.package.TestName' in iteration_result)
self.assertEquals(1, len(iteration_result['test.package.TestName']))
test_iteration_result = iteration_result['test.package.TestName'][0]
self.assertTrue('status' in test_iteration_result)
self.assertEquals('SKIPPED', test_iteration_result['status'])
def testGenerateResultsDict_failedResult(self):
result = base_test_result.BaseTestResult(
'test.package.TestName', base_test_result.ResultType.FAIL)
all_results = base_test_result.TestRunResults()
all_results.AddResult(result)
results_dict = json_results.GenerateResultsDict([all_results])
self.assertEquals(
['test.package.TestName'],
results_dict['all_tests'])
self.assertEquals(1, len(results_dict['per_iteration_data']))
iteration_result = results_dict['per_iteration_data'][0]
self.assertTrue('test.package.TestName' in iteration_result)
self.assertEquals(1, len(iteration_result['test.package.TestName']))
test_iteration_result = iteration_result['test.package.TestName'][0]
self.assertTrue('status' in test_iteration_result)
self.assertEquals('FAILURE', test_iteration_result['status'])
def testGenerateResultsDict_duration(self):
result = base_test_result.BaseTestResult(
'test.package.TestName', base_test_result.ResultType.PASS, duration=123)
all_results = base_test_result.TestRunResults()
all_results.AddResult(result)
results_dict = json_results.GenerateResultsDict([all_results])
self.assertEquals(
['test.package.TestName'],
results_dict['all_tests'])
self.assertEquals(1, len(results_dict['per_iteration_data']))
iteration_result = results_dict['per_iteration_data'][0]
self.assertTrue('test.package.TestName' in iteration_result)
self.assertEquals(1, len(iteration_result['test.package.TestName']))
test_iteration_result = iteration_result['test.package.TestName'][0]
self.assertTrue('elapsed_time_ms' in test_iteration_result)
self.assertEquals(123, test_iteration_result['elapsed_time_ms'])
def testGenerateResultsDict_multipleResults(self):
result1 = base_test_result.BaseTestResult(
'test.package.TestName1', base_test_result.ResultType.PASS)
result2 = base_test_result.BaseTestResult(
'test.package.TestName2', base_test_result.ResultType.PASS)
all_results = base_test_result.TestRunResults()
all_results.AddResult(result1)
all_results.AddResult(result2)
results_dict = json_results.GenerateResultsDict([all_results])
self.assertEquals(
['test.package.TestName1', 'test.package.TestName2'],
results_dict['all_tests'])
self.assertTrue('per_iteration_data' in results_dict)
iterations = results_dict['per_iteration_data']
self.assertEquals(1, len(iterations))
expected_tests = set([
'test.package.TestName1',
'test.package.TestName2',
])
for test_name, iteration_result in iterations[0].iteritems():
self.assertTrue(test_name in expected_tests)
expected_tests.remove(test_name)
self.assertEquals(1, len(iteration_result))
test_iteration_result = iteration_result[0]
self.assertTrue('status' in test_iteration_result)
self.assertEquals('SUCCESS', test_iteration_result['status'])
def testGenerateResultsDict_passOnRetry(self):
raw_results = []
result1 = base_test_result.BaseTestResult(
'test.package.TestName1', base_test_result.ResultType.FAIL)
run_results1 = base_test_result.TestRunResults()
run_results1.AddResult(result1)
raw_results.append(run_results1)
result2 = base_test_result.BaseTestResult(
'test.package.TestName1', base_test_result.ResultType.PASS)
run_results2 = base_test_result.TestRunResults()
run_results2.AddResult(result2)
raw_results.append(run_results2)
results_dict = json_results.GenerateResultsDict([raw_results])
self.assertEquals(['test.package.TestName1'], results_dict['all_tests'])
# Check that there's only one iteration.
self.assertIn('per_iteration_data', results_dict)
iterations = results_dict['per_iteration_data']
self.assertEquals(1, len(iterations))
# Check that test.package.TestName1 is the only test in the iteration.
self.assertEquals(1, len(iterations[0]))
self.assertIn('test.package.TestName1', iterations[0])
# Check that there are two results for test.package.TestName1.
actual_test_results = iterations[0]['test.package.TestName1']
self.assertEquals(2, len(actual_test_results))
# Check that the first result is a failure.
self.assertIn('status', actual_test_results[0])
self.assertEquals('FAILURE', actual_test_results[0]['status'])
# Check that the second result is a success.
self.assertIn('status', actual_test_results[1])
self.assertEquals('SUCCESS', actual_test_results[1]['status'])
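    # For reference, the dict checked above has roughly this shape (a sketch built from
    # the assertions in this test; fields that are not asserted on are omitted):
    #
    #   {
    #     'all_tests': ['test.package.TestName1'],
    #     'per_iteration_data': [{
    #       'test.package.TestName1': [
    #         {'status': 'FAILURE', ...},
    #         {'status': 'SUCCESS', ...},
    #       ],
    #     }],
    #   }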
def testGenerateResultsDict_globalTags(self):
raw_results = []
global_tags = ['UNRELIABLE_RESULTS']
results_dict = json_results.GenerateResultsDict(
[raw_results], global_tags=global_tags)
self.assertEquals(['UNRELIABLE_RESULTS'], results_dict['global_tags'])
def testGenerateResultsDict_loslessSnippet(self):
result = base_test_result.BaseTestResult(
'test.package.TestName', base_test_result.ResultType.FAIL)
log = 'blah-blah'
result.SetLog(log)
all_results = base_test_result.TestRunResults()
all_results.AddResult(result)
results_dict = json_results.GenerateResultsDict([all_results])
self.assertEquals(
['test.package.TestName'],
results_dict['all_tests'])
self.assertEquals(1, len(results_dict['per_iteration_data']))
iteration_result = results_dict['per_iteration_data'][0]
self.assertTrue('test.package.TestName' in iteration_result)
self.assertEquals(1, len(iteration_result['test.package.TestName']))
test_iteration_result = iteration_result['test.package.TestName'][0]
self.assertTrue('losless_snippet' in test_iteration_result)
self.assertTrue(test_iteration_result['losless_snippet'])
self.assertTrue('output_snippet' in test_iteration_result)
self.assertEquals(log, test_iteration_result['output_snippet'])
self.assertTrue('output_snippet_base64' in test_iteration_result)
self.assertEquals('', test_iteration_result['output_snippet_base64'])
if __name__ == '__main__':
unittest.main(verbosity=2)
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class KeysOperations(object):
"""KeysOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.keyvault.v2021_06_01_preview.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
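    # Usage sketch (assumed, not part of the generated module): this operations group is
    # normally reached through the management client rather than constructed directly, e.g.
    #
    #   from azure.identity import DefaultAzureCredential
    #   from azure.mgmt.keyvault import KeyVaultManagementClient
    #
    #   client = KeyVaultManagementClient(DefaultAzureCredential(), "<subscription-id>")
    #   keys_ops = client.keys  # an instance of this KeysOperations class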
def create_if_not_exist(
self,
resource_group_name, # type: str
vault_name, # type: str
key_name, # type: str
parameters, # type: "_models.KeyCreateParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.Key"
"""Creates the first version of a new key if it does not exist. If it already exists, then the
existing key is returned without any write operations being performed. This API does not create
subsequent versions, and does not update existing keys.
:param resource_group_name: The name of the resource group which contains the specified key
vault.
:type resource_group_name: str
:param vault_name: The name of the key vault which contains the key to be created.
:type vault_name: str
:param key_name: The name of the key to be created.
:type key_name: str
:param parameters: The parameters used to create the specified key.
:type parameters: ~azure.mgmt.keyvault.v2021_06_01_preview.models.KeyCreateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Key, or the result of cls(response)
:rtype: ~azure.mgmt.keyvault.v2021_06_01_preview.models.Key
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.Key"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-06-01-preview"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_if_not_exist.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vaultName': self._serialize.url("vault_name", vault_name, 'str', pattern=r'^[a-zA-Z0-9-]{3,24}$'),
'keyName': self._serialize.url("key_name", key_name, 'str', pattern=r'^[a-zA-Z0-9-]{1,127}$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'KeyCreateParameters')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('Key', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_if_not_exist.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/vaults/{vaultName}/keys/{keyName}'} # type: ignore
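    # Illustrative sketch (not part of the generated module): a minimal call, assuming the
    # ARM key body only needs properties.kty and that a dict body is accepted in place of a
    # KeyCreateParameters model; all names and values below are placeholders.
    #
    #   key = client.keys.create_if_not_exist(
    #       "my-rg", "my-vault", "my-key",
    #       {"properties": {"kty": "RSA"}})
    #   print(key.name, key.properties.kty)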
def get(
self,
resource_group_name, # type: str
vault_name, # type: str
key_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.Key"
"""Gets the current version of the specified key from the specified key vault.
:param resource_group_name: The name of the resource group which contains the specified key
vault.
:type resource_group_name: str
:param vault_name: The name of the vault which contains the key to be retrieved.
:type vault_name: str
:param key_name: The name of the key to be retrieved.
:type key_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Key, or the result of cls(response)
:rtype: ~azure.mgmt.keyvault.v2021_06_01_preview.models.Key
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.Key"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-06-01-preview"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vaultName': self._serialize.url("vault_name", vault_name, 'str', pattern=r'^[a-zA-Z0-9-]{3,24}$'),
'keyName': self._serialize.url("key_name", key_name, 'str', pattern=r'^[a-zA-Z0-9-]{1,127}$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('Key', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/vaults/{vaultName}/keys/{keyName}'} # type: ignore
def list(
self,
resource_group_name, # type: str
vault_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.KeyListResult"]
"""Lists the keys in the specified key vault.
:param resource_group_name: The name of the resource group which contains the specified key
vault.
:type resource_group_name: str
:param vault_name: The name of the vault which contains the keys to be retrieved.
:type vault_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either KeyListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.keyvault.v2021_06_01_preview.models.KeyListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-06-01-preview"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vaultName': self._serialize.url("vault_name", vault_name, 'str', pattern=r'^[a-zA-Z0-9-]{3,24}$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('KeyListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/vaults/{vaultName}/keys'} # type: ignore
def get_version(
self,
resource_group_name, # type: str
vault_name, # type: str
key_name, # type: str
key_version, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.Key"
"""Gets the specified version of the specified key in the specified key vault.
:param resource_group_name: The name of the resource group which contains the specified key
vault.
:type resource_group_name: str
:param vault_name: The name of the vault which contains the key version to be retrieved.
:type vault_name: str
        :param key_name: The name of the key whose version is to be retrieved.
:type key_name: str
:param key_version: The version of the key to be retrieved.
:type key_version: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Key, or the result of cls(response)
:rtype: ~azure.mgmt.keyvault.v2021_06_01_preview.models.Key
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.Key"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-06-01-preview"
accept = "application/json"
# Construct URL
url = self.get_version.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vaultName': self._serialize.url("vault_name", vault_name, 'str', pattern=r'^[a-zA-Z0-9-]{3,24}$'),
'keyName': self._serialize.url("key_name", key_name, 'str', pattern=r'^[a-zA-Z0-9-]{1,127}$'),
'keyVersion': self._serialize.url("key_version", key_version, 'str', pattern=r'^[a-fA-F0-9]{32}$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('Key', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_version.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/vaults/{vaultName}/keys/{keyName}/versions/{keyVersion}'} # type: ignore
def list_versions(
self,
resource_group_name, # type: str
vault_name, # type: str
key_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.KeyListResult"]
"""Lists the versions of the specified key in the specified key vault.
:param resource_group_name: The name of the resource group which contains the specified key
vault.
:type resource_group_name: str
:param vault_name: The name of the vault which contains the key versions to be retrieved.
:type vault_name: str
        :param key_name: The name of the key whose versions are to be retrieved.
:type key_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either KeyListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.keyvault.v2021_06_01_preview.models.KeyListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-06-01-preview"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_versions.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vaultName': self._serialize.url("vault_name", vault_name, 'str', pattern=r'^[a-zA-Z0-9-]{3,24}$'),
'keyName': self._serialize.url("key_name", key_name, 'str', pattern=r'^[a-zA-Z0-9-]{1,127}$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('KeyListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_versions.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/vaults/{vaultName}/keys/{keyName}/versions'} # type: ignore
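    # ------------------------------------------------------------------
    # Hedged usage sketch (not part of the generated operations class):
    # assuming this class is exposed as ``client.keys`` on an
    # azure-mgmt-keyvault management client, retrieving and listing keys
    # might look roughly like the commented example below. The credential,
    # subscription id, resource group, vault and key names are placeholders.
    #
    #     from azure.identity import DefaultAzureCredential
    #     from azure.mgmt.keyvault import KeyVaultManagementClient
    #
    #     client = KeyVaultManagementClient(DefaultAzureCredential(),
    #                                       "<subscription-id>")
    #     key = client.keys.get("<resource-group>", "<vault-name>", "<key-name>")
    #     for k in client.keys.list("<resource-group>", "<vault-name>"):
    #         print(k.name)
    # ------------------------------------------------------------------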
|
|
"""
Unit tests for REST Client
"""
import json
import mock
import requests
from purestorage import FlashArray, PureError, PureHTTPError
CLIENT_PATH = "purestorage.purestorage"
ARRAY_OBJ = CLIENT_PATH + ".FlashArray"
class TestBase(object):
def setup_method(self, __):
self.api_token = "12345678-abcd-1234-abcd-1234567890ab"
self.api_token_data = {"api_token": self.api_token}
self.other_rest_versions = ["0.1", "1.1", "1.0", "99.9"]
self.rest_version = "1.2"
self.rest_version_data = {"version": self.other_rest_versions}
self.target = "pure-target"
@staticmethod
def assert_raises(error, func, *args, **kwargs):
"""Assert that a function raises the correct error
Fail if the function, called with the specified args and kwargs
doesn't raise an exception of type error.
"""
try:
func(*args, **kwargs)
except error:
pass
else:
raise AssertionError()
@classmethod
def assert_error_propagates(cls, mocks, func, *args, **kwargs):
"""Assert that errors from mocks propogate to func.
Fail if exceptions raised by mocks are not seen when calling
func(*args, **kwargs). Ensure that we are really seeing exceptions
from the mocks by failing if just running func(*args, **kargs) raises
an exception itself.
"""
func(*args, **kwargs)
for mock_func in mocks:
mock_func.side_effect = PureError("reason")
cls.assert_raises(PureError, func, *args, **kwargs)
mock_func.side_effect = None
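# Hedged usage sketch (illustrative only, not an additional test): the helpers
# above are meant to be called from test methods roughly like the commented
# lines below, where ``mock_request`` is a patched FlashArray._request mock.
#
#     self.assert_raises(PureHTTPError, self.array._request,
#                        self.method, self.path, self.data)
#     self.assert_error_propagates([mock_request], self.array._request,
#                                  self.method, self.path, self.data)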
@mock.patch(ARRAY_OBJ + "._request", autospec=True)
class TestInit(TestBase):
def setup_method(self, method):
super(TestInit, self).setup_method(method)
self.password = "purepass"
self.username = "pureuser"
self.username_data = {"username": self.username}
def test_init_with_api_token(self, mock_request):
mock_request.side_effect = iter([
self.rest_version_data,
self.username_data,
])
array = FlashArray(self.target, api_token=self.api_token)
expected = [
mock.call(
array, "GET", "https://{0}/api/api_version".format(self.target),
reestablish_session=False),
mock.call(array, "POST", "auth/session", self.api_token_data,
reestablish_session=False),
]
assert array._target == self.target
assert array._rest_version == "1.1"
assert array._renegotiate_rest_version == True
assert array._api_token == self.api_token
assert mock_request.call_args_list == expected
def test_init_with_username_password(self, mock_request):
mock_request.side_effect = iter([
self.rest_version_data,
self.api_token_data,
self.username_data,
])
array = FlashArray(self.target, self.username, self.password)
expected = [
mock.call(array, "GET",
"https://{0}/api/api_version".format(self.target),
reestablish_session=False),
mock.call(array, "POST", "auth/apitoken",
{"username": self.username, "password": self.password},
reestablish_session=False),
mock.call(array, "POST", "auth/session", self.api_token_data,
reestablish_session=False),
]
assert array._target == self.target
assert array._rest_version == "1.1"
assert array._renegotiate_rest_version == True
assert array._api_token == self.api_token
assert mock_request.call_args_list == expected
def test_init_with_version(self, mock_request):
mock_request.side_effect = iter([
{"version": ["0.1", "1.1", "1.0", "1.2", "1.3"]},
self.username_data,
])
array = FlashArray(self.target, api_token=self.api_token, rest_version="1.0")
expected = [
mock.call(array, "GET",
"https://{0}/api/api_version".format(self.target),
reestablish_session=False),
mock.call(array, "POST", "auth/session", self.api_token_data,
reestablish_session=False),
]
assert array._target == self.target
assert array._rest_version == "1.0"
assert array._renegotiate_rest_version == False
assert array._api_token == self.api_token
assert mock_request.call_args_list == expected
@mock.patch(ARRAY_OBJ + "._start_session", autospec=True)
@mock.patch(ARRAY_OBJ + "._obtain_api_token", autospec=True)
@mock.patch(ARRAY_OBJ + "._check_rest_version", autospec=True)
@mock.patch(ARRAY_OBJ + "._choose_rest_version", autospec=True)
def test_init_exceptions(self, mock_choose, mock_check, mock_obtain,
mock_start, __):
mock_choose.return_value = self.rest_version
mock_check.return_value = self.rest_version
mock_obtain.return_value = self.api_token
mock_start.return_value = None
self.assert_error_propagates(
[mock_choose, mock_start], FlashArray,
self.target, api_token=self.api_token)
self.assert_error_propagates(
[mock_check, mock_start], FlashArray,
self.target, api_token=self.api_token,
rest_version=self.rest_version)
self.assert_error_propagates(
[mock_choose, mock_obtain, mock_start], FlashArray,
self.target, self.username, self.password)
self.assert_error_propagates(
[mock_check, mock_obtain, mock_start], FlashArray,
self.target, self.username, self.password,
rest_version=self.rest_version)
def test_init_bad_args(self, mock_request):
args_list = [
([self.username, self.password], self.api_token_data),
([self.username], self.api_token_data),
([], {"api_token": self.api_token, "password": self.password}),
([self.username], {}),
([self.password], {}),
([], {}),
]
for args, kwargs in args_list:
self.assert_raises(ValueError, FlashArray, self.target, *args, **kwargs)
assert mock_request.call_count == 0
def test_init_verify_https(self, mock_request):
mock_request.side_effect = iter([
self.rest_version_data,
self.username_data,
])
cert_path = '/etc/ssl/certs/ca-cert.crt'
array = FlashArray(self.target,
api_token=self.api_token,
verify_https=True,
ssl_cert=cert_path)
expected = [
mock.call(
array, "GET", "https://{0}/api/api_version".format(self.target),
reestablish_session=False),
mock.call(array, "POST", "auth/session", self.api_token_data,
reestablish_session=False),
]
mock_request.assert_has_calls(expected)
assert cert_path == array._ssl_cert
assert array._verify_https
class TestArrayBase(TestBase):
def setup_method(self, method):
super(TestArrayBase, self).setup_method(method)
self.cookie_jar = {"session": "session-cookie"}
self.supported_rest_versions = ["1.0", "1.1", "1.2"]
array = FakeFlashArray()
array.supported_rest_versions = self.supported_rest_versions
array._target = self.target
array._rest_version = self.rest_version
array._renegotiate_rest_version = True
array._api_token = self.api_token
array._cookies = self.cookie_jar
array._verify_https = False
array._ssl_cert = None
array._user_agent = None
self.array = array
@mock.patch(CLIENT_PATH + ".requests.request", autospec=True)
class TestRequest(TestArrayBase):
def setup_method(self, method):
super(TestRequest, self).setup_method(method)
self.method = "POST"
self.path = "path"
self.path_template = "https://{0}/api/{1}/{2}"
self.full_path = self.path_template.format(
self.target, self.rest_version, self.path)
self.cookies = self.cookie_jar
self.data = {"list": [1, 2, 3]}
self.data_json = json.dumps(self.data)
self.error_msg = "ERROR!"
self.headers = {"Content-Type": "application/json"}
self.new_cookies = {"session": "new-session-cookie"}
self.response_json = '[{"hello": "world"}, "!"]'
self.result = json.loads(self.response_json)
self.ssl_cert = '/etc/ssl/certs/ca-cert.crt'
self.default_call = self.make_call()
def make_response(self, status, data=None, cookies=None):
response = mock.Mock(
spec=["reason", "status_code", "headers", "text", "json", "cookies"])
response.cookies = cookies or {}
response.headers = self.headers
response.json.return_value = data or self.result
response.reason = self.error_msg
response.status_code = status
response.text = json.dumps(self.response_json)
return response
def make_call(self, method=None, path=None, data=None, cookies=None, headers=None):
method = method or self.method
path = path or self.full_path
data = data or self.data_json
cookies = cookies or self.cookies
headers = headers or self.headers
return mock.call(method, path, data=data, headers=headers,
cookies=cookies, verify=False)
def test_request_success(self, mock_request):
mock_request.return_value = self.make_response(200)
real_result = self.array._request(self.method, self.path, self.data)
assert self.result == real_result
assert mock_request.call_args_list == [self.default_call]
def test_request_custom_user_agent_success(self, mock_request):
mock_request.return_value = self.make_response(200)
user_agent = 'Foo Client/3.2.1'
headers = self.headers
headers['User-Agent'] = user_agent
self.array._user_agent = user_agent
real_result = self.array._request(self.method, self.path, self.data)
assert self.result == real_result
assert mock_request.call_args_list == [self.make_call(headers=headers)]
def test_request_401_error(self, mock_request):
start_session_call = self.make_call(
"POST", self.path_template.format(
self.target, self.rest_version, "auth/session"),
json.dumps(self.api_token_data))
mock_request.side_effect = iter([
self.make_response(401),
self.make_response(200, cookies=self.new_cookies),
self.make_response(200, cookies=self.new_cookies)
])
real_result = self.array._request(self.method, self.path, self.data)
assert self.result == real_result
expected = [self.default_call,
start_session_call,
self.make_call(cookies=self.new_cookies)]
assert mock_request.call_args_list == expected
mock_request.reset_mock()
mock_request.side_effect = iter([self.make_response(401)] * 2)
expected = [self.default_call, start_session_call]
self.assert_raises(PureHTTPError, self.array._request,
self.method, self.path, self.data)
assert mock_request.call_args_list == expected
mock_request.reset_mock()
mock_request.side_effect = iter([
self.make_response(401),
self.make_response(200, cookies=self.new_cookies),
self.make_response(401),
])
expected = [self.default_call, start_session_call, self.make_call()]
self.assert_raises(PureHTTPError, self.array._request,
self.method, self.path, self.data)
assert mock_request.call_args_list == expected
def test_request_450_error(self, mock_request):
choose_rest_version_call = self.make_call(
"GET", "https://{0}/api/api_version".format(self.target), "null")
mock_request.side_effect = iter([
self.make_response(450),
self.make_response(200, self.rest_version_data),
self.make_response(200),
])
expected = [
self.default_call,
choose_rest_version_call,
self.make_call(
path=self.path_template.format(self.target, "1.1", self.path))
]
real_result = self.array._request(self.method, self.path, self.data)
assert self.result == real_result
assert mock_request.call_args_list == expected
mock_request.reset_mock()
self.array._rest_version = self.rest_version
mock_request.side_effect = iter([
self.make_response(450),
self.make_response(200, {"version": ["1.1", self.rest_version, "1.3"]}),
])
expected = [self.default_call, choose_rest_version_call]
self.assert_raises(PureHTTPError, self.array._request,
self.method, self.path, self.data)
assert mock_request.call_args_list == expected
mock_request.reset_mock()
mock_request.side_effect = iter([
self.make_response(450),
PureError("reason")
])
expected = [self.default_call, choose_rest_version_call]
self.assert_raises(PureError, self.array._request,
self.method, self.path, self.data)
assert mock_request.call_args_list == expected
mock_request.reset_mock()
self.array._renegotiate_rest_version = False
mock_request.return_value = self.make_response(450)
mock_request.side_effect = None
expected = [self.default_call]
self.assert_raises(PureHTTPError, self.array._request,
self.method, self.path, self.data)
assert mock_request.call_args_list == expected
def test_request_other_error(self, mock_request):
mock_request.return_value = self.make_response(500)
self.assert_raises(PureHTTPError, self.array._request,
self.method, self.path, self.data)
assert mock_request.call_args_list == [self.default_call]
def test_request_request_exception(self, mock_request):
mock_request.side_effect = requests.exceptions.RequestException
        # try/except used to ensure the error is a PureError but not its PureHTTPError subtype
try:
self.array._request(self.method, self.path, self.data)
except PureError as err:
assert not isinstance(err, PureHTTPError)
else:
raise AssertionError()
assert mock_request.call_args_list == [self.default_call]
def test_request_other_exception(self, mock_request):
mock_request.return_value = self.make_response(200)
self.assert_error_propagates([mock_request], self.array._request,
self.method, self.path, self.data)
def _test_request_verify_https_with_ssl_cert(self, mock_request,
verify_https=False,
ssl_cert=None,
expected_verify=None):
self.array._verify_https = verify_https
self.array._ssl_cert = ssl_cert
mock_request.return_value = self.make_response(200)
self.array._request(self.method, self.path, self.data)
mock_request.assert_called_once_with(self.method,
self.full_path,
headers=self.headers,
cookies=self.cookies,
data=self.data_json,
verify=expected_verify)
def test_request_verify_https(self, mock_request):
self._test_request_verify_https_with_ssl_cert(mock_request,
verify_https=True,
expected_verify=True)
def test_request_verify_https_with_ssl_cert(self, mock_request):
self._test_request_verify_https_with_ssl_cert(mock_request,
verify_https=True,
ssl_cert=self.ssl_cert,
expected_verify=self.ssl_cert)
def test_request_dont_verify_https_with_ssl_cert(self, mock_request):
self._test_request_verify_https_with_ssl_cert(mock_request,
verify_https=False,
ssl_cert=self.ssl_cert,
expected_verify=False)
@mock.patch(ARRAY_OBJ + "._request", autospec=True)
class TestOtherMethods(TestArrayBase):
def test_check_rest_version(self, mock_request):
mock_request.return_value = self.rest_version_data
ex_args = [self.array, "GET",
"https://{0}/api/api_version".format(self.target)]
ex_kwargs = {"reestablish_session": False}
result = self.array._check_rest_version("1.0")
assert result == "1.0"
mock_request.assert_called_once_with(*ex_args, **ex_kwargs)
mock_request.reset_mock()
result = self.array._check_rest_version(1.0)
assert result == "1.0"
mock_request.assert_called_once_with(*ex_args, **ex_kwargs)
mock_request.reset_mock()
self.assert_raises(ValueError, self.array._check_rest_version, "0.1")
assert mock_request.call_count == 0
mock_request.reset_mock()
self.assert_raises(ValueError, self.array._check_rest_version, "1.2")
mock_request.assert_called_once_with(*ex_args, **ex_kwargs)
mock_request.reset_mock()
mock_request.side_effect = PureError("reason")
self.assert_raises(PureError, self.array._check_rest_version, "1.0")
mock_request.assert_called_once_with(*ex_args, **ex_kwargs)
def test_choose_rest_version(self, mock_request):
mock_request.return_value = self.rest_version_data
ex_args = [self.array, "GET",
"https://{0}/api/api_version".format(self.target)]
ex_kwargs = {"reestablish_session": False}
result = self.array._choose_rest_version()
assert result == "1.1"
mock_request.assert_called_once_with(*ex_args, **ex_kwargs)
mock_request.reset_mock()
mock_request.return_value = {"version": ["0.1", "1.3"]}
self.assert_raises(PureError, self.array._choose_rest_version)
mock_request.assert_called_once_with(*ex_args, **ex_kwargs)
mock_request.reset_mock()
mock_request.side_effect = PureError("reason")
self.assert_raises(PureError, self.array._choose_rest_version)
mock_request.assert_called_once_with(*ex_args, **ex_kwargs)
class FakeFlashArray(FlashArray):
"""FlashArray with dummy __init__ so attributes can be set directly"""
def __init__(self): #pylint: disable=super-init-not-called
pass
|
|
# simple types for "assembler" instructions
import string
from fracttypes import Bool, Int, Float, Complex, Hyper, Color, Gradient, Image
from fracttypes import TranslationError, typeObjectList
class ComplexArg:
' a pair of args'
def __init__(self,re,im):
self.re = re
self.im = im
def format(self):
        return [self.re.format(), self.im.format()]
def __str__(self):
return "Complex(%s,%s)" % (self.re, self.im)
class HyperArg:
'four args'
def __init__(self,re,im1,im2,im3):
self.parts = [re, im1, im2, im3]
def format(self):
return [x.format() for x in self.parts]
def __str__(self):
return "Hyper(%s,%s,%s,%s)" % tuple(self.parts)
class ColorArg:
'four args'
def __init__(self,re,im1,im2,im3):
self.parts = [re, im1, im2, im3]
def format(self):
return [x.format() for x in self.parts]
def __str__(self):
return "Color(%s,%s,%s,%s)" % tuple(self.parts)
class ConstArg:
def __init__(self, value):
self.value = value
class ConstFloatArg(ConstArg):
def __init__(self,value):
ConstArg.__init__(self, value)
def cformat(self):
return "%.17f"
def format(self):
return "%.17f" % self.value
def __str__(self):
return "Float(%s)" % self.format()
def is_one(self):
return self.value == 1.0
def is_zero(self):
return self.value == 0.0
class ConstIntArg(ConstArg):
def __init__(self,value):
ConstArg.__init__(self, value)
def cformat(self):
return "%d"
def format(self):
return "%d" % self.value
def __str__(self):
return "Int(%s)" % self.format()
def is_one(self):
return self.value == 1
def is_zero(self):
return self.value == 0
class TempArg:
def __init__(self,value,type):
self.value = value
self.type = typeObjectList[type]
def format(self):
return self.value
def cformat(self):
return self.type.printf
def __str__(self):
return "Temp(%s)" % self.format()
def create_arg_from_val(type,val):
if type == Int or type == Bool or type == Image:
return ConstIntArg(val)
elif type == Float:
return ConstFloatArg(val)
elif type == Complex:
return ComplexArg(ConstFloatArg(val[0]),ConstFloatArg(val[1]))
elif type == Hyper:
return HyperArg(
ConstFloatArg(val[0]),ConstFloatArg(val[1]),
ConstFloatArg(val[2]),ConstFloatArg(val[3]))
elif type == Color:
return ColorArg(
ConstFloatArg(val[0]),ConstFloatArg(val[1]),
ConstFloatArg(val[2]),ConstFloatArg(val[3]))
else:
raise TranslationError(
"Internal Compiler Error: Unknown constant type %s" % type)
def create_arg(t):
return create_arg_from_val(t.datatype,t.value)
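# Hedged illustration (comments only, not part of the module): given the arg
# classes above, create_arg_from_val dispatches on the fracttypes type object:
#
#     create_arg_from_val(Int, 3)              # -> ConstIntArg, formats as "3"
#     create_arg_from_val(Float, 1.5)          # -> ConstFloatArg, "%.17f"-formatted
#     create_arg_from_val(Complex, [1.0, 2.0]) # -> ComplexArg of two ConstFloatArgs
#
# Any other type object raises TranslationError.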
class Insn:
'An instruction to be written to output stream'
def __init__(self,assem):
self.assem = assem # string format of instruction
def source(self):
return []
def dest(self):
return []
def format(self):
try:
lookup = {}
i = 0
if self.src != None:
for src in self.src:
sname = "s%d" % i
lookup[sname] = src.format()
i = i+1
i = 0
if self.dst != None:
for dst in self.dst:
dname = "d%d" % i
lookup[dname] = dst.format()
i = i+1
return self.assem % lookup
except Exception, exn:
print exn
msg = "%s with %s" % (self, lookup)
raise TranslationError(
"Internal Compiler Error: can't format " + msg)
class Literal(Insn):
'A loophole in the system to sneak through text directly'
def __init__(self,text):
self.text = text
def cformat(self):
return self.text
def format(self):
return self.text
def dest(self):
return []
def source(self):
return []
class Oper(Insn):
'An operation'
def __init__(self,assem, src, dst, jumps=[]):
Insn.__init__(self,assem)
self.src = src
self.dst = dst
self.jumps = jumps
def dest(self):
return self.dst
def source(self):
return self.src
def __str__(self):
return "OPER(%s,[%s],[%s],%s)" % \
(self.assem,
string.join([x.__str__() for x in self.src],","),
string.join([x.__str__() for x in self.dst],","),
self.jumps)
class Binop(Oper):
'A binary infix operation, like addition'
def __init__(self, op, src, dst, generate_trace = False):
Insn.__init__(self,"")
self.op = op
self.src = src
self.dst = dst
self.trace = generate_trace
def const_eval(self):
c1 = self.src[0]
c2 = self.src[1]
klass = c1.__class__
if self.op == "*":
val = klass(c1.value * c2.value)
elif self.op == "+":
val = klass(c1.value + c2.value)
elif self.op == "-":
val = klass(c1.value - c2.value)
elif self.op == "/":
val = klass(c1.value / c2.value)
else:
# don't know how to const_eval
return self
return Move([val], self.dst)
def __str__(self):
return "BINOP(%s,[%s],[%s])" % \
(self.op,
string.join([x.__str__() for x in self.src],","),
string.join([x.__str__() for x in self.dst],","))
def format(self):
result = "%s = %s %s %s;" % (
self.dst[0].format(),
self.src[0].format(),
self.op,
self.src[1].format())
if self.trace:
result += "printf(\"%s = %s (%s %s %s)\\n\",%s);" % (
self.dst[0].format(),
self.dst[0].cformat(),
self.src[0].format(),
self.op,
self.src[1].format(),
self.dst[0].format())
return result
class Label(Insn):
'A label which can be jumped to'
def __init__(self, label):
Insn.__init__(self,"%s: ;\n" % label)
self.label = label
def format(self):
return "%s ;\n" % self
def __str__(self):
return "%s:" % self.label
class Move(Insn):
' A move instruction'
def __init__(self,src,dst, generate_trace = False):
Insn.__init__(self,"%(d0)s = %(s0)s;")
self.src = src
self.dst = dst
self.trace = generate_trace
    def dest(self):
        return self.dst
    def source(self):
        return self.src
def format(self):
result = "%s = %s;" % (self.dst[0].format(), self.src[0].format())
if self.trace:
result += "printf(\"%s = %s\\n\",%s);" % (
self.dst[0].format(),
self.dst[0].cformat(),
self.src[0].format())
return result
def __str__(self):
return "MOVE(%s,%s,%s)" % (self.assem, self.src, self.dst)
class Decl(Insn):
' a variable declaration'
def __init__(self,assem):
Insn.__init__(self,assem)
self.src = None
self.dst = None
def __str__(self):
return "DECL(%s,%s)" % (self.src, self.dst)
|
|
import unittest
from unittest import mock
from unittest.mock import patch
from canvas_sdk.client import RequestContext
from canvas_sdk.methods import sections
class TestSections(unittest.TestCase):
def setUp(self):
self.section_id = 1234
self.course_id = 9999 # set a fake course id for the tests
self.course_name = 'Fake Course'
self.test_request_kwargs = {'headers': {'my': 'header'}, 'cert': 'my-cert'}
# Set up the request context
self.req_ctx = mock.MagicMock(name='request-context', spec=RequestContext)
self.req_ctx.base_api_url = 'http://base/url/api'
self.req_ctx.per_page = 10
@patch('canvas_sdk.methods.sections.utils.validate_attr_is_acceptable')
@patch('canvas_sdk.methods.sections.client.get')
def test_list_course_sections_get_called_with_request_context(self, mock_client_get, mock_validate):
"""
Assert that request_context is passed to client 'get' call
"""
sections.list_course_sections(self.req_ctx, self.course_id)
mock_client_get.assert_called_once_with(
self.req_ctx, mock.ANY, payload=mock.ANY)
@patch('canvas_sdk.methods.sections.utils.validate_attr_is_acceptable')
@patch('canvas_sdk.methods.sections.client.get')
def test_list_course_sections_get_called_with_absolute_url(self, mock_client_get, mock_validate):
"""
Assert that an absolute url made of base_api_url from context and method path is passed to client 'get' call
"""
sections.list_course_sections(self.req_ctx, self.course_id)
mock_client_get.assert_called_once_with(
mock.ANY, self.req_ctx.base_api_url + '/v1/courses/%s/sections' % self.course_id, payload=mock.ANY)
@patch('canvas_sdk.methods.sections.utils.validate_attr_is_acceptable')
@patch('canvas_sdk.methods.sections.client.get')
def test_list_course_sections_calls_validate_attributes(self, mock_client_get, mock_validate):
"""
Assert that validate_attr_is_acceptable called for include
"""
include = 'students'
sections.list_course_sections(self.req_ctx, self.course_id, include)
mock_validate.assert_called_once_with(include, ('students', 'avatar_url'))
@patch('canvas_sdk.methods.sections.utils.validate_attr_is_acceptable')
@patch('canvas_sdk.methods.sections.client.get')
def test_list_course_sections_get_called_with_default_values(self, mock_client_get, mock_validate):
"""
Assert that client 'get' called with default values for payload data
"""
# Per page should default to request_context's per_page value
per_page_default = self.req_ctx.per_page
sections.list_course_sections(self.req_ctx, self.course_id)
mock_client_get.assert_called_once_with(
mock.ANY, mock.ANY, payload={'include[]': None, 'per_page': per_page_default})
@patch('canvas_sdk.methods.sections.utils.validate_attr_is_acceptable')
@patch('canvas_sdk.methods.sections.client.get')
def test_list_course_sections_get_called_with_user_arg_values(self, mock_client_get, mock_validate):
"""
Assert that client 'get' called with user defined arg values for payload data
"""
include = 'students'
per_page = 60
sections.list_course_sections(self.req_ctx, self.course_id, include, per_page)
mock_client_get.assert_called_once_with(
mock.ANY, mock.ANY, payload={'include[]': include, 'per_page': per_page})
@patch('canvas_sdk.methods.sections.utils.validate_attr_is_acceptable')
@patch('canvas_sdk.methods.sections.client.get')
def test_list_course_sections_get_called_with_request_kwargs(self, mock_client_get, mock_validate):
"""
Assert that client 'get' called with kwargs as additional parameters
"""
sections.list_course_sections(self.req_ctx, self.course_id, **self.test_request_kwargs)
mock_client_get.assert_called_once_with(
mock.ANY, mock.ANY, payload=mock.ANY, **self.test_request_kwargs)
@patch('canvas_sdk.methods.sections.utils.validate_attr_is_acceptable')
@patch('canvas_sdk.methods.sections.client.get')
def test_list_course_sections_returns_result_from_get(self, mock_client_get, mock_validate):
"""
Assert that method returned the result of client 'get' call
"""
results = sections.list_course_sections(self.req_ctx, self.course_id)
self.assertEqual(results, mock_client_get.return_value, 'The client call did not return the correct result.')
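    # Hedged summary of the expected call shape inferred from the assertions
    # above (illustrative, not an authoritative API reference):
    #
    #     result = sections.list_course_sections(
    #         req_ctx, 9999, 'students', 60, headers={'my': 'header'})
    #     # expected to issue:
    #     #   client.get(req_ctx,
    #     #              req_ctx.base_api_url + '/v1/courses/9999/sections',
    #     #              payload={'include[]': 'students', 'per_page': 60},
    #     #              headers={'my': 'header'})
    #     # and to return client.get's result.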
@patch('canvas_sdk.methods.sections.client.post')
def test_create_course_sections_post_called_with_request_context(self, mock_client_post):
"""
        Assert that request_context is passed to client 'post' call
"""
sections.create_course_section(self.req_ctx, self.course_id, self.course_name)
mock_client_post.assert_called_once_with(
self.req_ctx, mock.ANY, payload=mock.ANY)
@patch('canvas_sdk.methods.sections.client.post')
def test_create_course_sections_post_called_with_absolute_url(self, mock_client_post):
"""
        Assert that an absolute url made of base_api_url from context and method path is passed to client 'post' call
"""
sections.create_course_section(self.req_ctx, self.course_id, self.course_name)
mock_client_post.assert_called_once_with(
mock.ANY, self.req_ctx.base_api_url + '/v1/courses/%s/sections' % self.course_id, payload=mock.ANY)
@patch('canvas_sdk.methods.sections.client.post')
def test_create_course_sections_post_called_with_default_values(self, mock_client_post):
"""
Assert that client 'post' called with default values for payload data
"""
sections.create_course_section(self.req_ctx, self.course_id, self.course_name)
mock_client_post.assert_called_once_with(
mock.ANY, mock.ANY, payload={
'course_section[name]': self.course_name,
'course_section[sis_section_id]': None,
'course_section[start_at]': None,
'course_section[end_at]': None}
)
@patch('canvas_sdk.methods.sections.client.post')
def test_create_course_sections_post_called_with_user_arg_values(self, mock_client_post):
"""
Assert that client 'post' called with user's arg values for payload data
"""
sis_section_id = '123ABCD'
start_at = '2011-01-01T01:00Z'
end_at = '2011-02-01T01:00Z'
sections.create_course_section(self.req_ctx, self.course_id, self.course_name, sis_section_id, start_at, end_at)
mock_client_post.assert_called_once_with(
mock.ANY, mock.ANY, payload={
'course_section[name]': self.course_name,
'course_section[sis_section_id]': sis_section_id,
'course_section[start_at]': start_at,
'course_section[end_at]': end_at}
)
@patch('canvas_sdk.methods.sections.client.post')
def test_create_course_section_post_called_with_request_kwargs(self, mock_client_post):
"""
Assert that client 'post' called with kwargs as additional parameters
"""
sections.create_course_section(self.req_ctx, self.course_id, self.course_name, **self.test_request_kwargs)
mock_client_post.assert_called_once_with(
mock.ANY, mock.ANY, payload=mock.ANY, **self.test_request_kwargs)
@patch('canvas_sdk.methods.sections.client.post')
def test_create_course_section_returns_result_from_post(self, mock_client_post):
"""
Assert that method returned the result of client 'post' call
"""
results = sections.create_course_section(self.req_ctx, self.course_id, self.course_name)
self.assertEqual(results, mock_client_post.return_value, 'The client call did not return the correct result.')
@patch('canvas_sdk.methods.sections.client.put')
    def test_edit_section_put_called_with_request_context(self, mock_client_put):
        """
        Assert that request_context is passed to client 'put' call
        """
sections.edit_section(self.req_ctx, self.section_id)
mock_client_put.assert_called_once_with(self.req_ctx, mock.ANY, payload=mock.ANY)
@patch('canvas_sdk.methods.sections.client.put')
    def test_edit_section_put_called_with_absolute_url(self, mock_client_put):
        """
        Assert that an absolute url made of base_api_url from context and method path is passed to client 'put' call
        """
sections.edit_section(self.req_ctx, self.section_id)
mock_client_put.assert_called_once_with(
mock.ANY, self.req_ctx.base_api_url + '/v1/sections/%s' % self.section_id, payload=mock.ANY)
@patch('canvas_sdk.methods.sections.client.put')
def test_edit_section_put_called_with_user_arg_values(self, mock_client_put):
"""
Assert that client 'put' called with user's arg values for payload data
"""
sis_section_id = '123ABCD'
start_at = '2011-01-01T01:00Z'
end_at = '2011-02-01T01:00Z'
sections.edit_section(self.req_ctx, self.section_id, self.course_name, sis_section_id, start_at, end_at)
mock_client_put.assert_called_once_with(
mock.ANY, mock.ANY, payload={
'course_section[name]': self.course_name,
'course_section[sis_section_id]': sis_section_id,
'course_section[start_at]': start_at,
'course_section[end_at]': end_at}
)
@patch('canvas_sdk.methods.sections.client.put')
def test_edit_section_put_called_with_request_kwargs(self, mock_client_put):
"""
Assert that client 'put' called with kwargs as additional parameters
"""
sections.edit_section(self.req_ctx, self.section_id, **self.test_request_kwargs)
mock_client_put.assert_called_once_with(
mock.ANY, mock.ANY, payload=mock.ANY, **self.test_request_kwargs)
@patch('canvas_sdk.methods.sections.client.put')
def test_edit_section_returns_result_from_put(self, mock_client_put):
"""
        Assert that method returned the result of client 'put' call
"""
results = sections.edit_section(self.req_ctx, self.section_id)
self.assertEqual(results, mock_client_put.return_value, 'The client call did not return the correct result.')
@patch('canvas_sdk.methods.sections.client.delete')
    def test_delete_section_delete_called_with_request_context(self, mock_client_delete):
        """
        Assert that request_context is passed to client 'delete' call
        """
sections.delete_section(self.req_ctx, self.section_id)
mock_client_delete.assert_called_once_with(self.req_ctx, mock.ANY)
@patch('canvas_sdk.methods.sections.client.delete')
    def test_delete_section_delete_called_with_absolute_url(self, mock_client_delete):
        """
        Assert that an absolute url made of base_api_url from context and method path is passed to client 'delete' call
        """
sections.delete_section(self.req_ctx, self.section_id)
mock_client_delete.assert_called_once_with(
mock.ANY, self.req_ctx.base_api_url + '/v1/sections/%s' % self.section_id)
@patch('canvas_sdk.methods.sections.client.delete')
def test_delete_section_delete_called_with_request_kwargs(self, mock_client_delete):
"""
Assert that client 'delete' called with kwargs as additional parameters
"""
sections.delete_section(self.req_ctx, self.section_id, **self.test_request_kwargs)
mock_client_delete.assert_called_once_with(
mock.ANY, mock.ANY, **self.test_request_kwargs)
@patch('canvas_sdk.methods.sections.client.delete')
def test_delete_section_returns_result_from_delete(self, mock_client_delete):
"""
Assert that method returned the result of client 'delete' call
"""
results = sections.delete_section(self.req_ctx, self.section_id)
self.assertEqual(results, mock_client_delete.return_value, 'The client call did not return the correct result.')
|
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Unit tests for grit.gather.chrome_html'''
import os
import re
import sys
if __name__ == '__main__':
sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
import unittest
from grit import lazy_re
from grit import util
from grit.gather import chrome_html
_NEW_LINE = lazy_re.compile('(\r\n|\r|\n)', re.MULTILINE)
def StandardizeHtml(text):
'''Standardizes the newline format and png mime type in Html text.'''
return _NEW_LINE.sub('\n', text).replace('data:image/x-png;',
'data:image/png;')
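# Minimal sketch of what StandardizeHtml normalizes (illustrative only):
#
#     StandardizeHtml('a\r\nb\rc')                              # -> 'a\nb\nc'
#     StandardizeHtml("url('data:image/x-png;base64,AAAA')")
#     # -> "url('data:image/png;base64,AAAA')"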
class ChromeHtmlUnittest(unittest.TestCase):
'''Unit tests for ChromeHtml.'''
def testFileResources(self):
'''Tests inlined image file resources with available high DPI assets.'''
tmp_dir = util.TempDir({
'index.html': '''
<!DOCTYPE HTML>
<html>
<head>
<link rel="stylesheet" href="test.css">
</head>
<body>
<!-- Don't need a body. -->
</body>
</html>
''',
'test.css': '''
.image {
background: url('test.png');
}
''',
'test.png': 'PNG DATA',
'1.4x/test.png': '1.4x PNG DATA',
'1.8x/test.png': '1.8x PNG DATA',
})
html = chrome_html.ChromeHtml(tmp_dir.GetPath('index.html'))
html.SetDefines({'scale_factors': '1.4x,1.8x'})
html.SetAttributes({'flattenhtml': 'true'})
html.Parse()
self.failUnlessEqual(StandardizeHtml(html.GetData('en', 'utf-8')),
StandardizeHtml('''
<!DOCTYPE HTML>
<html>
<head>
<style>
.image {
background: -webkit-image-set(url('data:image/png;base64,UE5HIERBVEE=') 1x, url('data:image/png;base64,MS40eCBQTkcgREFUQQ==') 1.4x, url('data:image/png;base64,MS44eCBQTkcgREFUQQ==') 1.8x);
}
</style>
</head>
<body>
<!-- Don't need a body. -->
</body>
</html>
'''))
tmp_dir.CleanUp()
def testFileResourcesImageTag(self):
'''Tests inlined image file resources with available high DPI assets on
an image tag.'''
tmp_dir = util.TempDir({
'index.html': '''
<!DOCTYPE HTML>
<html>
<body>
<img id="foo" src="test.png">
</body>
</html>
''',
'test.png': 'PNG DATA',
'2x/test.png': '2x PNG DATA',
})
html = chrome_html.ChromeHtml(tmp_dir.GetPath('index.html'))
html.SetDefines({'scale_factors': '2x'})
html.SetAttributes({'flattenhtml': 'true'})
html.Parse()
self.failUnlessEqual(StandardizeHtml(html.GetData('en', 'utf-8')),
StandardizeHtml('''
<!DOCTYPE HTML>
<html>
<body>
<img id="foo" src="data:image/png;base64,UE5HIERBVEE=" style="content: -webkit-image-set(url('data:image/png;base64,UE5HIERBVEE=') 1x, url('data:image/png;base64,MnggUE5HIERBVEE=') 2x);">
</body>
</html>
'''))
tmp_dir.CleanUp()
def testFileResourcesNoFlatten(self):
'''Tests non-inlined image file resources with available high DPI assets.'''
tmp_dir = util.TempDir({
'test.css': '''
.image {
background: url('test.png');
}
''',
'test.png': 'PNG DATA',
'1.4x/test.png': '1.4x PNG DATA',
'1.8x/test.png': '1.8x PNG DATA',
})
html = chrome_html.ChromeHtml(tmp_dir.GetPath('test.css'))
html.SetDefines({'scale_factors': '1.4x,1.8x'})
html.SetAttributes({'flattenhtml': 'false'})
html.Parse()
self.failUnlessEqual(StandardizeHtml(html.GetData('en', 'utf-8')),
StandardizeHtml('''
.image {
background: -webkit-image-set(url('test.png') 1x, url('1.4x/test.png') 1.4x, url('1.8x/test.png') 1.8x);
}
'''))
tmp_dir.CleanUp()
def testFileResourcesDoubleQuotes(self):
'''Tests inlined image file resources if url() filename is double quoted.'''
tmp_dir = util.TempDir({
'test.css': '''
.image {
background: url("test.png");
}
''',
'test.png': 'PNG DATA',
'2x/test.png': '2x PNG DATA',
})
html = chrome_html.ChromeHtml(tmp_dir.GetPath('test.css'))
html.SetDefines({'scale_factors': '2x'})
html.SetAttributes({'flattenhtml': 'true'})
html.Parse()
self.failUnlessEqual(StandardizeHtml(html.GetData('en', 'utf-8')),
StandardizeHtml('''
.image {
background: -webkit-image-set(url("data:image/png;base64,UE5HIERBVEE=") 1x, url("data:image/png;base64,MnggUE5HIERBVEE=") 2x);
}
'''))
tmp_dir.CleanUp()
def testFileResourcesNoQuotes(self):
'''Tests inlined image file resources when url() filename is unquoted.'''
tmp_dir = util.TempDir({
'test.css': '''
.image {
background: url(test.png);
}
''',
'test.png': 'PNG DATA',
'2x/test.png': '2x PNG DATA',
})
html = chrome_html.ChromeHtml(tmp_dir.GetPath('test.css'))
html.SetDefines({'scale_factors': '2x'})
html.SetAttributes({'flattenhtml': 'true'})
html.Parse()
self.failUnlessEqual(StandardizeHtml(html.GetData('en', 'utf-8')),
StandardizeHtml('''
.image {
background: -webkit-image-set(url(data:image/png;base64,UE5HIERBVEE=) 1x, url(data:image/png;base64,MnggUE5HIERBVEE=) 2x);
}
'''))
tmp_dir.CleanUp()
def testFileResourcesNoFile(self):
'''Tests inlined image file resources without available high DPI assets.'''
tmp_dir = util.TempDir({
'index.html': '''
<!DOCTYPE HTML>
<html>
<head>
<link rel="stylesheet" href="test.css">
</head>
<body>
<!-- Don't need a body. -->
</body>
</html>
''',
'test.css': '''
.image {
background: url('test.png');
}
''',
'test.png': 'PNG DATA',
})
html = chrome_html.ChromeHtml(tmp_dir.GetPath('index.html'))
html.SetDefines({'scale_factors': '2x'})
html.SetAttributes({'flattenhtml': 'true'})
html.Parse()
self.failUnlessEqual(StandardizeHtml(html.GetData('en', 'utf-8')),
StandardizeHtml('''
<!DOCTYPE HTML>
<html>
<head>
<style>
.image {
background: url('data:image/png;base64,UE5HIERBVEE=');
}
</style>
</head>
<body>
<!-- Don't need a body. -->
</body>
</html>
'''))
tmp_dir.CleanUp()
def testThemeResources(self):
'''Tests inserting high DPI chrome://theme references.'''
tmp_dir = util.TempDir({
'index.html': '''
<!DOCTYPE HTML>
<html>
<head>
<link rel="stylesheet" href="test.css">
</head>
<body>
<!-- Don't need a body. -->
</body>
</html>
''',
'test.css': '''
.image {
background: url('chrome://theme/IDR_RESOURCE_NAME');
content: url('chrome://theme/IDR_RESOURCE_NAME_WITH_Q?$1');
}
''',
})
html = chrome_html.ChromeHtml(tmp_dir.GetPath('index.html'))
html.SetDefines({'scale_factors': '2x'})
html.SetAttributes({'flattenhtml': 'true'})
html.Parse()
self.failUnlessEqual(StandardizeHtml(html.GetData('en', 'utf-8')),
StandardizeHtml('''
<!DOCTYPE HTML>
<html>
<head>
<style>
.image {
background: -webkit-image-set(url('chrome://theme/IDR_RESOURCE_NAME') 1x, url('chrome://theme/IDR_RESOURCE_NAME@2x') 2x);
content: -webkit-image-set(url('chrome://theme/IDR_RESOURCE_NAME_WITH_Q?$1') 1x, url('chrome://theme/IDR_RESOURCE_NAME_WITH_Q@2x?$1') 2x);
}
</style>
</head>
<body>
<!-- Don't need a body. -->
</body>
</html>
'''))
tmp_dir.CleanUp()
def testRemoveUnsupportedScale(self):
'''Tests removing an unsupported scale factor from an explicit image-set.'''
tmp_dir = util.TempDir({
'index.html': '''
<!DOCTYPE HTML>
<html>
<head>
<link rel="stylesheet" href="test.css">
</head>
<body>
<!-- Don't need a body. -->
</body>
</html>
''',
'test.css': '''
.image {
background: -webkit-image-set(url('test.png') 1x,
url('test1.4.png') 1.4x,
url('test1.8.png') 1.8x);
}
''',
'test.png': 'PNG DATA',
'test1.4.png': '1.4x PNG DATA',
'test1.8.png': '1.8x PNG DATA',
})
html = chrome_html.ChromeHtml(tmp_dir.GetPath('index.html'))
html.SetDefines({'scale_factors': '1.8x'})
html.SetAttributes({'flattenhtml': 'true'})
html.Parse()
self.failUnlessEqual(StandardizeHtml(html.GetData('en', 'utf-8')),
StandardizeHtml('''
<!DOCTYPE HTML>
<html>
<head>
<style>
.image {
background: -webkit-image-set(url('data:image/png;base64,UE5HIERBVEE=') 1x,
url('data:image/png;base64,MS44eCBQTkcgREFUQQ==') 1.8x);
}
</style>
</head>
<body>
<!-- Don't need a body. -->
</body>
</html>
'''))
tmp_dir.CleanUp()
def testExpandVariablesInFilename(self):
'''
Tests variable substitution in filenames while flattening images
with multiple scale factors.
'''
tmp_dir = util.TempDir({
'index.html': '''
<!DOCTYPE HTML>
<html>
<head>
<link rel="stylesheet" href="test.css">
</head>
<body>
<!-- Don't need a body. -->
</body>
</html>
''',
'test.css': '''
.image {
background: url('test[WHICH].png');
}
''',
'test1.png': 'PNG DATA',
'1.4x/test1.png': '1.4x PNG DATA',
'1.8x/test1.png': '1.8x PNG DATA',
})
def replacer(var, repl):
return lambda filename: filename.replace('[%s]' % var, repl)
html = chrome_html.ChromeHtml(tmp_dir.GetPath('index.html'))
html.SetDefines({'scale_factors': '1.4x,1.8x'})
html.SetAttributes({'flattenhtml': 'true'})
    html.SetFilenameExpansionFunction(replacer('WHICH', '1'))
html.Parse()
self.failUnlessEqual(StandardizeHtml(html.GetData('en', 'utf-8')),
StandardizeHtml('''
<!DOCTYPE HTML>
<html>
<head>
<style>
.image {
background: -webkit-image-set(url('data:image/png;base64,UE5HIERBVEE=') 1x, url('data:image/png;base64,MS40eCBQTkcgREFUQQ==') 1.4x, url('data:image/png;base64,MS44eCBQTkcgREFUQQ==') 1.8x);
}
</style>
</head>
<body>
<!-- Don't need a body. -->
</body>
</html>
'''))
tmp_dir.CleanUp()
if __name__ == '__main__':
unittest.main()
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import sys
from oslo_concurrency import lockutils
from oslo_config import cfg
from oslo_log import versionutils
from oslo_policy import opts
from oslo_policy import policy
CONF = cfg.CONF
_ENFORCER = None
# TODO(gmann): Remove setting the default value of config policy_file
# once oslo_policy changes the default value to 'policy.yaml'.
# https://github.com/openstack/oslo.policy/blob/a626ad12fe5a3abd49d70e3e5b95589d279ab578/oslo_policy/opts.py#L49
DEFAULT_POLICY_FILE = 'policy.yaml'
opts.set_defaults(cfg.CONF, DEFAULT_POLICY_FILE)
# Generic policy check string for system administrators. These are the people
# who need the highest level of authorization to operate the deployment.
# They're allowed to create, read, update, or delete any system-specific
# resource. They can also operate on project-specific resources where
# applicable (e.g., cleaning up baremetal hosts)
SYSTEM_ADMIN = 'role:admin and system_scope:all'
# Generic policy check string for system users who don't require all the
# authorization that system administrators typically have. This persona, or
# check string, typically isn't used by default, but its existence is useful
# in the event a deployment wants to offload some administrative actions from
# system administrators to system members.
SYSTEM_MEMBER = 'role:member and system_scope:all'
# Generic policy check string for read-only access to system-level resources.
# This persona is useful for someone who needs access for auditing or even
# support. These users are also able to view project-specific resources where
# applicable (e.g., listing all volumes in the deployment, regardless of the
# project they belong to).
SYSTEM_READER = 'role:reader and system_scope:all'
deprecated_node_reason = """
The inspector API is now aware of system scope and default roles.
"""
default_policies = [
policy.RuleDefault(
'is_admin',
'role:admin or role:administrator or role:baremetal_admin',
description='Full read/write API access',
deprecated_for_removal=True,
deprecated_reason=deprecated_node_reason,
deprecated_since=versionutils.deprecated.WALLABY),
policy.RuleDefault(
'is_observer',
'role:baremetal_observer',
description='Read-only API access',
deprecated_for_removal=True,
deprecated_reason=deprecated_node_reason,
deprecated_since=versionutils.deprecated.WALLABY),
policy.RuleDefault(
'public_api',
'is_public_api:True',
description='Internal flag for public API routes'),
policy.RuleDefault(
'default',
'!',
description='Default API access policy'),
]
api_version_policies = [
policy.DocumentedRuleDefault(
'introspection',
'rule:public_api',
'Access the API root for available versions information',
[{'path': '/', 'method': 'GET'}]
),
policy.DocumentedRuleDefault(
'introspection:version',
'rule:public_api',
'Access the versioned API root for version information',
[{'path': '/{version}', 'method': 'GET'}]
),
]
deprecated_introspection_status = policy.DeprecatedRule(
name='introspection:status',
check_str='rule:is_admin or rule:is_observer',
deprecated_reason=deprecated_node_reason,
deprecated_since=versionutils.deprecated.WALLABY
)
deprecated_introspection_start = policy.DeprecatedRule(
name='introspection:start',
check_str='rule:is_admin',
deprecated_reason=deprecated_node_reason,
deprecated_since=versionutils.deprecated.WALLABY
)
deprecated_introspection_abort = policy.DeprecatedRule(
name='introspection:abort',
check_str='rule:is_admin',
deprecated_reason=deprecated_node_reason,
deprecated_since=versionutils.deprecated.WALLABY
)
deprecated_introspection_data = policy.DeprecatedRule(
name='introspection:data',
check_str='rule:is_admin',
deprecated_reason=deprecated_node_reason,
deprecated_since=versionutils.deprecated.WALLABY
)
deprecated_introspection_reapply = policy.DeprecatedRule(
name='introspection:reapply',
check_str='rule:is_admin',
deprecated_reason=deprecated_node_reason,
deprecated_since=versionutils.deprecated.WALLABY
)
deprecated_introspection_rule_get = policy.DeprecatedRule(
name='introspection:rule:get',
check_str='rule:is_admin',
deprecated_reason=deprecated_node_reason,
deprecated_since=versionutils.deprecated.WALLABY
)
deprecated_introspection_rule_delete = policy.DeprecatedRule(
name='introspection:rule:delete',
check_str='rule:is_admin',
deprecated_reason=deprecated_node_reason,
deprecated_since=versionutils.deprecated.WALLABY
)
deprecated_introspection_rule_create = policy.DeprecatedRule(
name='introspection:rule:create',
check_str='rule:is_admin',
deprecated_reason=deprecated_node_reason,
deprecated_since=versionutils.deprecated.WALLABY
)
introspection_policies = [
policy.DocumentedRuleDefault(
name='introspection:continue',
check_str='rule:public_api',
description='Ramdisk callback to continue introspection',
operations=[{'path': '/continue', 'method': 'POST'}],
),
policy.DocumentedRuleDefault(
name='introspection:status',
check_str=SYSTEM_READER,
description='Get introspection status',
operations=[{'path': '/introspection', 'method': 'GET'},
{'path': '/introspection/{node_id}', 'method': 'GET'}],
deprecated_rule=deprecated_introspection_status
),
policy.DocumentedRuleDefault(
name='introspection:start',
check_str=SYSTEM_ADMIN,
description='Start introspection',
operations=[{'path': '/introspection/{node_id}', 'method': 'POST'}],
deprecated_rule=deprecated_introspection_start
),
policy.DocumentedRuleDefault(
name='introspection:abort',
check_str=SYSTEM_ADMIN,
description='Abort introspection',
operations=[{'path': '/introspection/{node_id}/abort',
'method': 'POST'}],
deprecated_rule=deprecated_introspection_abort
),
policy.DocumentedRuleDefault(
name='introspection:data',
check_str=SYSTEM_ADMIN,
description='Get introspection data',
operations=[{'path': '/introspection/{node_id}/data',
'method': 'GET'}],
deprecated_rule=deprecated_introspection_data
),
policy.DocumentedRuleDefault(
name='introspection:reapply',
check_str=SYSTEM_ADMIN,
description='Reapply introspection on stored data',
operations=[{'path': '/introspection/{node_id}/data/unprocessed',
'method': 'POST'}],
deprecated_rule=deprecated_introspection_reapply
),
]
rule_policies = [
policy.DocumentedRuleDefault(
name='introspection:rule:get',
check_str=SYSTEM_ADMIN,
description='Get introspection rule(s)',
operations=[{'path': '/rules', 'method': 'GET'},
{'path': '/rules/{rule_id}', 'method': 'GET'}],
deprecated_rule=deprecated_introspection_rule_get
),
policy.DocumentedRuleDefault(
name='introspection:rule:delete',
check_str=SYSTEM_ADMIN,
description='Delete introspection rule(s)',
operations=[{'path': '/rules', 'method': 'DELETE'},
{'path': '/rules/{rule_id}', 'method': 'DELETE'}],
deprecated_rule=deprecated_introspection_rule_delete
),
policy.DocumentedRuleDefault(
name='introspection:rule:create',
check_str=SYSTEM_ADMIN,
description='Create introspection rule',
operations=[{'path': '/rules', 'method': 'POST'}],
deprecated_rule=deprecated_introspection_rule_create
),
]
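# Hedged illustration (not part of the module): because the defaults above are
# registered in code, a deployment only needs to list overrides in the policy
# file named by CONF.oslo_policy.policy_file ('policy.yaml' by default). For
# example, re-granting read-only status access to the legacy observer role
# could look like this single-line YAML override (sample only):
#
#     "introspection:status": "rule:is_observer"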
def list_policies():
"""Get list of all policies defined in code.
Used to register them all at runtime,
and by oslo-config-generator to generate sample policy files.
"""
policies = itertools.chain(
default_policies,
api_version_policies,
introspection_policies,
rule_policies)
return policies
@lockutils.synchronized('policy_enforcer')
def init_enforcer(policy_file=None, rules=None,
default_rule=None, use_conf=True):
"""Synchronously initializes the policy enforcer
:param policy_file: Custom policy file to use, if none is specified,
`CONF.oslo_policy.policy_file` will be used.
:param rules: Default dictionary / Rules to use. Only considered on
the first instantiation.
:param default_rule: Default rule to use,
CONF.oslo_policy.policy_default_rule will
be used if none is specified.
:param use_conf: Whether to load rules from config file.
"""
global _ENFORCER
if _ENFORCER:
return
_ENFORCER = policy.Enforcer(
CONF, policy_file=policy_file,
rules=rules,
default_rule=default_rule,
use_conf=use_conf)
# NOTE(gmann): Explicitly disable the warnings for policies
# changing their default check_str. With the new RBAC policy
# work, all the policy defaults have been changed, and a warning for
# each policy started filling the logs and hitting log limits for various tools.
# Once we move to a new-defaults-only world, we can enable these
# warnings again.
_ENFORCER.suppress_default_change_warnings = True
_ENFORCER.register_defaults(list_policies())
def get_enforcer():
"""Provides access to the single instance of Policy enforcer."""
if not _ENFORCER:
init_enforcer()
return _ENFORCER
def get_oslo_policy_enforcer():
"""Get the enforcer instance to generate policy files.
This method is for use by oslopolicy CLI scripts.
Those scripts need the 'output-file' and 'namespace' options,
but having those in sys.argv means loading the inspector config options
will fail as those are not expected to be present.
So we pass in an arg list with those stripped out.
"""
conf_args = []
# Start at 1 because cfg.CONF expects the equivalent of sys.argv[1:]
i = 1
while i < len(sys.argv):
if sys.argv[i].strip('-') in ['namespace', 'output-file']:
# e.g. --namespace <somestring>
i += 2
continue
conf_args.append(sys.argv[i])
i += 1
cfg.CONF(conf_args, project='ironic-inspector')
return get_enforcer()
def authorize(rule, target, creds, *args, **kwargs):
"""A shortcut for policy.Enforcer.authorize()
Checks authorization of a rule against the target and credentials, and
raises an exception if the rule is not defined.
args and kwargs are passed directly to oslo.policy Enforcer.authorize
Always returns True if CONF.auth_strategy != keystone.
:param rule: name of a registered oslo.policy rule
:param target: dict-like structure to check rule against
:param creds: dict of policy values from request
:returns: True if request is authorized against given policy,
False otherwise
:raises: oslo_policy.policy.PolicyNotRegistered if supplied policy
is not registered in oslo_policy
"""
if CONF.auth_strategy != 'keystone':
return True
enforcer = get_enforcer()
rule = CONF.oslo_policy.policy_default_rule if rule is None else rule
return enforcer.authorize(rule, target, creds, *args, **kwargs)
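# A minimal usage sketch (the rule name and the target/creds values below are
# illustrative only, not taken from this module):
#
#   authorize('introspection:start',
#             {'node_id': node_id},           # target the rule is checked against
#             context.to_policy_values())     # credentials from the request context
#
# This returns True immediately when CONF.auth_strategy != 'keystone' and
# otherwise defers to the shared oslo.policy Enforcer registered above.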
|
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for Chromium WebUI resources.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools, and see
http://www.chromium.org/developers/web-development-style-guide for the rules
we're checking against here.
"""
# TODO(dbeam): Real CSS parser? https://github.com/danbeam/css-py/tree/css3
class CSSChecker(object):
def __init__(self, input_api, output_api, file_filter=None):
self.input_api = input_api
self.output_api = output_api
self.file_filter = file_filter
def RunChecks(self):
# We use this a lot, so make a nickname variable.
re = self.input_api.re
def _collapseable_hex(s):
return (len(s) == 6 and s[0] == s[1] and s[2] == s[3] and s[4] == s[5])
def _is_gray(s):
return s[0] == s[1] == s[2] if len(s) == 3 else s[0:2] == s[2:4] == s[4:6]
def _remove_all(s):
return _remove_grit(_remove_ats(_remove_comments(s)))
def _remove_ats(s):
at_reg = re.compile(r"""
@(?!\d+x\b)\w+[^'"]*?{ # @at-keyword selector junk {, not @2x
(.*{.*?})+ # inner { curly } blocks, rules, and selector
.*?} # stuff up to the first end curly }""",
re.DOTALL | re.VERBOSE)
return at_reg.sub('\\1', s)
def _remove_comments(s):
return re.sub(re.compile(r'/\*.*?\*/', re.DOTALL), '', s)
def _remove_grit(s):
grit_reg = re.compile(r"""
<if[^>]+>.*?<\s*/\s*if[^>]*>| # <if> contents </if>
<include[^>]+> # <include>""",
re.DOTALL | re.VERBOSE)
return re.sub(grit_reg, '', s)
def _rgb_from_hex(s):
if len(s) == 3:
r, g, b = s[0] + s[0], s[1] + s[1], s[2] + s[2]
else:
r, g, b = s[0:2], s[2:4], s[4:6]
return int(r, base=16), int(g, base=16), int(b, base=16)
def _strip_prefix(s):
return re.sub(r'^-(?:o|ms|moz|khtml|webkit)-', '', s)
def alphabetize_props(contents):
errors = []
for rule in re.finditer(r'{(.*?)}', contents, re.DOTALL):
semis = map(lambda t: t.strip(), rule.group(1).split(';'))[:-1]
rules = filter(lambda r: ': ' in r, semis)
props = map(lambda r: r[0:r.find(':')], rules)
if props != sorted(props):
errors.append(' %s;\n' % (';\n '.join(rules)))
return errors
def braces_have_space_before_and_nothing_after(line):
brace_space_reg = re.compile(r"""
(?:^|\S){| # selector{ or selector\n{ or
{\s*\S+\s* # selector { with stuff after it
$ # must be at the end of a line""",
re.VERBOSE)
return brace_space_reg.search(line)
def classes_use_dashes(line):
# Intentionally dumbed down version of CSS 2.1 grammar for class without
# non-ASCII, escape chars, or whitespace.
class_reg = re.compile(r"""
\.(-?[\w-]+).* # ., then maybe -, then alpha numeric and -
[,{]\s*$ # selectors should end with a , or {""",
re.VERBOSE)
m = class_reg.search(line)
if not m:
return False
class_name = m.group(1)
return class_name.lower() != class_name or '_' in class_name
def close_brace_on_new_line(line):
# Ignore single frames in a @keyframe, i.e. 0% { margin: 50px; }
frame_reg = re.compile(r"""
\s*(from|to|\d+%)\s*{ # 50% {
\s*[\w-]+: # rule:
(\s*[\w\(\), -]+)+\s*; # value;
\s*}\s* # }""",
re.VERBOSE)
return ('}' in line and re.search(r'[^ }]', line) and
not frame_reg.match(line))
def colons_have_space_after(line):
colon_space_reg = re.compile(r"""
(?<!data) # ignore data URIs
:(?!//) # ignore url(http://), etc.
\S[^;]+;\s* # only catch one-line rules for now""",
re.VERBOSE)
return colon_space_reg.search(line)
def favor_single_quotes(line):
return '"' in line
# Shared between hex_could_be_shorter and rgb_if_not_gray.
hex_reg = re.compile(r"""
\#([a-fA-F0-9]{3}|[a-fA-F0-9]{6}) # pound followed by 3 or 6 hex digits
(?=[^\w-]|$) # no more alphanum chars or at EOL
(?!.*(?:{.*|,\s*)$) # not in a selector""",
re.VERBOSE)
def hex_could_be_shorter(line):
m = hex_reg.search(line)
return (m and _is_gray(m.group(1)) and _collapseable_hex(m.group(1)))
def rgb_if_not_gray(line):
m = hex_reg.search(line)
return (m and not _is_gray(m.group(1)))
small_seconds_reg = re.compile(r"""
(?:^|[^\w-]) # start of a line or a non-alphanumeric char
(0?\.[0-9]+)s # 1.0s
(?!-?[\w-]) # no following - or alphanumeric chars""",
re.VERBOSE)
def milliseconds_for_small_times(line):
return small_seconds_reg.search(line)
def suggest_ms_from_s(line):
ms = int(float(small_seconds_reg.search(line).group(1)) * 1000)
return ' (replace with %dms)' % ms
def no_data_uris_in_source_files(line):
return re.search(r'\(\s*data:', line)
def no_quotes_in_url(line):
return re.search(r'url\s*\(\s*["\']', line, re.IGNORECASE)
def one_rule_per_line(line):
one_rule_reg = re.compile(r"""
[\w-](?<!data): # a rule: but no data URIs
(?!//)[^;]+; # value; ignoring colons in protocols://
\s*[^ }]\s* # any non-space after the end colon""",
re.VERBOSE)
return one_rule_reg.search(line)
def pseudo_elements_double_colon(contents):
pseudo_elements = ['after',
'before',
'calendar-picker-indicator',
'color-swatch',
'color-swatch-wrapper',
'date-and-time-container',
'date-and-time-value',
'datetime-edit',
'datetime-edit-ampm-field',
'datetime-edit-day-field',
'datetime-edit-hour-field',
'datetime-edit-millisecond-field',
'datetime-edit-minute-field',
'datetime-edit-month-field',
'datetime-edit-second-field',
'datetime-edit-text',
'datetime-edit-week-field',
'datetime-edit-year-field',
'details-marker',
'file-upload-button',
'first-letter',
'first-line',
'inner-spin-button',
'input-placeholder',
'input-speech-button',
'keygen-select',
'media-slider-container',
'media-slider-thumb',
'meter-bar',
'meter-even-less-good-value',
'meter-inner-element',
'meter-optimum-value',
'meter-suboptimum-value',
'progress-bar',
'progress-inner-element',
'progress-value',
'resizer',
'scrollbar',
'scrollbar-button',
'scrollbar-corner',
'scrollbar-thumb',
'scrollbar-track',
'scrollbar-track-piece',
'search-cancel-button',
'search-decoration',
'search-results-button',
'search-results-decoration',
'selection',
'slider-container',
'slider-runnable-track',
'slider-thumb',
'textfield-decoration-container',
'validation-bubble',
'validation-bubble-arrow',
'validation-bubble-arrow-clipper',
'validation-bubble-heading',
'validation-bubble-message',
'validation-bubble-text-block']
pseudo_reg = re.compile(r"""
(?<!:): # a single colon, i.e. :after but not ::after
([a-zA-Z-]+) # a pseudo element, class, or function
(?=[^{}]+?{) # make sure a selector, not inside { rules }""",
re.MULTILINE | re.VERBOSE)
errors = []
for p in re.finditer(pseudo_reg, contents):
pseudo = p.group(1).strip().splitlines()[0]
if _strip_prefix(pseudo.lower()) in pseudo_elements:
errors.append(' :%s (should be ::%s)' % (pseudo, pseudo))
return errors
def one_selector_per_line(contents):
any_reg = re.compile(r"""
:(?:-webkit-)?any\(.*?\) # :-webkit-any(a, b, i) selector""",
re.DOTALL | re.VERBOSE)
multi_sels_reg = re.compile(r"""
(?:}\s*)? # ignore 0% { blah: blah; }, from @keyframes
([^,]+,(?=[^{}]+?{) # selector junk {, not in a { rule }
.*[,{])\s*$ # has to end with , or {""",
re.MULTILINE | re.VERBOSE)
errors = []
for b in re.finditer(multi_sels_reg, re.sub(any_reg, '', contents)):
errors.append(' ' + b.group(1).strip().splitlines()[-1:][0])
return errors
def suggest_rgb_from_hex(line):
suggestions = ['rgb(%d, %d, %d)' % _rgb_from_hex(h.group(1))
for h in re.finditer(hex_reg, line)]
return ' (replace with %s)' % ', '.join(suggestions)
def suggest_short_hex(line):
h = hex_reg.search(line).group(1)
return ' (replace with #%s)' % (h[0] + h[2] + h[4])
webkit_before_or_after_reg = re.compile(r'-webkit-(\w+-)(after|before):')
def suggest_top_or_bottom(line):
prop, pos = webkit_before_or_after_reg.search(line).groups()
top_or_bottom = 'top' if pos == 'before' else 'bottom'
return ' (replace with %s)' % (prop + top_or_bottom)
def webkit_before_or_after(line):
return webkit_before_or_after_reg.search(line)
def zero_width_lengths(contents):
hsl_reg = re.compile(r"""
hsl\([^\)]* # hsl(<maybe stuff>
(?:[, ]|(?<=\()) # a comma, a space, or right after the (
(?:0?\.?)?0% # some equivalent to 0%""",
re.VERBOSE)
zeros_reg = re.compile(r"""
^.*(?:^|[^0-9.]) # start/non-number
(?:\.0|0(?:\.0? # .0, 0, or 0.0
|px|em|%|in|cm|mm|pc|pt|ex)) # a length unit
(?:\D|$) # non-number/end
(?=[^{}]+?}).*$ # only { rules }""",
re.MULTILINE | re.VERBOSE)
errors = []
for z in re.finditer(zeros_reg, contents):
first_line = z.group(0).strip().splitlines()[0]
if not hsl_reg.search(first_line):
errors.append(' ' + first_line)
return errors
# NOTE: Currently multi-line checks don't support 'after'. Instead, add
# suggestions while parsing the file so another pass isn't necessary.
added_or_modified_files_checks = [
{ 'desc': 'Alphabetize properties and list vendor specific (i.e. '
'-webkit) above standard.',
'test': alphabetize_props,
'multiline': True,
},
{ 'desc': 'Start braces ({) end a selector, have a space before them '
'and no rules after.',
'test': braces_have_space_before_and_nothing_after,
},
{ 'desc': 'Classes use .dash-form.',
'test': classes_use_dashes,
},
{ 'desc': 'Always put a rule closing brace (}) on a new line.',
'test': close_brace_on_new_line,
},
{ 'desc': 'Colons (:) should have a space after them.',
'test': colons_have_space_after,
},
{ 'desc': 'Use single quotes (\') instead of double quotes (") in '
'strings.',
'test': favor_single_quotes,
},
{ 'desc': 'Use abbreviated hex (#rgb) when in form #rrggbb.',
'test': hex_could_be_shorter,
'after': suggest_short_hex,
},
{ 'desc': 'Use milliseconds for time measurements under 1 second.',
'test': milliseconds_for_small_times,
'after': suggest_ms_from_s,
},
{ 'desc': "Don't use data URIs in source files. Use grit instead.",
'test': no_data_uris_in_source_files,
},
{ 'desc': "Don't use quotes in url().",
'test': no_quotes_in_url,
},
{ 'desc': 'One rule per line (what not to do: color: red; margin: 0;).',
'test': one_rule_per_line,
},
{ 'desc': 'One selector per line (what not to do: a, b {}).',
'test': one_selector_per_line,
'multiline': True,
},
{ 'desc': 'Pseudo-elements should use double colon (i.e. ::after).',
'test': pseudo_elements_double_colon,
'multiline': True,
},
{ 'desc': 'Use rgb() over #hex when not a shade of gray (like #333).',
'test': rgb_if_not_gray,
'after': suggest_rgb_from_hex,
},
{ 'desc': 'Use *-top/bottom instead of -webkit-*-before/after.',
'test': webkit_before_or_after,
'after': suggest_top_or_bottom,
},
{ 'desc': 'Use "0" for zero-width lengths (i.e. 0px -> 0)',
'test': zero_width_lengths,
'multiline': True,
},
]
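# For reference, each check above is a dict with:
#   'desc'      - human-readable description included in the presubmit warning
#   'test'      - a callable run per line, or on the whole file when 'multiline'
#   'multiline' - optional; the test then returns a list of error strings
#   'after'     - optional; appends a replacement suggestion for a matched line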
results = []
affected_files = self.input_api.AffectedFiles(include_deletes=False,
file_filter=self.file_filter)
files = []
for f in affected_files:
# Remove all /*comments*/, @at-keywords, and grit <if|include> tags; we're
# not using a real parser. TODO(dbeam): Check alpha in <if> blocks.
file_contents = _remove_all('\n'.join(f.NewContents()))
files.append((f.LocalPath(), file_contents))
# Only look at CSS files for now.
for f in filter(lambda f: f[0].endswith('.css'), files):
file_errors = []
for check in added_or_modified_files_checks:
# If the check is multiline, it receives the whole file and gives us
# back a list of things wrong. If the check isn't multiline, we pass it
# each line and the check returns something truthy if there's an issue.
if ('multiline' in check and check['multiline']):
assert not 'after' in check
check_errors = check['test'](f[1])
if len(check_errors) > 0:
file_errors.append('- %s\n%s' %
(check['desc'], '\n'.join(check_errors).rstrip()))
else:
check_errors = []
lines = f[1].splitlines()
for lnum, line in enumerate(lines):
if check['test'](line):
error = ' ' + line.strip()
if 'after' in check:
error += check['after'](line)
check_errors.append(error)
if len(check_errors) > 0:
file_errors.append('- %s\n%s' %
(check['desc'], '\n'.join(check_errors)))
if file_errors:
results.append(self.output_api.PresubmitPromptWarning(
'%s:\n%s' % (f[0], '\n\n'.join(file_errors))))
if results:
# Add your name if you're here often mucking around in the code.
authors = ['dbeam@chromium.org']
results.append(self.output_api.PresubmitNotifyResult(
'Was the CSS checker useful? Send feedback or hate mail to %s.' %
', '.join(authors)))
return results
|
|
import json
import string
import uuid
from collections import defaultdict
from copy import copy
from datetime import datetime
from django.apps import apps
from django.conf import settings
from django.db import models
from django.utils import timezone
from django.utils.functional import cached_property
from django.utils.translation import gettext
import markupsafe
import olympia.core.logger
from olympia import amo, constants
from olympia.access.models import Group
from olympia.addons.models import Addon
from olympia.amo.fields import PositiveAutoField
from olympia.amo.models import BaseQuerySet, LongNameIndex, ManagerBase, ModelBase
from olympia.bandwagon.models import Collection
from olympia.blocklist.models import Block
from olympia.files.models import File
from olympia.ratings.models import Rating
from olympia.reviewers.models import CannedResponse, ReviewActionReason
from olympia.tags.models import Tag
from olympia.users.models import UserProfile
from olympia.users.templatetags.jinja_helpers import user_link
from olympia.versions.models import Version
log = olympia.core.logger.getLogger('z.amo.activity')
# Number of times a token can be used.
MAX_TOKEN_USE_COUNT = 100
GENERIC_USER_NAME = gettext('Add-ons Review Team')
class GenericMozillaUser(UserProfile):
class Meta:
proxy = True
@property
def name(self):
return GENERIC_USER_NAME
class ActivityLogToken(ModelBase):
id = PositiveAutoField(primary_key=True)
version = models.ForeignKey(Version, related_name='token', on_delete=models.CASCADE)
user = models.ForeignKey(
'users.UserProfile',
related_name='activity_log_tokens',
on_delete=models.CASCADE,
)
uuid = models.UUIDField(default=uuid.uuid4, unique=True)
use_count = models.IntegerField(
default=0, help_text='Stores the number of times the token has been used'
)
class Meta:
db_table = 'log_activity_tokens'
constraints = [
models.UniqueConstraint(fields=('version', 'user'), name='version_id'),
]
def is_expired(self):
return self.use_count >= MAX_TOKEN_USE_COUNT
def is_valid(self):
return (
not self.is_expired()
and self.version
== self.version.addon.find_latest_version(
channel=self.version.channel, exclude=()
)
)
def expire(self):
self.update(use_count=MAX_TOKEN_USE_COUNT)
def increment_use(self):
self.__class__.objects.filter(pk=self.pk).update(
use_count=models.expressions.F('use_count') + 1
)
self.use_count = self.use_count + 1
class ActivityLogEmails(ModelBase):
"""A log of message ids of incoming emails so we don't duplicate process
them."""
messageid = models.CharField(max_length=255, unique=True)
class Meta:
db_table = 'log_activity_emails'
class AddonLog(ModelBase):
"""
This table is for indexing the activity log by addon.
"""
addon = models.ForeignKey(Addon, on_delete=models.CASCADE)
activity_log = models.ForeignKey('ActivityLog', on_delete=models.CASCADE)
class Meta:
db_table = 'log_activity_addon'
ordering = ('-created',)
def transfer(self, new_addon):
try:
# arguments is a structure:
# ``arguments = [{'addons.addon':12}, {'addons.addon':1}, ... ]``
arguments = json.loads(self.activity_log._arguments)
except Exception:
log.info(
'unserializing data from addon_log failed: %s' % self.activity_log.id
)
return None
new_arguments = []
for item in arguments:
if item.get('addons.addon', 0) == self.addon.id:
new_arguments.append({'addons.addon': new_addon.id})
else:
new_arguments.append(item)
self.activity_log.update(_arguments=json.dumps(new_arguments))
self.update(addon=new_addon)
class CommentLog(ModelBase):
"""
This table is for indexing the activity log by comment.
"""
activity_log = models.ForeignKey('ActivityLog', on_delete=models.CASCADE)
comments = models.TextField()
class Meta:
db_table = 'log_activity_comment'
ordering = ('-created',)
class VersionLog(ModelBase):
"""
This table is for indexing the activity log by version.
"""
activity_log = models.ForeignKey('ActivityLog', on_delete=models.CASCADE)
version = models.ForeignKey(Version, on_delete=models.CASCADE)
class Meta:
db_table = 'log_activity_version'
ordering = ('-created',)
class ReviewActionReasonLog(ModelBase):
"""
This table allows ReviewActionReasons to be assigned to ActivityLog entries.
"""
activity_log = models.ForeignKey('ActivityLog', on_delete=models.CASCADE)
reason = models.ForeignKey(ReviewActionReason, on_delete=models.CASCADE)
class Meta:
db_table = 'log_activity_review_action_reason'
ordering = ('-created',)
class GroupLog(ModelBase):
"""
This table is for indexing the activity log by access group.
"""
id = PositiveAutoField(primary_key=True)
activity_log = models.ForeignKey('ActivityLog', on_delete=models.CASCADE)
group = models.ForeignKey(Group, on_delete=models.CASCADE)
class Meta:
db_table = 'log_activity_group'
ordering = ('-created',)
class BlockLog(ModelBase):
"""
This table is for indexing the activity log by Blocklist Block.
"""
id = PositiveAutoField(primary_key=True)
activity_log = models.ForeignKey('ActivityLog', on_delete=models.CASCADE)
block = models.ForeignKey(Block, on_delete=models.SET_NULL, null=True)
guid = models.CharField(max_length=255, null=False)
class Meta:
db_table = 'log_activity_block'
ordering = ('-created',)
class IPLog(ModelBase):
"""
This table is for indexing the activity log by IP (only for specific
actions).
"""
activity_log = models.ForeignKey('ActivityLog', on_delete=models.CASCADE)
ip_address = models.CharField(max_length=45)
class Meta:
db_table = 'log_activity_ip'
ordering = ('-created',)
indexes = [
LongNameIndex(
fields=('ip_address',),
name='log_activity_ip_ip_address_ba36172a',
),
]
class DraftComment(ModelBase):
"""A model that allows us to draft comments for reviews before we have
an ActivityLog instance ready.
This is used by the commenting API, which is consumed by code-manager.
"""
id = PositiveAutoField(primary_key=True)
version = models.ForeignKey(Version, on_delete=models.CASCADE)
user = models.ForeignKey(UserProfile, on_delete=models.CASCADE)
filename = models.CharField(max_length=255, null=True, blank=True)
lineno = models.PositiveIntegerField(null=True)
canned_response = models.ForeignKey(
CannedResponse, null=True, default=None, on_delete=models.SET_DEFAULT
)
comment = models.TextField(blank=True)
class Meta:
db_table = 'log_activity_comment_draft'
class ActivityLogQuerySet(BaseQuerySet):
def default_transformer(self, logs):
ActivityLog.arguments_builder(logs)
class ActivityLogManager(ManagerBase):
_queryset_class = ActivityLogQuerySet
def get_queryset(self):
qs = super().get_queryset()
qs = qs.transform(qs.default_transformer).prefetch_related('user')
return qs
def for_addons(self, addons):
if isinstance(addons, Addon):
addons = (addons,)
return self.filter(addonlog__addon__in=addons)
def for_versions(self, versions):
if isinstance(versions, Version):
versions = (versions,)
return self.filter(versionlog__version__in=versions)
def for_groups(self, groups):
if isinstance(groups, Group):
groups = (groups,)
return self.filter(grouplog__group__in=groups)
def for_block(self, block):
return self.filter(blocklog__block=block)
def for_guidblock(self, guid):
return self.filter(blocklog__guid=guid)
def for_developer(self):
return self.exclude(
action__in=constants.activity.LOG_ADMINS
+ constants.activity.LOG_HIDE_DEVELOPER
)
def admin_events(self):
return self.filter(action__in=constants.activity.LOG_ADMINS)
def moderation_events(self):
return self.filter(action__in=constants.activity.LOG_RATING_MODERATION)
def review_queue(self):
qs = self._by_type()
return qs.filter(action__in=constants.activity.LOG_REVIEW_QUEUE).exclude(
user__id=settings.TASK_USER_ID
)
def review_log(self):
qs = self._by_type()
return qs.filter(
action__in=constants.activity.LOG_REVIEWER_REVIEW_ACTION
).exclude(user__id=settings.TASK_USER_ID)
def total_ratings(self, theme=False):
"""Return the top users, and their # of reviews."""
qs = self._by_type()
action_ids = (
[amo.LOG.THEME_REVIEW.id]
if theme
else constants.activity.LOG_REVIEWER_REVIEW_ACTION
)
return (
qs.values('user', 'user__display_name', 'user__username')
.filter(action__in=action_ids)
.exclude(user__id=settings.TASK_USER_ID)
.annotate(approval_count=models.Count('id'))
.order_by('-approval_count')
)
def monthly_reviews(self, theme=False):
"""Return the top users for the month, and their # of reviews."""
qs = self._by_type()
now = datetime.now()
created_date = datetime(now.year, now.month, 1)
actions = (
[constants.activity.LOG.THEME_REVIEW.id]
if theme
else constants.activity.LOG_REVIEWER_REVIEW_ACTION
)
return (
qs.values('user', 'user__display_name', 'user__username')
.filter(created__gte=created_date, action__in=actions)
.exclude(user__id=settings.TASK_USER_ID)
.annotate(approval_count=models.Count('id'))
.order_by('-approval_count')
)
def user_approve_reviews(self, user):
qs = self._by_type()
return qs.filter(
action__in=constants.activity.LOG_REVIEWER_REVIEW_ACTION, user__id=user.id
)
def current_month_user_approve_reviews(self, user):
now = datetime.now()
ago = datetime(now.year, now.month, 1)
return self.user_approve_reviews(user).filter(created__gte=ago)
def user_position(self, values_qs, user):
try:
return (
next(
i
for (i, d) in enumerate(list(values_qs))
if d.get('user') == user.id
)
+ 1
)
except StopIteration:
return None
def total_ratings_user_position(self, user, theme=False):
return self.user_position(self.total_ratings(theme), user)
def monthly_reviews_user_position(self, user, theme=False):
return self.user_position(self.monthly_reviews(theme), user)
def _by_type(self):
qs = self.get_queryset()
table = 'log_activity_addon'
return qs.extra(
tables=[table],
where=['{}.activity_log_id={}.id'.format(table, 'log_activity')],
)
class SafeFormatter(string.Formatter):
"""A replacement for str.format that escapes interpolated values."""
def get_field(self, *args, **kw):
# obj is the value getting interpolated into the string.
obj, used_key = super().get_field(*args, **kw)
return markupsafe.escape(obj), used_key
class ActivityLog(ModelBase):
TYPES = sorted(
(value.id, key) for key, value in constants.activity.LOG_BY_ID.items()
)
user = models.ForeignKey('users.UserProfile', null=True, on_delete=models.SET_NULL)
action = models.SmallIntegerField(choices=TYPES)
_arguments = models.TextField(blank=True, db_column='arguments')
_details = models.TextField(blank=True, db_column='details')
objects = ActivityLogManager()
formatter = SafeFormatter()
class Meta:
db_table = 'log_activity'
ordering = ('-created',)
indexes = [
models.Index(fields=('action',), name='log_activity_1bd4707b'),
models.Index(fields=('created',), name='log_activity_created_idx'),
]
def f(self, *args, **kw):
"""Calls SafeFormatter.format and returns a Markup string."""
# SafeFormatter escapes everything so this is safe.
return markupsafe.Markup(self.formatter.format(*args, **kw))
@classmethod
def transformer_anonymize_user_for_developer(cls, logs):
"""Replace the user with a generic user in actions where it shouldn't
be shown to a developer.
"""
generic_user = GenericMozillaUser()
for log in logs:
if log.action not in constants.activity.LOG_SHOW_USER_TO_DEVELOPER:
log.user = generic_user
@classmethod
def arguments_builder(cls, activities):
def handle_renames(value):
# Cope with renames of key models (use the original model name like
# it was in the ActivityLog as the key so that we can find it
# later)
return 'ratings.rating' if value == 'reviews.review' else value
# We need to do 2 passes on each log:
# - The first time, gather the references to every instance we need
# - The second time, we build querysets for all instances of the same
# type and pick data from those querysets.
#
# Because it relies on in_bulk(), this method needs the pks to be of a
# consistent type, which doesn't appear to be guaranteed in our
# existing data. For this reason, it forces a conversion to int. If we
# ever want to store ActivityLog items pointing to models using a non
# integer PK field, we'll need to make this a little smarter.
instances_to_load = defaultdict(list)
instances = {}
for activity in activities:
try:
# `arguments_data` will be a list of dicts like:
# `[{'addons.addon':12}, {'addons.addon':1}, ... ]`
activity.arguments_data = json.loads(activity._arguments)
except Exception as e:
log.info('unserializing data from activity_log failed: %s', activity.id)
log.info(e)
activity.arguments_data = []
for item in activity.arguments_data:
# Each 'item' should have one key and one value only.
name, pk = list(item.items())[0]
if name not in ('str', 'int', 'null') and pk:
# Convert pk to int to have consistent data for when we
# call .in_bulk() later.
name = handle_renames(name)
instances_to_load[name].append(int(pk))
# At this point, instances_to_load is a dict of "names" that
# each have a bunch of pks we want to load.
for name, pks in instances_to_load.items():
(app_label, model_name) = name.split('.')
model = apps.get_model(app_label, model_name)
# Load the instances, avoiding transformers other than translations
# and coping with soft-deleted models and unlisted add-ons.
qs = model.get_unfiltered_manager().all()
if hasattr(qs, 'only_translations'):
qs = qs.only_translations()
instances[name] = qs.in_bulk(pks)
# instances is now a dict of "model names" that each have a dict of
# {pk: instance}. We do our second pass on the logs to build the
# "arguments" property from that data, which is a list of the instances
# that each particular log has, in the correct order.
for activity in activities:
objs = []
# We preloaded that property earlier
for item in activity.arguments_data:
# As above, each 'item' should have one key and one value only.
name, pk = list(item.items())[0]
if name in ('str', 'int', 'null'):
# It's not actually a model reference, just return the
# value directly.
objs.append(pk)
elif pk:
# Fetch the instance from the cache we built.
name = handle_renames(name)
obj = instances[name].get(int(pk))
# Most of the time, we're eventually going to call
# to_string() on each ActivityLog that we're processing
# here. For some of the models, that will result in a call
# to <model>.get_absolute_url(), which in turn can cause an
# extra SQL query because some parent model is needed to
# build the URL.
# It's difficult to predict what we'll need as ActivityLog
# is fairly generic, but we know Addon is going to be
# needed in some cases for sure (Version, Rating) so if
# we're dealing with objects that have an `addon_id`
# property, and we have already fetched the corresponding
# Addon instance, set the `addon` property on the object
# to the Addon instance we already have to avoid the extra
# SQL query.
addon_id = getattr(obj, 'addon_id', None)
if addon := instances.get('addons.addon', {}).get(addon_id):
obj.addon = addon
objs.append(obj)
# Override the arguments cached_property with what we got.
activity.arguments = objs
@cached_property
def arguments(self):
# This is a fallback: in 99% of the cases we should not be using this
# but go through the default transformer instead, which executes
# arguments_builder on the whole list of items in the queryset,
# allowing us to fetch the instances in arguments in an optimized
# manner.
self.arguments_builder([self])
return self.arguments
def set_arguments(self, args=None):
"""
Takes an object or a tuple of objects and serializes them and stores it
in the db as a json string.
"""
if args is None:
args = []
if not isinstance(args, (list, tuple)):
args = (args,)
serialize_me = []
for arg in args:
if isinstance(arg, str):
serialize_me.append({'str': arg})
elif isinstance(arg, int):
serialize_me.append({'int': arg})
elif isinstance(arg, tuple):
# Instead of passing an addon instance you can pass a tuple:
# (Addon, 3) for Addon with pk=3
serialize_me.append(dict(((str(arg[0]._meta), arg[1]),)))
else:
serialize_me.append(dict(((str(arg._meta), arg.pk),)))
self._arguments = json.dumps(serialize_me)
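# For example (hypothetical pk, mirroring the structure noted in
# arguments_builder above): set_arguments([addon, 'note', 3]) stores
# '[{"addons.addon": 12}, {"str": "note"}, {"int": 3}]'.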
@property
def details(self):
if self._details:
return json.loads(self._details)
@details.setter
def details(self, data):
self._details = json.dumps(data)
@property
def log(self):
return constants.activity.LOG_BY_ID[self.action]
def to_string(self, type_=None):
log_type = constants.activity.LOG_BY_ID[self.action]
if type_ and hasattr(log_type, '%s_format' % type_):
format = getattr(log_type, '%s_format' % type_)
else:
format = log_type.format
# We need to copy arguments so we can remove elements from it
# while we loop over self.arguments.
arguments = copy(self.arguments)
addon = None
rating = None
version = None
collection = None
tag = None
group = None
file_ = None
status = None
for arg in self.arguments:
if isinstance(arg, Addon) and not addon:
if arg.has_listed_versions():
addon = self.f(
'<a href="{0}">{1}</a>', arg.get_absolute_url(), arg.name
)
else:
addon = self.f('{0}', arg.name)
arguments.remove(arg)
if isinstance(arg, Rating) and not rating:
rating = self.f(
'<a href="{0}">{1}</a>', arg.get_absolute_url(), gettext('Review')
)
arguments.remove(arg)
if isinstance(arg, Version) and not version:
text = gettext('Version {0}')
if arg.channel == amo.RELEASE_CHANNEL_LISTED:
version = self.f(
'<a href="{1}">%s</a>' % text,
arg.version,
arg.get_absolute_url(),
)
else:
version = self.f(text, arg.version)
arguments.remove(arg)
if isinstance(arg, Collection) and not collection:
collection = self.f(
'<a href="{0}">{1}</a>', arg.get_absolute_url(), arg.name
)
arguments.remove(arg)
if isinstance(arg, Tag) and not tag:
if arg.can_reverse():
tag = self.f(
'<a href="{0}">{1}</a>', arg.get_absolute_url(), arg.tag_text
)
else:
tag = self.f('{0}', arg.tag_text)
if isinstance(arg, Group) and not group:
group = arg.name
arguments.remove(arg)
if isinstance(arg, File) and not file_:
validation = 'passed'
if self.action in (
amo.LOG.UNLISTED_SIGNED.id,
amo.LOG.UNLISTED_SIGNED_VALIDATION_FAILED.id,
):
validation = 'ignored'
file_ = self.f(
'<a href="{0}">{1}</a> (validation {2})',
arg.get_absolute_url(),
arg.filename,
validation,
)
arguments.remove(arg)
if self.action == amo.LOG.CHANGE_STATUS.id and not isinstance(arg, Addon):
# Unfortunately, this action has been abused in the past and
# the non-addon argument could be a string or an int. If it's
# an int, we want to retrieve the string and translate it.
if isinstance(arg, int) and arg in amo.STATUS_CHOICES_ADDON:
status = gettext(amo.STATUS_CHOICES_ADDON[arg])
else:
# It's not an int or not one of the choices, so assume it's
# a string or an unknown int we want to display as-is.
status = arg
arguments.remove(arg)
user = user_link(self.user)
try:
kw = {
'addon': addon,
'rating': rating,
'version': version,
'collection': collection,
'tag': tag,
'user': user,
'group': group,
'file': file_,
'status': status,
}
return self.f(str(format), *arguments, **kw)
except (AttributeError, KeyError, IndexError):
log.warning('%d contains garbage data' % (self.id or 0))
return 'Something magical happened.'
def __str__(self):
return self.to_string()
def __html__(self):
return self
@classmethod
def create(cls, action, *args, **kw):
"""
e.g. ActivityLog.create(amo.LOG.CREATE_ADDON, addon),
ActivityLog.create(amo.LOG.ADD_FILE_TO_VERSION, file, version)
In case of circular import you can use `olympia.activity.log_create()`
"""
from olympia import core
user = kw.get('user', core.get_user())
if not user:
log.warning('Activity log called with no user: %s' % action.id)
return
# We make sure that we take the timestamp if provided, instead of
# creating a new one, especially useful for log entries created
# in a loop.
al = ActivityLog(
user=user, action=action.id, created=kw.get('created', timezone.now())
)
al.set_arguments(args)
if 'details' in kw:
al.details = kw['details']
al.save()
if 'details' in kw and 'comments' in al.details:
CommentLog.objects.create(
comments=al.details['comments'],
activity_log=al,
created=kw.get('created', timezone.now()),
)
for arg in args:
create_kwargs = {
'activity_log': al,
'created': kw.get('created', timezone.now()),
}
if isinstance(arg, tuple):
class_ = arg[0]
id_ = arg[1]
else:
class_ = arg.__class__
id_ = arg.id if isinstance(arg, ModelBase) else None
if class_ == Addon:
AddonLog.objects.create(addon_id=id_, **create_kwargs)
elif class_ == Version:
VersionLog.objects.create(version_id=id_, **create_kwargs)
elif class_ == Group:
GroupLog.objects.create(group_id=id_, **create_kwargs)
elif class_ == Block:
BlockLog.objects.create(block_id=id_, guid=arg.guid, **create_kwargs)
elif class_ == ReviewActionReason:
ReviewActionReasonLog.objects.create(reason_id=id_, **create_kwargs)
if getattr(action, 'store_ip', False):
# Index specific actions by their IP address. Note that the caller
# must take care of overriding remote addr if the action is created
# from a task.
IPLog.objects.create(
ip_address=core.get_remote_addr(),
activity_log=al,
created=kw.get('created', timezone.now()),
)
return al
|
|
import tensorflow as tf
import numpy as np
import datetime
import threading
import random
import time
import sys
import gym
env = gym.make('Breakout-v0')
CPU_ONLY = False
TRAIN = True
BENCHMARK = False
if 'eval' in sys.argv:
TRAIN = False
if 'cpu' in sys.argv:
CPU_ONLY = True
if 'benchmark' in sys.argv:
BENCHMARK = True
NUM_AGENT_THREAD = 4
LOG_INTERVAL = 1000
SAVE_INTERVAL = 50000
# hyperparameter settings
GAMMA = .95
LEARNING_RATE = .0002
DECAY_RATE = .99
MOMENTUM = 0
EPSILON = 1e-6
BATCH_SIZE = 32
OBSERVE = 50000
ACTION_HISTORY_LENGTH = 4
MAX_EXPLORE_FRAMES = 1000000
MIN_EXPLORE_RATE = .10
MAX_D_SIZE = 1000000 # maximum size of replay queue
C = 10000 # Q reset interval
SCREEN_DIMS = 84, 84
NUM_ACTIONS = env.action_space.n
ACTION_MEANINGS = env.get_action_meanings()
env = None
print('breakout-v0-player is running with TRAIN=%s'%TRAIN)
def conv2d(x, W, s, cpu_only=False):
cpu_only = CPU_ONLY or cpu_only
return tf.nn.conv2d(x, W, strides=[1, s, s, 1] if cpu_only else [1, 1, s, s], padding='VALID', data_format='NHWC' if cpu_only else 'NCHW')
def weight_variable(shape, name=None):
initial = tf.truncated_normal(shape, stddev=0.02)
return tf.Variable(initial, name=name)
def bias_variable(shape, name=None):
initial = tf.constant(0.01, shape=shape)
return tf.Variable(initial, name=name)
def create_q(state, weights=None, cpu_only=False):
cpu_only = CPU_ONLY or cpu_only
if weights is not None:
w_conv1, b_conv1, w_conv2, b_conv2, w_conv3, b_conv3, w_fc1, b_fc1, w_fc2, b_fc2 = weights
if cpu_only:
state = tf.transpose(state, perm=[0,2,3,1])
# state: (x_1, x_2, ... x_n) of shape [-1, ACTION_HISTORY_LENGTH, HEIGHT, WIDTH]
with tf.name_scope('conv1'):
if weights is None:
w_conv1 = weight_variable([8, 8, ACTION_HISTORY_LENGTH, 32], name='w_conv1')
b_conv1 = bias_variable([32], name='b_conv1')
h_conv1 = tf.nn.relu(tf.nn.bias_add(conv2d(state, w_conv1, 4, cpu_only), b_conv1, data_format='NHWC' if cpu_only else 'NCHW'))
with tf.name_scope('conv2'):
if weights is None:
w_conv2 = weight_variable([4, 4, 32, 64], name='w_conv2')
b_conv2 = bias_variable([64])
h_conv2 = tf.nn.relu(tf.nn.bias_add(conv2d(h_conv1, w_conv2, 2, cpu_only), b_conv2, data_format='NHWC' if cpu_only else 'NCHW'))
with tf.name_scope('conv3'):
if weights is None:
w_conv3 = weight_variable([3, 3, 64, 64], name='w_conv3')
b_conv3 = bias_variable([64])
h_conv3 = tf.nn.relu(tf.nn.bias_add(conv2d(h_conv2, w_conv3, 1, cpu_only), b_conv3, data_format='NHWC' if cpu_only else 'NCHW'))
if cpu_only:
h_conv3 = tf.transpose(h_conv3, perm=[0,3,1,2])
shape = h_conv3.get_shape().as_list()
H, W = shape[2], shape[3]
h_conv3_flattened = tf.reshape(h_conv3, [-1, 64*H*W], name='h_conv3_flatten')
with tf.name_scope('fc1'):
if weights is None:
w_fc1 = weight_variable([64*H*W, 512])
b_fc1 = bias_variable([512])
h_fc1 = tf.nn.relu(tf.matmul(h_conv3_flattened, w_fc1) + b_fc1)
with tf.name_scope('fc2'):
if weights is None:
w_fc2 = weight_variable([512, NUM_ACTIONS])
b_fc2 = bias_variable([NUM_ACTIONS])
h_fc2 = tf.matmul(h_fc1, w_fc2) + b_fc2
return h_fc2, (w_conv1, b_conv1, w_conv2, b_conv2, w_conv3, b_conv3, w_fc1, b_fc1, w_fc2, b_fc2)
def create_predicted_action(q_values):
return tf.argmax(q_values, 1)
def create_max_q(q_values):
return tf.reduce_max(q_values, reduction_indices=1)
def create_q_reduced_by_action(q_values, a):
one_hot_encoded_a = tf.one_hot(a, NUM_ACTIONS, 1., 0.)
q_value = tf.reduce_sum(q_values * one_hot_encoded_a, reduction_indices=1)
return q_value
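# Note: multiplying by the one-hot mask above zeroes out every action column
# except the chosen one, so the row sum is Q(s, a). For illustration only
# (three actions assumed, Breakout actually has env.action_space.n): with a
# q_values row of [1.0, 2.0, 3.0] and a = [2], the result is [3.0].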
def create_loss(q_values, y, a):
q_value = create_q_reduced_by_action(q_values, a)
loss = tf.reduce_mean(tf.square(y - q_value))
return loss
def create_train_op(loss):
return tf.train.RMSPropOptimizer(LEARNING_RATE, DECAY_RATE, MOMENTUM, EPSILON).minimize(loss)
def create_preprocess(x):
grayscale = tf.image.rgb_to_grayscale(x)
resized = tf.image.resize_images(grayscale, *SCREEN_DIMS)/255.
return resized
def start_session():
global global_step, ph_new_global_step, assign_global_step
global ph_state, ph_x
global _preprocess, predicted_action, q_values, max_q, predicted_action_cpu, q_values_cpu, max_q_cpu
global gamma_max_target_q, reset_target_q, gamma_target_q_reduced_by_action, predict_by_double_dqn
global ph_y, ph_a
global loss, train_op
global input_summary, ph_avg_reward, reward_summary, ph_avg_score_per_episode, score_per_episode_summary, ph_avg_loss, loss_summary, ph_avg_max_q_value, max_q_value_summary, ph_exploration_rate, exploration_rate_summary
with tf.Graph().as_default() as g:
global_step = tf.Variable(0, name='step', trainable=False)
ph_new_global_step = tf.placeholder(tf.int32, shape=[], name='new_global_step')
assign_global_step = tf.assign(global_step, ph_new_global_step, name='assign_global_step')
with tf.name_scope('input'):
# preprocessed state(x_1, x_2, ..., x_n)
ph_x = tf.placeholder(tf.int32, shape=[210, 160, 3])
ph_state = tf.placeholder(tf.float32, shape=[None, ACTION_HISTORY_LENGTH, *SCREEN_DIMS], name='state')
ph_y = tf.placeholder(tf.float32, shape=[None], name='y') # y = r or r + gamma * max_Q^(s, a)
ph_a = tf.placeholder(tf.int64, shape=[None], name='a') # actions
with tf.device('/gpu:0'):
with tf.name_scope('Q'):
q_values, theta = create_q(ph_state)
with tf.name_scope('pi'):
predicted_action = create_predicted_action(q_values)
with tf.name_scope('max_Q'):
max_q = create_max_q(q_values)
with tf.name_scope('target_Q'):
target_q_values, theta_m1 = create_q(ph_state)
with tf.name_scope('target_Q_reduced_by_action'):
target_q_reduced_by_action = create_q_reduced_by_action(target_q_values, ph_a)
with tf.name_scope('gamma_target_Q_reduced_by_action'):
gamma_target_q_reduced_by_action = GAMMA * target_q_reduced_by_action
with tf.name_scope('predict_by_double_dqn'):
predict_by_double_dqn = GAMMA * create_q_reduced_by_action(target_q_values, predicted_action)
with tf.name_scope('max_target_Q'):
max_target_q = create_max_q(target_q_values)
with tf.name_scope('gamma_max_target_Q'):
gamma_max_target_q = GAMMA * max_target_q
with tf.name_scope('reset_target_Q'):
reset_target_q = tf.group(*(tf.assign(lvalue, rvalue) for lvalue, rvalue in zip(theta_m1, theta)))
with tf.name_scope('loss'):
loss = create_loss(q_values, ph_y, ph_a)
with tf.name_scope('train'):
train_op = create_train_op(loss)
with tf.device('/cpu:0'):
with tf.name_scope('preprocess'):
_preprocess = create_preprocess(ph_x)
with tf.name_scope('Q_cpu'):
q_values_cpu, _ = create_q(ph_state, theta, cpu_only=True)
with tf.name_scope('pi_cpu'):
predicted_action_cpu = create_predicted_action(q_values_cpu)
with tf.name_scope('max_Q_cpu'):
max_q_cpu = create_max_q(q_values_cpu)
# summaries
input_summary = tf.image_summary('input', tf.reshape(tf.transpose(ph_state[0:1,:,:,:], perm=[1,2,3,0]), [-1, *SCREEN_DIMS, 1]), max_images=ACTION_HISTORY_LENGTH)
# update every input()
ph_avg_reward = tf.placeholder(tf.float32, shape=[], name='avg_reward')
reward_summary = tf.scalar_summary('_reward', ph_avg_reward)
# update at new_episode()
ph_avg_score_per_episode = tf.placeholder(tf.float32, shape=[], name='avg_score_per_episode')
score_per_episode_summary = tf.scalar_summary('_score_per_episode', ph_avg_score_per_episode)
# update at train()
ph_avg_loss = tf.placeholder(tf.float32, shape=[], name='avg_loss')
loss_summary = tf.scalar_summary('_loss', ph_avg_loss)
# update at train()
ph_exploration_rate = tf.placeholder(tf.float32, shape=[], name='exploration_rate')
exploration_rate_summary = tf.scalar_summary('_exploration_rate', ph_exploration_rate)
# update at inference
ph_avg_max_q_value = tf.placeholder(tf.float32, shape=[], name='avg_max_q_value')
max_q_value_summary = tf.scalar_summary('_max_q_value', ph_avg_max_q_value)
# start session
sess = tf.Session(config=tf.ConfigProto(allow_soft_placement=True))
initializers = (tf.initialize_all_variables(), reset_target_q)
saver = tf.train.Saver()
checkpoint = tf.train.get_checkpoint_state("saved_networks")
if checkpoint and checkpoint.model_checkpoint_path:
saver.restore(sess, checkpoint.model_checkpoint_path)
print("Successfully loaded:", checkpoint.model_checkpoint_path)
else:
print("Could not find old network weights")
import os
assert os.path.isdir('saved_networks')
assert TRAIN
for initializer in initializers:
sess.run(initializer)
g.finalize()
return sess, saver
def save_networks(step):
sess.run(assign_global_step, feed_dict={ph_new_global_step: step})
saver.save(sess, 'saved_networks/' + 'network' + '-dqn', global_step=step)
print('[%s] Successfully saved networks -'%datetime.datetime.now(), step)
def get_exploration_rate():
return max(MIN_EXPLORE_RATE, 1. + (MIN_EXPLORE_RATE - 1.) * step / MAX_EXPLORE_FRAMES)
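# A quick sanity check of the linear anneal above (values follow directly from
# the constants defined at the top): at step 0 the rate is 1.0 (pure
# exploration), at step 500,000 it is 1 - 0.9 * 0.5 = 0.55, and from
# MAX_EXPLORE_FRAMES (1,000,000) onwards it stays clamped at MIN_EXPLORE_RATE = 0.10.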
def train_step():
global step, st, ps
global total_loss, cnt_loss
minibatch = random.sample(D, BATCH_SIZE)
state_batch = []
action_batch = []
y_batch = []
undone_indices = []
undone_state_p1 = []
for i, (t_state, t_action, t_reward, t_state_p1, t_done) in enumerate(minibatch):
state_batch.append(t_state)
action_batch.append(t_action)
y_batch.append(t_reward)
if t_done == False: # to calculate future rewards
undone_indices.append(i)
undone_state_p1.append(t_state_p1)
# calculate future rewards
predicted_q_values = sess.run(gamma_max_target_q, feed_dict={ph_state: undone_state_p1})
# double DQN
#predicted_q_values = sess.run(predict_by_double_dqn, feed_dict={ph_state: undone_state_p1})
for i, j in enumerate(undone_indices):
y_batch[j] += predicted_q_values[i]
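# At this point every y_batch entry matches the standard DQN target:
# y_j = r_j for terminal transitions, and
# y_j = r_j + GAMMA * max_a' Q_target(s_{j+1}, a') otherwise
# (the commented-out line above would swap in the double-DQN estimate instead).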
# train
_, current_loss = sess.run([train_op, loss], feed_dict={ph_y: y_batch, ph_state: state_batch, ph_a: action_batch})
# log loss
cnt_loss += 1
total_loss += current_loss
t_cnt_loss = cnt_loss
if t_cnt_loss == (LOG_INTERVAL // 10): # the "and TRAIN" guard was dropped; TRAIN is always True here
summary_writer.add_summary(sess.run(loss_summary, feed_dict={ph_avg_loss: total_loss/cnt_loss}), step)
summary_writer.add_summary(sess.run(exploration_rate_summary, feed_dict={ph_exploration_rate: get_exploration_rate()}), step)
total_loss = 0
cnt_loss = 0
step += 1
if BENCHMARK and step%100==0:
print((step-ps)/(time.time()-st),'iterations per second')
st = time.time()
ps = step
if step % C == 0:
sess.run(reset_target_q)
if step % SAVE_INTERVAL == 0 and not BENCHMARK:
print('Autosaving networks ...')
save_networks(step)
def preprocess(x):
return sess.run(_preprocess, feed_dict={ph_x: x})[:, :, 0]
def put_experience(s, a, r, s_p, t, D_lock=None):
global D_index
if D_lock:
D_lock.acquire()
new_exp = (s, a, r, s_p, t)
if len(D) >= MAX_D_SIZE:
D[D_index] = new_exp
D_index += 1
if D_index == len(D):
D_index = 0
else:
D.append(new_exp)
if D_lock:
D_lock.release()
def agent_worker(agent_coord, D_lock=None):
assert OBSERVE <= MAX_D_SIZE
global D, total_loss, cnt_loss, st, ps
env = gym.make('Breakout-v0')
get_state = lambda current:prev_ob_list[-ACTION_HISTORY_LENGTH:] if current else prev_ob_list[-ACTION_HISTORY_LENGTH-1:-1]
total_reward = 0
cnt_reward = 0
total_score_per_episode = 0
cnt_score_per_episode = 0
total_max_q_value = 0
cnt_max_q_value = 0
total_loss = 0
cnt_loss = 0
# benchmark
st = time.time()
ps = step
while not agent_coord.should_stop():
# new episode
observation = env.reset()
done = None
score = 0
cnt_same_state = 0
last_score = None
prev_ob_list = [preprocess(observation)] * (ACTION_HISTORY_LENGTH - 1) # previous observations
while not agent_coord.should_stop():
prev_ob_list.append(preprocess(observation))
if not TRAIN:
env.render()
if done is not None and TRAIN:
put_experience(get_state(False), action, min(1, reward), get_state(True), done, D_lock)
if len(D) > (OBSERVE if not BENCHMARK else BATCH_SIZE):
train_step()
if done is not None and done:
if not TRAIN:
print('score:', score)
time.sleep(1)
break
if TRAIN and (random.random() < get_exploration_rate()):
action = env.action_space.sample()
else:
# evaluate
ops = [predicted_action, max_q]
if not TRAIN:
ops = [predicted_action, max_q, q_values]
feed_dict = {ph_state: (get_state(True),)}
if cnt_max_q_value == LOG_INTERVAL:
ops.extend([input_summary, max_q_value_summary])
feed_dict[ph_avg_max_q_value] = total_max_q_value / cnt_max_q_value
total_max_q_value = 0
cnt_max_q_value = 0
ret = sess.run(ops, feed_dict=feed_dict)
action = ret[0][0]
# prevent the agent from doing nothing
if not TRAIN:
if last_score == score:
cnt_same_state += 1
if cnt_same_state >= 50:
action = 1 # FIRE
cnt_same_state = 0
else:
cnt_same_state = 0
last_score = score
if len(D) >= OBSERVE:
total_max_q_value += ret[1][0]
cnt_max_q_value += 1
if TRAIN:
for summary in ret[2:]:
summary_writer.add_summary(summary, step)
else:
print(ret[-1])
print(ACTION_MEANINGS[action], '\t' if len(ACTION_MEANINGS[action]) >= 8 else '\t\t', ret[1][0])
observation, reward, done, info = env.step(action)
score += reward
if len(D) >= OBSERVE:
total_reward += reward
cnt_reward += 1
if cnt_reward == (LOG_INTERVAL*10):
summary_writer.add_summary(sess.run(reward_summary, feed_dict={ph_avg_reward: total_reward/cnt_reward}), step)
total_reward = 0
cnt_reward = 0
# episode done
if len(D) >= OBSERVE:
total_score_per_episode += score
cnt_score_per_episode += 1
if cnt_score_per_episode == (LOG_INTERVAL//10):
summary_writer.add_summary(sess.run(score_per_episode_summary, feed_dict={ph_avg_score_per_episode:total_score_per_episode/cnt_score_per_episode}), step)
total_score_per_episode = 0
cnt_score_per_episode = 0
def main():
global sess, saver, summary_writer, D, D_index, step
sess, saver = start_session()
step = sess.run(global_step)
summary_writer=tf.train.SummaryWriter('logdir', sess.graph)
coord = tf.train.Coordinator()
D = [] # replay memory
D_index = 0
if TRAIN:
D_lock = threading.Lock()
agent_coord = tf.train.Coordinator()
agent_threads = []
for i in range(NUM_AGENT_THREAD):
agent_thread = threading.Thread(target=agent_worker, args=(agent_coord, D_lock))
agent_thread.start()
agent_threads.append(agent_thread)
print("Waiting for initial observation")
while len(D) < (OBSERVE if not BENCHMARK else BATCH_SIZE):
print("Current len(D):", len(D))
time.sleep(1)
agent_coord.request_stop()
agent_coord.join(agent_threads)
try:
agent_worker(coord)
except Exception as e:
print(e)
# Report exceptions to the coordinator.
coord.request_stop(e)
finally:
coord.request_stop()
if TRAIN and not BENCHMARK:
print('Received should_stop - Saving networks ...')
save_networks(step)
if __name__ == '__main__':
main()
|
|
#!/usr/bin/env python3
"""
Copyright 2017 Andris Zbitkovskis
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from individuals import Individual
from datetime import datetime
class Population:
def get_individual_bin_size(self,a_min,a_max,a_int):
i_size=1+(float(a_max)-float(a_min))/float(a_int)
res=0
for i in range(16):
l_limit=2**i
u_limit=2**(i+1)
if i_size >l_limit and i_size <= u_limit:
res=i+1
break
return res
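# In other words, this returns the smallest bit width b (capped at 16) for
# which the (a_max - a_min) / a_int + 1 discrete values of the range fit into
# 2**b codes; encode_value / decode_value below map real values onto those codes.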
def init_population(self,src_individuals,dst_inds=None):
if dst_inds is None:
dst_inds=self.individuals
for ind in src_individuals:
weights=[]
for w in ind.weights:
weights.append((w['weight'],w['idx']))
t_ind=Individual(weights,ind.byte_size)
t_ind.fitness_func=self.fitness_func
t_ind.fitness=ind.fitness
#t_ind.recalc_fitness()
self.append(t_ind,dst_inds)
def encode_value(self,a_value):
k=int((float(a_value)-(self.l_limit))*(2**self.b_size-1)/(self.u_limit-self.l_limit))
return k
def decode_value(self,a_value):
x=self.l_limit+int(a_value)*(self.u_limit-self.l_limit)/float(2**self.b_size-1)
return x
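# A worked example with hypothetical limits (not taken from any caller here):
# for l_limit=0, u_limit=10, interval=1 the bin size is 4 bits (11 values fit
# in 2**4 = 16 codes), so encode_value(5) = int(5 * 15 / 10) = 7 and
# decode_value(7) = 7 * 10 / 15 ~ 4.67 - the mapping quantizes onto a
# 2**b_size - 1 grid and is not an exact round trip.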
def get_universe(self,ndigits=2):
v_size=2**self.b_size
res=[Individual]*v_size
for i in range(v_size):
x=round(self.decode_value(i),ndigits)
res[i]=Individual([(x,i)],self.b_size)
return res
def get_individual_from_br(self,ab_rep):
i=0
j=0
b_rep=[]
weights=[]
tind=Individual([(0,0)],self.b_size)
for b in ab_rep:
if i == j*self.b_size:
if j != 0 :
xk=tind.d_convert(b_rep,self.b_size)
x=round(self.decode_value(xk),2)
weights.append((x,xk))
b_rep=[]
j+=1
b_rep.append(b)
i+=1
xk=tind.d_convert(b_rep,self.b_size)
x=round(self.decode_value(xk),2)
weights.append((x,xk))
ind=Individual(weights,self.b_size)
return ind
def append(self,a_ind,a_list=None):
if a_list is None:
a_list=self.individuals
a_ind.fitness_func=self.fitness_func
# a_ind.recalc_fitness()
a_list.append(a_ind)
self.size+=1
def set_individual(self,a_ind,index,a_list=None):
if a_list is None:
a_list=self.individuals
a_ind.fitness_func=self.fitness_func
# a_ind.recalc_fitness()
a_list[index]=a_ind
def append_by_br(self,ab_rep):
ind=self.get_individual_from_br(ab_rep)
ind.fitness_func=self.fitness_func
#ind.recalc_fitness()
self.individuals.append(ind)
self.size+=1
def set_individual_by_br(self,ab_rep,index):
ind=self.get_individual_from_br(ab_rep)
ind.fitness_func=self.fitness_func
ind.recalc_fitness()
self.individuals[index]=ind
def get_stats(self,inds=None):
if inds is None:
inds=self.individuals
avgf=0
maxf=0
sumf=0
c=0
for i in inds:
if i.fitness>maxf:
maxf=i.fitness
sumf+=i.fitness
c+=1
avgf=sumf/c
avgf=round(avgf,4)
maxf=round(maxf,4)
sumf=round(sumf,4)
return (sumf,maxf,avgf)
def get_neo(self):
maxf=0
for i in self.individuals:
if i.fitness>maxf:
maxf=i.fitness
one=i
return one
def get_sum_fitness(self,a_inds):
res=0
for i in a_inds:
res+=i.fitness
return res
def init_roulette(self,verbose=False,a_inds=None):
if a_inds is None:
a_inds=self.individuals
sumf=self.get_sum_fitness(a_inds)
l=len(a_inds)
ipos=0.0
for i in range(l-1):
ipos+= round(a_inds[i].fitness/sumf,4)
self.possibilities.append(ipos)
self.possibilities.append(1.0)
self.init_population(a_inds,self.rndselection)
for ri in range(len(self.rndselection)):
i=self.rndselection[ri]
def get_chosen_idx(self,a_rnd):
i=0
while (a_rnd>self.possibilities[i]):
i+=1
return i
def get_equal_intervals(self,a_byte_length):
# needs checking: n+byte
v_interval=1.0/a_byte_length
res=[]
for i in range(a_byte_length):
res.append(i*v_interval)
res.append(1.0)
return res
def get_cross_point(self,a_rnd,a_pCnt,verbose=False):
interval=self.get_equal_intervals(a_pCnt)
if verbose:
print ("Cross_point probabilities:")
for pi in range(len(interval)-1):
print (" {} ".format(round(interval[pi],4)),end='')
print (" ")
i=0
if ( a_rnd!=0 ):
while not ( interval[i] <a_rnd <= interval[i+1] ):
i+=1
return i+1
def get_mutation_point(self,a_rnd,a_byte_length,verbose=False):
interval=self.get_equal_intervals(a_byte_length)
i=0
if ( a_rnd!=0 ):
while not ( interval[i] <a_rnd <= interval[i+1] ):
i+=1
return i
def get_cross_points(self,a_rnd_a,a_rnd_b,a_pCnt,verbose=False):
point_a=self.get_cross_point(a_rnd_a,a_pCnt,verbose)
point_b=a_pCnt+self.get_cross_point(a_rnd_b,a_pCnt-1,verbose)
return (point_a,point_b)
def cross_over(self,a_ind_a,a_ind_b,a_rnd_a,a_rnd_b,a_pCnt,verbose=False):
point_a,point_b=self.get_cross_points(a_rnd_a,a_rnd_b,a_pCnt,verbose)
res_a=[]
res_b=[]
for i in range(len(a_ind_a.b_rep)):
if i < point_a or i >= point_b:
res_a.append(a_ind_a.b_rep[i])
res_b.append(a_ind_b.b_rep[i])
else:
res_a.append(a_ind_b.b_rep[i])
res_b.append(a_ind_a.b_rep[i])
ind_rA=self.get_individual_from_br(res_a)
ind_rB=self.get_individual_from_br(res_b)
if verbose:
r1=a_ind_a.sb_rep()
r2=a_ind_b.sb_rep()
print("{} Cpoint {}({}) {}({}) => {}".format(r1,point_a,round(a_rnd_a,4),point_b,round(a_rnd_b,4),ind_rA.sb_rep()))
print("{} Cpoint {}({}) {}({}) => {}".format(r2,point_a,round(a_rnd_a,4),point_b,round(a_rnd_b,4),ind_rB.sb_rep()))
return (ind_rA,ind_rB)
def exec_mutation(self,ind,a_rNumbers,a_mutation_point_cnt,verbose=False):
m_points=[]
w_length=len(ind.b_rep)
res_bR=[]
for i in range(a_mutation_point_cnt):
rnd=a_rNumbers[i]
point=self.get_mutation_point (rnd,w_length,i==0 and verbose)
m_points.append( point)
if verbose:
print ("mpoint {} ({}) ".format(point,round(rnd,4)),end='')
if verbose:
print (" ")
for i in range(w_length):
if i in m_points:
if ind.b_rep[i] == 0:
res_bR.append(1)
else:
res_bR.append(0)
else:
res_bR.append(ind.b_rep[i])
ind_r=self.get_individual_from_br(res_bR)
return ind_r
def print_ind(self,inds=None,prefix="",delimiter='\t',log_file=None):
if inds is None:
inds=self.individuals
prefix="[{}]: {}:\t".format(datetime.now().time(),prefix)
print_strings=[]
header=prefix+"Fitness"
for hi in range(len(inds[0].weights)):
header+="{}W{}{}WI{}".format(delimiter,hi,delimiter,hi,hi)
header+=delimiter+"Wbin"
print_strings.append(header)
for ri in range(len(inds)):
i=inds[ri]
data=prefix+"{}".format(i.fitness)
for hi in range(len(i.weights)):
w= i.weights[hi]
data+="{}{}{}{}".format(delimiter,w['weight'],delimiter,w['idx'])
data+="{}{}".format(delimiter,i.str_binary_rep())
print_strings.append(data)
sumf,maxf,avgf = self.get_stats(inds)
print_strings.append( "{}SUM/MAX/AVG: {}/{}/{}".format(prefix,sumf,maxf,avgf) )
if log_file is None:
[self.logger.debug(ps) for ps in print_strings]
else:
#log_file=open(filename,'a')
[log_file.write(ps+'\n') for ps in print_strings]
#log_file.close()
def __init__(self,al_limit,au_limit,a_interval,a_fitness_func,pop_idx,log_file):
self.individuals=[]
self.possibilities=[]
self.rndselection=[]
self.selection=[]
self.selection_source=[]
self.childs=[]
self.u_limit=float(au_limit)
self.l_limit=float(al_limit)
self.interval=float(a_interval)
self.b_size=self.get_individual_bin_size(al_limit,au_limit,a_interval)
self.fitness_func=a_fitness_func
self.fitness=0
self.final=[]
self.size=0
self.pop_idx = pop_idx
self.log_file = log_file
|
|
import numpy as np
import scipy.optimize
import matplotlib.pyplot as plt
import psycopg2
from dataprocessing import *
from bicycle import WheelAssemblyGyrostat
# Error tolerance used for curve_fit
ftol = 1e-12
xtol = 1e-12
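# Note: decaying_sinusoid comes from dataprocessing (imported above); judging by how
# popt is unpacked throughout this script (a, zeta, T, d, e), its parameters appear
# to be amplitude, damping ratio, period, phase and offset.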
# Establish database connection
conn = psycopg2.connect(database="robot_bicycle_parameters", user="hazelnusse")
cur = conn.cursor()
# Fork mass center measurements
cur.execute('select alpha, a from parametermeasurements.forkcenterofmass;')
results = np.array(cur.fetchall())
alpha_fork = results[:, 0]
fork_d, fork_f = compute_d_f(results)[0]
# Frame mass center measurements
cur.execute('select alpha, a from parametermeasurements.framecenterofmass;')
results = np.array(cur.fetchall())
alpha_frame = results[:, 0]
frame_d, frame_f = compute_d_f(results)[0]
fig_fork_torsional = plt.figure()
fig_frame_torsional = plt.figure()
fig_calibrationrod = plt.figure()
fig_rearwheel_torsional = plt.figure()
fig_frontwheel_torsional = plt.figure()
axes_fork = []
axes_frame = []
axes_rod = fig_calibrationrod.add_subplot(111)
axes_rearwheel = []
axes_frontwheel = []
# Calculation of calibration rod period
rod_periods = []
for i in [1, 2, 3, 4, 5]:
cur.execute('select samplefrequency, duration, voltage from ' +
'parametermeasurements.rodtorsionalpendulumtimeseries'
' where id = {0};'.format(i))
f, d, v = cur.fetchall()[0]
v = np.array(v)
t, Ts = np.linspace(0, d, len(v), endpoint=False, retstep=True)
assert (Ts == 1.0/f)
p0 = [max(v)-v.mean(), 0.01, 1.0, np.pi/2, v.mean()]
popt, pcov = scipy.optimize.curve_fit(decaying_sinusoid, t, v, p0=p0,
ftol=ftol, xtol=xtol)
a, zeta, T, d, e = popt
rod_periods.append(T)
v_fit = decaying_sinusoid(t, a, zeta, T, d, e)
axes_rod.plot(t, v, colors[i-1]+',')
axes_rod.plot(t, v_fit, colors[i-1]+'-')
axes_rod.set_title("Calibration rod torsional pendulum")
# Compute the mean of all periods
rod_period = np.array(rod_periods).mean()
#print("Rod periods = " + str(rod_periods))
#print("Mean rod period = " + str(rod_period))
# Calculation of calibration rod inertia
cur.execute('select mass, outsidediameter, insidediameter, length from parametermeasurements.calibrationrods;')
mass, Od, Id, length = cur.fetchall()[0]
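# Transverse moment of inertia of a hollow cylinder about its mass center:
# I = m/12 * (3*(R_o**2 + R_i**2) + L**2); the /4.0 below converts the measured
# diameters Od, Id to radii.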
rod_transverse_inertia = 1.0/12.0*mass*(3.0*(Od**2 + Id**2)/4.0 + length**2)
#print("Rod transverse inertia = " + str(rod_transverse_inertia))
# Frame and Fork Torsional Pendulum measurements
fork_periods = []
frame_periods = []
for i in [1, 2, 3, 4]:
axes_frame.append(fig_frame_torsional.add_subplot(320 + i))
axes_fork.append(fig_fork_torsional.add_subplot(320 + i))
# Frame period calculations
cur.execute('select * from' +
' parametermeasurements.frametorsionalpendulumtimeseries '+
' where configuration = {0};'.format(i))
results = np.array(cur.fetchall())
T_sum = 0
for j, trial in enumerate(results):
v = np.array(trial[4])
t, Ts = np.linspace(0, trial[3], len(trial[4]), endpoint=False, retstep=True)
assert (Ts == 1.0/trial[2])
p0 = [max(v)-v.mean(), 0.001, 2.0, np.pi/2, v.mean()]
popt, pcov = scipy.optimize.curve_fit(decaying_sinusoid, t, v, p0=p0,
ftol=ftol, xtol=xtol)
a, zeta, T, d, e = popt
T_sum += T
v_fit = decaying_sinusoid(t, a, zeta, T, d, e)
axes_frame[i-1].plot(t, v, colors[j] + ',')
axes_frame[i-1].plot(t, v_fit, colors[j] + '-')
axes_frame[i-1].set_title("Frame torsional pendulum configuration {0}".format(i))
# Compute the mean of all periods for all trials at a given configuration
frame_periods.append(T_sum / (j + 1))
# Fork period calculations
cur.execute('select * from' +
' parametermeasurements.forktorsionalpendulumtimeseries '+
' where configuration = {0};'.format(i))
results = np.array(cur.fetchall())
T_sum = 0
for j, trial in enumerate(results):
v = np.array(trial[4])
t, Ts = np.linspace(0, trial[3], len(trial[4]), endpoint=False, retstep=True)
assert (Ts == 1.0/trial[2])
p0 = [max(v)-v.mean(), 0.01, 0.3, np.pi/2, v.mean()]
popt, pcov = scipy.optimize.curve_fit(decaying_sinusoid, t, v, p0=p0,
ftol=ftol, xtol=xtol)
a, zeta, T, d, e = popt
T_sum += T
v_fit = decaying_sinusoid(t, a, zeta, T, d, e)
axes_fork[i-1].plot(t, v, colors[j]+',')
axes_fork[i-1].plot(t, v_fit, colors[j]+'-')
axes_fork[i-1].set_title("Fork torsional pendulum configuration {0}".format(i))
# Compute the mean of all periods for all trials at a given configuration
fork_periods.append(T_sum / (j+1))
#print("Mean frame periods:")
#print(frame_periods)
#print("Mean fork periods:")
#print(fork_periods)
# Compute frame inertia scalars
frame_inertias = compute_Ixx_Ixz_Izz(alpha_frame[:-1], frame_periods,
rod_transverse_inertia, rod_period)
IRxx, IRxz, IRzz = frame_inertias[0]
# Compute fork inertia scalars
fork_inertias = compute_Ixx_Ixz_Izz(alpha_fork[:-1], fork_periods,
rod_transverse_inertia, rod_period)
IFxx, IFxz, IFzz = fork_inertias[0]
# Frame and Fork compound pendulum
axes_frame.append(fig_frame_torsional.add_subplot(325))
axes_fork.append(fig_fork_torsional.add_subplot(325))
# Frame period calculations
cur.execute('select * from parametermeasurements.framecompoundpendulumtimeseries;')
results = np.array(cur.fetchall())
T_sum = 0
for j, trial in enumerate(results):
v = np.array(trial[4])
t, Ts = np.linspace(0, trial[3], len(trial[4]), endpoint=False, retstep=True)
assert (Ts == 1.0/trial[2])
p0 = [max(v)-v.mean(), 0.01, 1.75, 0.0, v.mean()]
popt, pcov = scipy.optimize.curve_fit(decaying_sinusoid, t, v, p0=p0,
ftol=ftol, xtol=xtol)
a, zeta, T, d, e = popt
T_sum += T
v_fit = decaying_sinusoid(t, a, zeta, T, d, e)
axes_frame[4].plot(t, v, colors[j] + ',')
axes_frame[4].plot(t, v_fit, colors[j] + '-')
axes_frame[4].set_title("Frame compound pendulum configuration")
# Compute the mean of all periods for all trials
T_frame_compound = T_sum / (j + 1)
# Fork period calculations
cur.execute('select * from parametermeasurements.forkcompoundpendulumtimeseries;')
results = np.array(cur.fetchall())
T_sum = 0
for j, trial in enumerate(results):
v = np.array(trial[4])
t, Ts = np.linspace(0, trial[3], len(trial[4]), endpoint=False, retstep=True)
assert (Ts == 1.0/trial[2])
p0 = [max(v)-v.mean(), 0.01, 1.25, 0.0, v.mean()]
popt, pcov = scipy.optimize.curve_fit(decaying_sinusoid, t, v, p0=p0,
ftol=ftol, xtol=xtol)
a, zeta, T, d, e = popt
T_sum += T
v_fit = decaying_sinusoid(t, a, zeta, T, d, e)
axes_fork[4].plot(t, v, colors[j]+',')
axes_fork[4].plot(t, v_fit, colors[j]+'-')
axes_fork[4].set_title("Fork compound pendulum configuration")
# Compute the mean of all periods for all trials
T_fork_compound = T_sum / (j + 1)
# Compute rear wheel inertias
cur.execute('select samplefrequency, duration, voltage from parametermeasurements.rearwheeltorsionalpendulumtimeseries;')
results = cur.fetchall()
rearwheel_periods = []
axes_rearwheel.append(fig_rearwheel_torsional.add_subplot(211))
for i, trial in enumerate(results):
f, d, v = trial
v = np.array(v)
t, Ts = np.linspace(0, d, len(v), endpoint=False, retstep=True)
assert (Ts == 1.0/f)
# Initial guess on parameters
p0 = [max(v)-v.mean(), 0.001, .5, np.pi/2, v.mean()]
popt, pcov = scipy.optimize.curve_fit(decaying_sinusoid, t, v, p0=p0,
ftol=ftol, xtol=xtol)
a, zeta, T, d, e = popt
rearwheel_periods.append(T)
v_fit = decaying_sinusoid(t, a, zeta, T, d, e)
axes_rearwheel[0].plot(t, v, colors[i] + ',')
axes_rearwheel[0].plot(t, v_fit, colors[i] + '-')
axes_rearwheel[0].set_title('Rear wheel torsional pendulum')
#print("Rear wheel periods: {0}".format(rearwheel_periods))
rearwheel_period = np.array(rearwheel_periods).mean()
#print("Mean rear wheel period = {0}".format(rearwheel_period))
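# Torsional pendulum: T = 2*pi*sqrt(I/k). With the same torsion rod (same k) used
# for the calibration rod and the wheel, I_wheel = I_rod * (T_wheel / T_rod)**2.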
IRWxx = rod_transverse_inertia*(rearwheel_period/rod_period)**2
# Compute front wheel inertias
cur.execute('select samplefrequency, duration, voltage from parametermeasurements.frontwheeltorsionalpendulumtimeseries;')
results = cur.fetchall()
frontwheel_periods = []
axes_frontwheel.append(fig_frontwheel_torsional.add_subplot(211))
for i, trial in enumerate(results):
f, d, v = trial
v = np.array(v)
t, Ts = np.linspace(0, d, len(v), endpoint=False, retstep=True)
assert (Ts == 1.0/f)
# Initial guess on parameters
p0 = [max(v)-v.mean(), 0.001, .4, np.pi/2, v.mean()]
popt, pcov = scipy.optimize.curve_fit(decaying_sinusoid, t, v, p0=p0,
ftol=ftol, xtol=xtol)
a, zeta, T, d, e = popt
frontwheel_periods.append(T)
v_fit = decaying_sinusoid(t, a, zeta, T, d, e)
axes_frontwheel[0].plot(t, v, colors[i] + ',')
axes_frontwheel[0].plot(t, v_fit, colors[i] + '-')
axes_frontwheel[0].set_title('Front wheel torsional pendulum')
#print("Front wheel periods: {0}".format(frontwheel_periods))
frontwheel_period = np.array(frontwheel_periods).mean()
#print("Mean front wheel period = {0}".format(frontwheel_period))
IFWxx = rod_transverse_inertia*(frontwheel_period/rod_period)**2
# Compute rear wheel spin inertias
cur.execute('select samplefrequency, duration, voltage from parametermeasurements.rearwheelcompoundpendulumtimeseries;')
results = cur.fetchall()
rearwheel_periods = []
axes_rearwheel.append(fig_rearwheel_torsional.add_subplot(212))
for i, trial in enumerate(results):
f, d, v = trial
v = np.array(v)
t, Ts = np.linspace(0, d, len(v), endpoint=False, retstep=True)
assert (Ts == 1.0/f)
# Initial guess on parameters
p0 = [max(v)-v.mean(), 0.001, 1.4, np.pi/2, v.mean()]
popt, pcov = scipy.optimize.curve_fit(decaying_sinusoid, t, v, p0=p0,
ftol=ftol, xtol=xtol)
a, zeta, T, d, e = popt
rearwheel_periods.append(T)
v_fit = decaying_sinusoid(t, a, zeta, T, d, e)
axes_rearwheel[1].plot(t, v, colors[i] + ',')
axes_rearwheel[1].plot(t, v_fit, colors[i] + '-')
axes_rearwheel[1].set_title('Rear wheel compound pendulum')
T_rw_compound = np.array(rearwheel_periods).mean()
# Compute front wheel spin inertias
cur.execute('select samplefrequency, duration, voltage from parametermeasurements.frontwheelcompoundpendulumtimeseries;')
results = cur.fetchall()
frontwheel_periods = []
axes_frontwheel.append(fig_frontwheel_torsional.add_subplot(212))
for i, trial in enumerate(results):
f, d, v = trial
v = np.array(v)
t, Ts = np.linspace(0, d, len(v), endpoint=False, retstep=True)
assert (Ts == 1.0/f)
# Initial guess on parameters
p0 = [max(v)-v.mean(), 0.001, 1.4, np.pi/2, v.mean()]
popt, pcov = scipy.optimize.curve_fit(decaying_sinusoid, t, v, p0=p0,
ftol=ftol, xtol=xtol)
a, zeta, T, d, e = popt
frontwheel_periods.append(T)
v_fit = decaying_sinusoid(t, a, zeta, T, d, e)
axes_frontwheel[1].plot(t, v, colors[i] + ',')
axes_frontwheel[1].plot(t, v_fit, colors[i] + '-')
axes_frontwheel[1].set_title('Front wheel compound pendulum')
T_fw_compound = np.array(frontwheel_periods).mean()
# Get masses, compound pendulum lengths
cur.execute("select mass from parametermeasurements.massmeasurements;")
results = cur.fetchall()
(m_rw,), (m_fw,), (m_fork,), (m_frame,) = results
cur.execute("select length from parametermeasurements.compoundpendulumlengths;")
(l_rw,), (l_fw,) = cur.fetchall()
# Rear wheel radius
cur.execute("select revolutions, distance from parametermeasurements.wheelrolloutmeasurements where body = 'Rear Wheel';")
results = cur.fetchall()
circumference_rear = np.array([d / N for N, d in results])
radius_rear = circumference_rear.mean() / (2.0*np.pi)
# Front wheel radius
cur.execute("select revolutions, distance from parametermeasurements.wheelrolloutmeasurements where body = 'Front Wheel';")
results = cur.fetchall()
circumference_front = np.array([d / N for N, d in results])
radius_front = circumference_front.mean() / (2.0*np.pi)
g = 9.81
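# Compound pendulum about a pivot at distance l from the mass center:
# T = 2*pi*sqrt((I_cm + m*l**2) / (m*g*l)), so I_cm = (T/(2*pi))**2 * m*g*l - m*l**2,
# which is what the four expressions below compute.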
IRWyy = (T_rw_compound / 2. / np.pi)**2. * m_rw * g * l_rw - m_rw * l_rw**2.
IFWyy = (T_fw_compound / 2. / np.pi)**2. * m_fw * g * l_fw - m_fw * l_fw**2.
l_frame = np.sqrt(frame_d**2.0 + frame_f**2.0)
l_fork = np.sqrt(fork_d**2.0 + fork_f**2.0)
IRyy = (T_frame_compound / 2. / np.pi)**2.0 * m_frame * g * l_frame - m_frame * l_frame**2.
IFyy = (T_fork_compound / 2. / np.pi)**2.0 * m_fork * g * l_fork - m_fork * l_fork**2.
# Axle measurements
cur.execute("select * from parametermeasurements.axleoffsetmeasurements;")
results = cur.fetchall()[0]
id, lr, lf, ls = results
# Rear gyrostat calculations
frame_com = [frame_d, 0.0, frame_f]
I_Frame = [IRxx, IRyy, IRzz, 0.0, 0.0, IRxz]
I_RW = [IRWxx, IRWyy, IRWxx, 0.0, 0.0, 0.0]
mr, r_RWO_RO, I_R_RO = GyrostatParameters(m_frame, m_rw, frame_com, I_Frame, I_RW)
rear = WheelAssemblyGyrostat()
rear.Ixx = I_R_RO[0]
rear.Iyy = I_R_RO[1]
rear.Izz = I_R_RO[2]
rear.Ixz = I_R_RO[5]
rear.J = IRWyy
rear.m = mr
rear.R = radius_rear
rear.a = r_RWO_RO[0]
rear.b = r_RWO_RO[2]
rear.c = lr
print("Rear Gyrostat:")
print(rear)
# Front Gyrostat calculations
fork_com = [fork_d, 0.0, fork_f]
I_Fork = [IFxx, IFyy, IFzz, 0.0, 0.0, IFxz]
I_FW = [IFWxx, IFWyy, IFWxx, 0.0, 0.0, 0.0]
mf, r_FWO_FO, I_F_FO = GyrostatParameters(m_fork, m_fw, fork_com, I_Fork, I_FW)
front = WheelAssemblyGyrostat()
front.Ixx = I_F_FO[0]
front.Iyy = I_F_FO[1]
front.Izz = I_F_FO[2]
front.Ixz = I_F_FO[5]
front.J = IFWyy
front.m = mf
front.R = radius_front
front.a = r_FWO_FO[0]
front.b = r_FWO_FO[2]
front.c = lf
print("Front gyrostat parameters:")
print(front)
print("ls = {0}".format(ls))
#plt.show()
cur.close()
conn.close()
|
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
from __future__ import print_function
import itertools as it, operator as op, functools as ft
from collections import deque, OrderedDict
import ConfigParser as configparser
import os, sys, io, logging, re, time, types, string, unicodedata, random
import json, subprocess, signal, fcntl, select, errno, base64, hashlib, ctypes
class Conf(object):
def __repr__(self): return repr(vars(self))
adjust_step = 5
max_level = 2 ** 16 # absolute value (as used in PA), displayed as "100%"
min_level = 0 # absolute value (as used in PA), displayed as "0%"
use_device_name = False
use_media_name = False
placeholder_media_names = 'audio stream', 'AudioStream', 'Output', 'ALSA Playback'
name_len_max = 100
name_cut_from = 'left' # "left" or "right"
name_show_level = True
overkill_redraw = False # if terminal gets resized often, might cause noticeable flickering
verbose = False
watchdog = False
watchdog_opts = None
watchdog_ping_interval = 20
watchdog_ping_timeout = 70
stream_params = None
broken_chars_replace = u'_'
focus_default = 'first' # either "first" or "last"
focus_new_items = True
focus_new_items_delay = 5.0 # min seconds since last focus change to trigger this
@staticmethod
def parse_bool(val, _states={
'1': True, 'yes': True, 'true': True, 'on': True,
'0': False, 'no': False, 'false': False, 'off': False }):
try: return _states[val.lower()]
except KeyError: raise ValueError(val)
def update_conf_from_file(conf, path_or_file):
if isinstance(path_or_file, types.StringTypes): path_or_file = open(path_or_file)
with path_or_file as src:
config = configparser.RawConfigParser(allow_no_value=True)
config.readfp(src)
for k in dir(conf):
if k.startswith('_'): continue
v = getattr(conf, k)
if isinstance(v, types.StringTypes):
get_val = lambda *a: force_str_type(config.get(*a), v)
elif isinstance(v, bool): get_val = config.getboolean
elif isinstance(v, (int, long)): get_val = config.getint
elif isinstance(v, float): get_val = lambda *a: float(config.get(*a))
else: continue # values with other types cannot be specified in config
for k_conf in k, k.replace('_', '-'):
try: setattr(conf, k, get_val('default', k_conf))
except configparser.Error: pass
conf.stream_params = OrderedDict(conf.stream_params or dict())
for sec in config.sections():
if not re.search(r'^stream\b.', sec): continue
params = list()
for k, v in config.items(sec):
match = re.search(r'^(match|equals)\[(.*)\]$', k)
if match:
v = re.compile(r'^{}$'.format(re.escape(v)) if match.group(1) == 'equals' else v)
params.append(('match', match.group(2), v))
else: params.append(('set', k, v))
conf.stream_params[sec] = params
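# Example ~/.pulseaudio-mixer-cli.cfg accepted by the parser above (values are illustrative):
#
#   [default]
#   adjust-step = 2
#   use-media-name = yes
#
#   [stream-firefox]
#   match[application.name] = Firefox
#   volume-max = 0.5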
def mono_time():
if not hasattr(mono_time, 'ts'):
class timespec(ctypes.Structure):
_fields_ = [('tv_sec', ctypes.c_long), ('tv_nsec', ctypes.c_long)]
librt = ctypes.CDLL('librt.so.1', use_errno=True)
mono_time.get = librt.clock_gettime
mono_time.get.argtypes = [ctypes.c_int, ctypes.POINTER(timespec)]
mono_time.ts = timespec
ts = mono_time.ts()
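    # clock id 4 is CLOCK_MONOTONIC_RAW on Linux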
if mono_time.get(4, ctypes.pointer(ts)) != 0:
err = ctypes.get_errno()
raise OSError(err, os.strerror(err))
return ts.tv_sec + ts.tv_nsec * 1e-9
dbus_abbrevs = dict(
pulse='org.PulseAudio.Core1',
props='org.freedesktop.DBus.Properties' )
dbus_abbrev = lambda k: dbus_abbrevs.get(k, k)
dbus_join = lambda *parts: '.'.join(map(dbus_abbrev, parts[:-1]) + parts[-1])
def dbus_bytes(dbus_arr, strip='\0' + string.whitespace):
return bytes(bytearray(dbus_arr).strip(strip))
def strip_dbus_types(data):
# Necessary because dbus types subclass python types,
# yet don't serialize in the same way - e.g. str(dbus.Byte(1)) is '\x01'
# (and not '1') - which messes up simple serializers like "json" module.
sdt = strip_dbus_types
if isinstance(data, dict): return dict((sdt(k), sdt(v)) for k,v in data.viewitems())
elif isinstance(data, (list, tuple)):
if data.signature == 'y': return dbus_bytes(data)
return map(sdt, data)
elif isinstance(data, types.NoneType): return data
for t in int, long, unicode, bytes, bool:
if isinstance(data, t): return t(data)
raise ValueError(( 'Failed to sanitize data type:'
' {} (mro: {}, value: {})' ).format(type(data), type(data).mro(), data))
p = p_ = lambda fmt,*a,**k: print(
bytes(fmt).format(*a,**k)
if isinstance(fmt, types.StringType)
else ([fmt, a, k] if a or k else repr(fmt)), file=sys.stderr )
def uid_str( seed=None, length=4,
_seed_gen=it.chain.from_iterable(it.imap(xrange, it.repeat(2**30))) ):
seed_bytes = length * 6 / 8
assert seed_bytes * 8 / 6 == length, [length, seed_bytes]
if seed is None: seed = '\0\0\0{:08x}'.format(next(_seed_gen))
seed = hashlib.sha256(bytes(seed)).digest()[:seed_bytes]
return base64.urlsafe_b64encode(seed)
def log_lines(log_func, lines, log_func_last=False):
if isinstance(lines, types.StringTypes):
lines = list(line.rstrip() for line in lines.rstrip().split('\n'))
uid = uid_str()
for n, line in enumerate(lines, 1):
if isinstance(line, types.StringTypes): line = '[%s] %s', uid, line
else: line = ('[{}] {}'.format(uid, line[0]),) + line[1:]
if log_func_last and n == len(lines): log_func_last(*line)
else: log_func(*line)
def force_bytes(bytes_or_unicode, encoding='utf-8', errors='backslashreplace'):
if isinstance(bytes_or_unicode, bytes): return bytes_or_unicode
return bytes_or_unicode.encode(encoding, errors)
def force_unicode(bytes_or_unicode, encoding='utf-8', errors='replace'):
if isinstance(bytes_or_unicode, unicode): return bytes_or_unicode
return bytes_or_unicode.decode(encoding, errors)
def force_str_type(bytes_or_unicode, val_or_type, **conv_kws):
if val_or_type is bytes or isinstance(val_or_type, bytes): f = force_bytes
elif val_or_type is unicode or isinstance(val_or_type, unicode): f = force_unicode
else: raise TypeError(val_or_type)
return f(bytes_or_unicode, **conv_kws)
def to_bytes(obj, **conv_kws):
if not isinstance(obj, types.StringTypes): obj = bytes(obj)
return force_bytes(obj, **conv_kws)
def strip_noise_bytes( obj, replace=u'_', encoding='utf-8',
byte_errors='backslashreplace', unicode_errors='replace' ):
    '''Converts obj to a byte representation, making sure
    there aren't any random weird chars that don't belong to any alphabet.
    Only ascii non-letters are allowed, as fancy symbols don't seem to work either.'''
if not isinstance(obj, types.StringTypes): obj = bytes(obj)
if isinstance(obj, bytes):
obj = force_unicode(obj, encoding=encoding, errors=byte_errors)
obj_ucs = list()
for uc in obj:
try:
unicodedata.name(uc)
if unicodedata.category(uc) != 'Ll': uc.encode('ascii')
except (ValueError, UnicodeEncodeError):
if replace: obj_ucs.append(replace)
else: obj_ucs.append(uc)
obj = u''.join(obj_ucs)
return force_bytes(obj, encoding=encoding, errors=unicode_errors)
class PAMixerDBusBridgeError(Exception): pass
class PAMixerDBusError(Exception): pass
class PAMixerIPCError(Exception): pass
class PAMixerInvalidAction(Exception): pass
class PAMixerDBusBridge(object):
'''Class to import/spawn glib/dbus eventloop in a
subprocess and communicate with it via signals and pipes.
Presents async kinda-rpc interface to a dbus loop running in separate pid.
Protocol is json lines over stdin/stdout pipes,
with signal sent to parent pid on any dbus async event (e.g. signal) from child.'''
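    # Example exchange, one JSON object per line (field values here are illustrative):
    #   parent -> child:  {"t": "call", "cid": "abcd", "func": "Get", "args": [...], "iface": "props"}
    #   child -> parent:  {"t": "call_result", "cid": "abcd", "val": ...}  or  {"t": "call_error", ...}
    #   child -> parent:  {"t": "signal", "name": "NewSink", "obj": "/org/pulseaudio/..."}  plus SIGUSR1 to the parent pid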
signal = signal.SIGUSR1 # used to break curses loop in the parent pid
poller = wakeup_fd = None
log_pipes = False # very noisy, but useful to see all chatter between parent/child
log_pipes_err_buffer = 30
handle_proplist_updates = False
poll_timeout = 1.0 # for recovery from race conditions, should be small
proxy_call_timeout = 5.0 # to crash instead of hangs
child_calls_cleanup = 0.03, 20.0 # chance, timeout
def __init__(self, child_cmd=None, core_pid=None, fatal=False, log_pipes=False):
self.child_cmd, self.core_pid, self.fatal = child_cmd, core_pid, fatal
self.child_sigs, self.child_calls, self._child_gc = deque(), dict(), set()
self.line_buff, self.log_pipes = '', log_pipes
self.line_debug = deque(maxlen=self.log_pipes_err_buffer)
def _child_readline_poll(self):
'child.stdout.readline() that also reacts to signals.'
# One shitty ipc instead of another... good job!
line = None
while True:
if '\n' in self.line_buff: line, self.line_buff = self.line_buff.split('\n', 1)
if line is not None: return line
try: evs = self.poller.poll(self.poll_timeout) or list()
except IOError as err:
if err.errno != errno.EINTR: raise
return ''
except KeyboardInterrupt: # ^C in console, probably because UI hangs
raise PAMixerIPCError('Poll call interrupted')
if not evs:
log.debug( 'Parent poll timeout event,'
' likely a race condition bug (timeout: %.1fs)', self.poll_timeout )
line = ''
for fd, ev in evs:
if fd == self.wakeup_fd.fileno():
try: self.wakeup_fd.read(1)
except IOError as err:
if err.errno != errno.EAGAIN: raise
log.debug('Got EAGAIN from'
' wakeup_fd returned by poll(), this should not happen')
if line is None: line = '' # make sure to break the loop here
else:
if not ev & select.EPOLLIN: raise IOError('Poll returned error event: {}'.format(ev))
try: chunk = self._child.stdout.read(2**20)
except IOError as err:
if err.errno != errno.EAGAIN: raise
continue
self.line_buff += chunk
def _child_readline(self, wait_for_cid=None, one_signal=False, init_line=False):
ts0 = mono_time()
while True:
if wait_for_cid and wait_for_cid in self.child_calls:
# XXX: check for errors indicating that dbus is gone here?
line_ts, line = self.child_calls.pop(wait_for_cid)
if random.random() < self.child_calls_cleanup[0]:
ts_deadline = mono_time() - self.child_calls_cleanup[1]
for k, (line_ts, line) in self.child_calls.items():
if line_ts < ts_deadline: self.child_calls.pop(k, None)
return line
try:
line = self._child_readline_poll().strip()
if not line: # likely a break on signal, shouldn't be too often
if mono_time() - ts0 > self.proxy_call_timeout:
raise PAMixerIPCError('Call timeout: {:.2f}s'.format(self.proxy_call_timeout))
continue
except PAMixerIPCError as err:
raise PAMixerIPCError(
'IPC error while waiting for event {!r} (line_buff: {!r}): {}'\
.format(dict(wait_for_cid=wait_for_cid, one_signal=one_signal), self.line_buff, err) )
if init_line:
assert line.strip() == 'ready', repr(line)
break
self.line_debug.append(('rpc-parent(raw) << %r', line))
if self.log_pipes: log.debug(*self.line_debug[-1])
try: line = json.loads(line)
except ValueError as err: # json module error doesn't provide the actual data
                raise ValueError('Failed to parse line ({}): {!r}'.format(err, line))
if line['t'] == 'signal':
self.child_sigs.append(line)
if one_signal: break
elif line['t'] in ['call_result', 'call_error']:
self.child_calls[line['cid']] = mono_time(), line
def call(self, func, args, **call_kws):
self.child_check_restart()
cid = uid_str()
call = dict(t='call', cid=cid, func=func, args=args, **call_kws)
try: call = json.dumps(call)
except Exception as err:
log.exception('Failed to encode data to json (error: %s), returning None: %r', err, call)
return None
assert '\n' not in call, repr(call)
res = Exception
for n in xrange(2): # even 2 is kinda generous - likely to be some bug
try:
self.line_debug.append(('rpc-parent(raw) >> %r', call))
if self.log_pipes: log.debug(*self.line_debug[-1])
self._child.stdin.write('{}\n'.format(call))
res = self._child_readline(wait_for_cid=cid)
if res['t'] == 'call_error'\
and res['err_type'] == u'org.freedesktop.DBus.Error.Disconnected':
# Reconnection works by restarting child, hence handled here
raise PAMixerDBusError(res['err_type'], res['err_msg'])
except Exception as err:
log.exception('Failure communicating with child pid, restarting it: %s', err)
if log.isEnabledFor(logging.INFO):
log_lines( log.info,
['Last pipe traffic (parent pid side):'] + list(self.line_debug) )
if self.fatal: break
self.child_kill()
self.child_check_restart()
else: break
if res is Exception:
raise PAMixerDBusBridgeError(
'Failed to communicate with child pid, even after restart' )
if res['t'] == 'call_error': raise PAMixerDBusError(res['err_type'], res['err_msg'])
assert res['t'] == 'call_result', res
return res['val']
def install_signal_handler(self, func):
self.signal_func = func
signal.signal(self.signal, self.signal_handler)
# Async signals also require async detection of when child died
# Popen can cause SIGCHLD, hence some workarounds
signal.signal(signal.SIGCHLD, self.child_check_restart)
def signal_handler(self, sig=None, frm=None):
log.debug('Signal handler triggered by: %s', sig)
if not self.child_sigs: self._child_readline(one_signal=True)
while True:
try: line = self.child_sigs.popleft()
except IndexError: break
self.signal_func(**line)
_child_proc = _child_check = None
@property
def _child(self): return self._child_proc
@_child.setter
def _child(self, proc):
if self._child_proc: self.poller.unregister(self._child_proc.stdout)
if proc:
flags = fcntl.fcntl(proc.stdout, fcntl.F_GETFL)
fcntl.fcntl(proc.stdout, fcntl.F_SETFL, flags | os.O_NONBLOCK)
self.poller.register(proc.stdout, select.EPOLLIN)
self._child_proc = proc
def child_start(self, gc_old_one=False):
if not self.poller:
self.poller, (r, w) = select.epoll(), os.pipe()
for fd in r, w:
fcntl.fcntl( fd, fcntl.F_SETFL,
fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK )
signal.set_wakeup_fd(w)
self.wakeup_fd = os.fdopen(r, 'rb', 0)
self.poller.register(self.wakeup_fd, select.EPOLLIN)
if self._child and gc_old_one:
err = self._child.wait()
self.line_buff = ''
self.line_debug.append(('--- child exit: %s', err))
self._child = None
if not self.child_cmd or self._child: return
self._child = subprocess.Popen( self.child_cmd,
stdin=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=True )
self._child_readline(init_line=True) # wait until it's ready
log.debug('Child process initialized (pid: %s)', self._child.pid)
def child_kill(self):
if self._child:
child, self._child = self._child, None
self._child_gc.add(child.pid)
try: child.kill() # no need to be nice here
except OSError as err:
log.debug('child_kill error: %s', err)
else:
log.debug('child_kill invoked with no child around')
def child_check_restart(self, sig=None, frm=None):
if self._child_check: return # likely due to SIGCHLD from Popen
self._child_check = True
try:
if self._child_gc: # these are cleaned-up just to avoid keeping zombies around
for pid in list(self._child_gc):
try: res = os.waitpid(pid, os.WNOHANG)
except OSError: res = pid, None
if res and res[0]: self._child_gc.remove(pid)
self.child_start()
if not self._child: return # can't be started
if self._child.poll() is not None:
log.debug('glib/dbus child pid (%s) died. restarting it', self._child.pid)
self.child_start(gc_old_one=True)
finally: self._child_check = False
def _get_bus_address(self):
srv_addr = os.environ.get('PULSE_DBUS_SERVER')
if not srv_addr and os.access('/run/pulse/dbus-socket', os.R_OK | os.W_OK):
srv_addr = 'unix:path=/run/pulse/dbus-socket' # well-known system-wide daemon socket
if not srv_addr:
srv_addr = self._dbus.SessionBus()\
.get_object('org.PulseAudio1', '/org/pulseaudio/server_lookup1')\
.Get( 'org.PulseAudio.ServerLookup1',
'Address', dbus_interface='org.freedesktop.DBus.Properties' )
return srv_addr
def _get_bus(self, srv_addr=None, dont_start=False):
while not srv_addr:
try:
srv_addr = self._get_bus_address()
log.debug('Got pa-server bus from dbus: %s', srv_addr)
except self._dbus.exceptions.DBusException as err:
if dont_start or srv_addr is False or\
err.get_dbus_name() != 'org.freedesktop.DBus.Error.ServiceUnknown':
raise
subprocess.Popen(
['pulseaudio', '--start', '--log-target=syslog'],
stdout=open(os.devnull, 'wb'), stderr=subprocess.STDOUT ).wait()
log.debug('Started new pa-server instance')
# from time import sleep
# sleep(1) # XXX: still needed?
srv_addr = False # to avoid endless loop
return self._dbus.connection.Connection(srv_addr)
def _dbus_val(self, args, translate=None):
if translate == 'volume': args[-1] = list(self._dbus.UInt32(round(v)) for v in args[-1])
if translate == 'path': args[-1] = self._dbus.ObjectPath(args[-1])
return args
def _loop_exc_stop(self, exc_info=None):
self.loop_exc = exc_info or sys.exc_info()
assert self.loop_exc
self.loop.quit()
def _glib_err_wrap(func):
@ft.wraps(func)
def _wrapper(self, *args, **kws):
try: func(self, *args, **kws)
except Exception as err:
exc_info = sys.exc_info()
log.exception('glib handler failed: %s', err)
self._loop_exc_stop(exc_info)
return True # glib disables event handler otherwise
return _wrapper
@_glib_err_wrap
def _core_notify(self, _signal=False, **kws):
chunk = dict(**kws)
self.line_debug.append(('rpc-child(py) >> %s', chunk))
if self.log_pipes: log.debug(*self.line_debug[-1])
chunk = json.dumps(chunk)
assert '\n' not in chunk, chunk
try:
if _signal: os.kill(self.core_pid, self.signal)
self.stdout.write('{}\n'.format(chunk))
except IOError: return self.loop.quit() # parent is gone, we're done too
@_glib_err_wrap
def _rpc_call(self, buff, stream=None, ev=None):
assert stream is self.stdin, [stream, self.stdin]
if ev is None: ev = self._glib.IO_IN
if ev & (self._glib.IO_ERR | self._glib.IO_HUP):
return self.loop.quit() # parent is gone, we're done too
elif ev & self._glib.IO_IN:
while True:
try: chunk = self.stdin.read(2**20)
except IOError as err:
if err.errno != errno.EAGAIN: raise
chunk = None
if not chunk: break
buff.append(chunk)
while True:
# Detect if there are any full requests buffered
for n, chunk in enumerate(buff):
if '\n' in chunk: break
else: break # no more full requests
# Read/decode next request from buffer
req = list()
for m in xrange(n+1):
chunk = buff.popleft()
if m == n:
chunk, chunk_next = chunk.split('\n', 1)
buff.appendleft(chunk_next)
assert '\n' not in chunk, chunk
req.append(chunk)
req = json.loads(''.join(req))
self.line_debug.append(('rpc-child(py) << %s', req))
if self.log_pipes: log.debug(*self.line_debug[-1])
# Run dbus call and return the result, synchronously
assert req['t'] == 'call', req
func, kws = req['func'], dict()
obj_path, iface = req.get('obj'), req.get('iface')
args, translate = req['args'], req.get('translate')
if iface: kws['dbus_interface'] = dbus_abbrev(iface)
if translate: args = self._dbus_val(args, translate)
obj = self.core if not obj_path\
else self.bus.get_object(object_path=obj_path) # XXX: bus gone handling
log.debug('DBus call: %s %s %s', func, args, kws)
try: res = getattr(obj, func)(*args, **kws)
except self._dbus.exceptions.DBusException as err:
self._core_notify( t='call_error', cid=req['cid'],
err_type=err.get_dbus_name(), err_msg=err.message )
else:
res = strip_dbus_types(res)
self._core_notify(t='call_result', cid=req['cid'], val=res)
else:
log.warn('Unrecognized event type from glib: %r', ev)
@_glib_err_wrap
def _relay_signal(self, data=None, sig_name=None, src_obj_path=None):
props = sig_name == 'PropertyListUpdated' and strip_dbus_types(data)
log.debug('DBus signal (from %s): %s %s', src_obj_path, sig_name, props or data)
if props:
self._core_notify( _signal=True, t='signal',
name='PropertyListUpdated', obj=src_obj_path, props=props )
else:
self._core_notify(_signal=True, t='signal', name=sig_name, obj=data)
def child_run(self):
from dbus.mainloop.glib import DBusGMainLoop
from gi.repository import GLib
import dbus
def excepthook(t, v, tb, hook=sys.excepthook):
time.sleep(0.2) # to dump parent/child tracebacks non-interleaved
return hook(t, v, tb)
sys.excepthook = excepthook
self._dbus, self._glib = dbus, GLib
# Disable stdin/stdout buffering
self.stdout = os.fdopen(sys.stdout.fileno(), 'wb', 0)
self.stdin = os.fdopen(sys.stdin.fileno(), 'rb', 0)
self.stdout.write('ready\n') # wait for main process to get ready, signal readiness
log.debug('DBus signal handler subprocess started')
DBusGMainLoop(set_as_default=True)
self.loop, self.loop_exc = GLib.MainLoop(), None
self.bus = self._get_bus() # XXX: bus gone handling
self.core = self.bus.get_object(object_path='/org/pulseaudio/core1')
rpc_buffer = deque()
flags = fcntl.fcntl(self.stdin, fcntl.F_GETFL)
fcntl.fcntl(self.stdin, fcntl.F_SETFL, flags | os.O_NONBLOCK)
self._glib.io_add_watch( self.stdin,
self._glib.PRIORITY_DEFAULT,
self._glib.IO_IN | self._glib.IO_ERR | self._glib.IO_HUP,
ft.partial(self._rpc_call, rpc_buffer) )
signals = ['NewSink', 'SinkRemoved', 'NewPlaybackStream', 'PlaybackStreamRemoved']
if self.handle_proplist_updates: signals.append('Stream.PropertyListUpdated')
for sig_name in signals:
sig_name_last = sig_name.rsplit('.')[-1]
self.bus.add_signal_receiver(
ft.partial(self._relay_signal, sig_name=sig_name_last),
sig_name_last, path_keyword='src_obj_path' )
self.core.ListenForSignal(dbus_join('pulse', [sig_name]), self._dbus.Array(signature='o'))
self._glib.unix_signal_add(self._glib.PRIORITY_HIGH, signal.SIGTERM, self.loop.quit)
try: self.loop.run()
except KeyboardInterrupt: pass
# XXX: wrapper loop here, in case of *clean* loop.quit() yet dbus not being dead
if self.loop_exc: raise self.loop_exc[0], self.loop_exc[1], self.loop_exc[2]
class PAMixerMenuItem(object):
dbus_types = dict(sink='Device', stream='Stream')
def __init__(self, menu, obj_type, obj_path):
self.menu, self.t, self.conf, self.call = menu, obj_type, menu.conf, menu.call
self.dbus_path, self.dbus_type = obj_path, dbus_join('pulse', [self.dbus_types[self.t]])
self.hidden, self.created_ts = False, mono_time()
self.update_name()
if self.conf.dump_stream_params:
from pprint import pprint
dump = OrderedDict(path=self.dbus_path, name=self.name)
dump['props'] = sorted(self.props.items())
pprint(dump.items(), sys.stderr)
def _prop_get(self, k, _err_none=dbus_join('pulse', ['NoSuchPropertyError'])):
try: return self.call('Get', [self.dbus_type, k], obj=self.dbus_path, iface='props')
except PAMixerDBusError as err:
if err.args[0] != _err_none: raise
def __repr__(self):
return '<{}[{:x}] {}[{}]: {}>'.format(
self.__class__.__name__, id(self), self.t, uid_str(self.dbus_path), self.name )
def update_name(self, props_update=None):
if props_update is None: self.props = self._prop_get('PropertyList')
else: self.props.update(props_update)
name = self._get_name()
if not name: name = self._get_name_unique('null')
self.name = force_unicode(name)
def _get_name_unique(self, name):
return '{} #{}'.format(force_bytes(name), uid_str())
def _get_name_descriptive(self):
'Can probably fail with KeyError if something is really wrong with stream/device props.'
ext, props = None, dict(
(force_bytes(k), strip_noise_bytes(v, self.conf.broken_chars_replace))
for k,v in self.props.viewitems() )
if self.t == 'stream':
if self.conf.use_media_name:
name = props.get('media.name')
if name and name not in self.conf.placeholder_media_names: return name
try: name = props['application.name']
except KeyError: # some synthetic stream with non-descriptive name
name = self._get_name_unique(props['media.name'])
ext = '({application.process.user}@'\
'{application.process.host}:{application.process.id})'
elif self.t == 'sink':
if self.conf.use_device_name: name = self._prop_get('Name')
else:
name = props.get('alsa.id')\
or props.get('device.description') or props.get('device.api')
if not name:
try: name = '{}.{}'.format(props['device.api'], props['device.string'])
except KeyError:
                        name = self._get_name_unique(props['device.description'])
ext = '({device.profile.name}@{alsa.driver_name})'
else: raise KeyError('Unknown menu-item type (for naming): {}'.format(self.t))
if ext:
try:
name = '{} {}'.format( name,
re.sub(r'\{([^}]+)\}', r'{}', ext).format(
*op.itemgetter(*re.findall(r'\{([^}]+)\}', ext))(props) ) )
except KeyError as err:
log.debug( 'Unable to get extended descriptive name'
                    ' (type: %r, path: %s) due to missing key: %s', self.t, self.dbus_path, err )
return name
def _get_name(self):
try: return self._get_name_descriptive()
except Exception as err:
if self.menu.fatal: raise
log.info('Failed to get descriptive name for %r: %s', self.t, self.dbus_path)
return self._get_name_unique(self.t)
def _dbus_prop(name=None, dbus_name=None, translate=None):
dbus_name = dbus_name or name.title()
def dbus_prop_get(self): return self._prop_get(dbus_name)
def dbus_prop_set(self, val):
return self.call( 'Set', [self.dbus_type, dbus_name, val],
obj=self.dbus_path, iface='props', translate=translate )
return property( dbus_prop_get, dbus_prop_set,
            lambda self: None, 'DBus {} property proxy'.format(dbus_name) )
muted = _dbus_prop('mute')
volume_chans = _dbus_prop('volume', translate='volume') # tuple of uints
@property
def volume(self):
'Volume as one float in 0-1 range.'
volume_chans = self.volume_chans
volume_abs = sum(volume_chans) / float(len(volume_chans))
volume_abs = max(0, volume_abs - self.conf.min_level)
return min(1.0, volume_abs / float(self.conf.max_level))
@volume.setter
def volume(self, val):
log.debug('Setting volume: %s %s', val, self)
val, chans = min(1.0, max(0, val)), len(self.volume_chans)
self.volume_chans = [int(val * self.conf.max_level) + self.conf.min_level] * chans
_port_dbus_path = _dbus_prop(dbus_name='ActivePort', translate='path')
@property
def port(self):
if self.t != 'sink': return
port_dbus_path = self._port_dbus_path
if port_dbus_path:
return self.call('Get', [dbus_join( 'pulse',
['DevicePort'] ), 'Name'], obj=port_dbus_path, iface='props')
@port.setter
def port(self, name):
if self.t != 'sink':
raise PAMixerInvalidAction(( 'Setting ports is only'
' valid for {!r}-type streams, not {!r}-type' ).format('sink', self.t))
self._port_dbus_path = self.call( 'GetPortByName',
[name], obj=self.dbus_path, iface=self.dbus_type )
def muted_toggle(self): self.muted = not self.muted
def volume_change(self, delta): self.volume += delta
def get_next(self): return self.menu.item_after(self)
def get_prev(self): return self.menu.item_before(self)
class PAMixerMenu(object):
focus_policies = dict(first=op.itemgetter(0), last=op.itemgetter(-1))
def __init__(self, dbus_bridge, conf=None, fatal=False):
self.call, self.fatal, self.conf = dbus_bridge.call, fatal, conf or Conf()
self.items, self.item_objs = list(), OrderedDict()
self._update_lock = self._update_signal = False
def update(self):
self._update_lock, self._update_signal = True, False
obj_paths_new, obj_paths_gone = set(), set(self.item_objs)
for obj_type, prop in [('sink', 'Sinks'), ('stream', 'PlaybackStreams')]:
for obj_path in self.call('Get', [dbus_abbrev('pulse'), prop], iface='props'):
if obj_path not in self.item_objs:
obj_paths_new.add(obj_path)
self.item_objs[obj_path] = PAMixerMenuItem(self, obj_type, obj_path)
else: obj_paths_gone.remove(obj_path)
for obj_path in obj_paths_gone: del self.item_objs[obj_path]
for obj_path in obj_paths_new: self.apply_stream_params(self.item_objs[obj_path])
# Sort sinks to be always on top
sinks, streams, ordered = list(), list(), True
for obj_path, item in self.item_objs.viewitems():
if item.t == 'sink':
if streams: ordered = False
sinks.append((obj_path, item))
else: streams.append((obj_path, item))
if not ordered:
self.item_objs.clear()
for obj_path, item in it.chain(sinks, streams): self.item_objs[obj_path] = item
self.items = list(item for item in self.item_objs.values() if not item.hidden)
while self._update_signal: self.update() # change was signaled during update
self._update_lock = False
def update_signal(self, name, obj, props=None, **signal_kws):
# XXX: do less than full refresh here
log.debug('update_signal << %s %s', name, obj)
if self._update_lock: self._update_signal = True
elif name == 'PropertyListUpdated':
item = self.item_objs.get(obj)
if item: item.update_name(props_update=props)
def apply_stream_params(self, item):
for sec, checks in (self.conf.stream_params or dict()).viewitems():
match, params = True, OrderedDict()
for t, k, v in checks:
if t == 'match':
if match and not v.search(item.props.get(k, '')): match = False
elif t == 'set': params[k] = v
else: raise ValueError((t, k, v))
if match:
log.debug( 'Matched stream %r (name: %r)'
' to config section: %s', item.dbus_path, item.name, sec )
for k, v in params.viewitems():
m = re.search(r'^volume-(min|max|set)$', k)
if m:
vol = float(v)
if m.group(1) == 'max':
if item.volume > vol: item.volume = vol
elif m.group(1) == 'min':
if item.volume < vol: item.volume = vol
elif m.group(1) == 'set': item.volume = vol
elif k == 'hidden': item.hidden = self.conf.parse_bool(v)
elif k == 'port':
try: item.port = v
except PAMixerInvalidAction as err:
log.error( 'Unable to set port for stream %r'
' (name: %r, config section: %s): %s', item, item.name, sec, err )
else:
log.debug('Unrecognized stream parameter (section: %r): %r (value: %r)', sec, k, v)
@property
def item_list(self):
self.update()
return self.items
def item_default(self):
if not self.items: return
func = self.focus_policies[self.conf.focus_default]
return func(self.items)
def item_newer(self, ts):
items = sorted(self.items, key=op.attrgetter('created_ts'), reverse=True)
if items and items[0].created_ts > ts: return items[0]
def item_after(self, item=None):
if item:
for item2 in self.items:
if item is StopIteration: return item2
if item2.dbus_path == item.dbus_path: item = StopIteration
return self.item_default()
def item_before(self, item=None):
if item:
item_prev = None
for item2 in self.items:
if item2.dbus_path == item.dbus_path:
if not item_prev: break
return item_prev
item_prev = item2
return self.item_default()
class PAMixerUI(object):
item_len_min = 10
bar_len_min = 10
bar_caps_func = staticmethod(lambda bar='': ' [ ' + bar + ' ]')
border = 1
name_cut_funcs = dict(left=lambda n,c: n[max(0, len(n) - c):], right=lambda n,c: n[:c])
def __init__(self, menu):
self.menu, self.conf = menu, menu.conf
def __enter__(self):
self.c = None
return self
def __exit__(self, exc_t, exc_val, exc_tb):
if self.c:
self.c.endwin()
self.c = None
def c_win_init(self):
# Used to create a window with borders here,
# but these borders don't seem to be cleared properly.
# So using stdscr now, and painting borders in the app.
win = self.c_stdscr
win.keypad(True)
win.bkgdset(' ')
return win
def c_win_size(self, win):
'Returns "nlines, ncols, begin_y, begin_x", taking border into account.'
size = win.getmaxyx()
nlines, ncols = max(1, size[0] - 2 * self.border), max(1, size[1] - 2 * self.border)
return nlines, ncols, min(self.border, size[0]), min(self.border, size[1])
def c_win_draw(self, win, items, item_hl):
win.erase()
if not items: return
win_rows, win_len, pad_x, pad_y = self.c_win_size(win)
if win_len <= 1: return # nothing fits
# Fit stuff vertically
if win_rows < len(items) + 1: # pick/display items near highlighted one
pos, offset = items.index(item_hl), 1
items, items_fit = dict(enumerate(items)), {pos: items[pos]}
while True:
ps = list(p for p in [pos + offset, pos - offset] if p in items)
if not ps: break
for p in ps:
items_fit[p] = items[p]
if win_rows <= len(items_fit) + 1: break
else:
offset += 1
continue
break
items = map(op.itemgetter(1), sorted(items_fit.viewitems(), key=op.itemgetter(0)))
# Fit stuff horizontally
mute_button_len, level_len = 2, 5
item_len_max = max(len(item.name) for item in items)
if self.conf.name_show_level: item_len_max += level_len
if self.conf.name_len_max:
item_len_max = min(item_len_max, self.conf.name_len_max)
bar_len = win_len - item_len_max - mute_button_len - len(self.bar_caps_func())
if bar_len < self.bar_len_min:
item_len_max = max(self.item_len_min, item_len_max + bar_len - self.bar_len_min)
bar_len = win_len - item_len_max - mute_button_len - len(self.bar_caps_func())
if bar_len <= 0: item_len_max = win_len # just draw labels
if item_len_max < self.item_len_min: item_len_max = max(len(item.name) for item in items)
for row, item in enumerate(items):
            if row >= win_rows - 1: break # not sure why the bottom window row seems to be unusable
row += pad_y
attrs = self.c.A_REVERSE if item is item_hl else self.c.A_NORMAL
name_len = item_len_max - bool(self.conf.name_show_level) * level_len
name = force_bytes(self.name_cut_funcs[self.conf.name_cut_from](item.name, name_len))
if self.conf.name_show_level:
level = max(0, min(100, int(round(item.volume * 100))))
if level == 0: level = '--'
elif level == 100: level = '++'
else: level = '{:>2d}'.format(level)
name = '[{}] {}'.format(level, name)
win.addstr(row, 0, ' ' * pad_x)
win.addstr(row, pad_x, name, attrs)
item_name_end = item_len_max + pad_x
if win_len > item_name_end + mute_button_len:
if item.muted: mute_button = ' M'
else: mute_button = ' -'
win.addstr(row, item_name_end, mute_button)
if bar_len > 0:
bar_fill = int(round(item.volume * bar_len))
bar = self.bar_caps_func('#' * bar_fill + '-' * (bar_len - bar_fill))
win.addstr(row, item_name_end + mute_button_len, bar)
def c_key(self, k):
if len(k) == 1: return ord(k)
return getattr(self.c, 'key_{}'.format(k).upper())
_item_hl = _item_hl_ts = None
@property
def item_hl(self):
if self._item_hl and self.conf.focus_new_items:
ts = self._item_hl_ts
if ts: ts += self.conf.focus_new_items_delay or 0
item = self.menu.item_newer(ts)
if item: self._item_hl = item
return self._item_hl
@item_hl.setter
def item_hl(self, item):
self._item_hl, self._item_hl_ts = item, mono_time()
def _run(self, stdscr):
c, self.c_stdscr = self.c, stdscr
key_match = lambda key,*choices: key in map(self.c_key, choices)
c.curs_set(0)
c.use_default_colors()
win = self.c_win_init()
self.conf.adjust_step /= 100.0
while True:
try:
# XXX: full refresh on every keypress is a bit excessive
items, item_hl = self.menu.item_list, self.item_hl
if item_hl is None: item_hl = self.item_hl = self.menu.item_default()
if item_hl not in items: item_hl = self.menu.item_default()
self.c_win_draw(win, items, item_hl)
except PAMixerDBusError as err:
if err.args[0] == 'org.freedesktop.DBus.Error.UnknownMethod': continue
raise # XXX: check all the old pitfalls here
watchdog_handle_ping()
key = None
while True:
try: key = win.getch()
except KeyboardInterrupt: key = self.c_key('q')
except c.error: break
try: key_name = c.keyname(key)
except ValueError:
key_name = 'unknown' # e.g. "-1"
if watchdog_handle_ping():
assert win.getch() == -1 # no idea why
continue
break
if key is None: continue
log.debug('Keypress event: %s (%r)', key, key_name)
if item_hl:
if key_match(key, 'up', 'k', 'p'): self.item_hl = item_hl.get_prev()
elif key_match(key, 'down', 'j', 'n'): self.item_hl = item_hl.get_next()
elif key_match(key, 'left', 'h', 'b'):
item_hl.volume_change(-self.conf.adjust_step)
elif key_match(key, 'right', 'l', 'f'): item_hl.volume_change(self.conf.adjust_step)
elif key_match(key, ' ', 'm'): item_hl.muted_toggle()
elif key_name.isdigit(): # 1-0 keyboard row
item_hl.volume = (float(key_name) or 10.0) / 10 # 0 is 100%
if key_match(key, 'resize', '\f'):
if self.conf.overkill_redraw:
c.endwin()
stdscr.refresh()
win = self.c_win_init()
else:
win.resize(*win.getmaxyx())
elif key_match(key, 'q'): break
def run(self):
import locale, curses # has a ton of global state
locale.setlocale(locale.LC_ALL, '') # see top of "curses" module doc for rationale
self.c = curses
self.c.wrapper(self._run)
def watchdog_run(conf, args):
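    # Watchdog protocol: this pid pings the main process with SIGUSR2 every
    # watchdog_ping_interval seconds; the main process pongs back with SIGUSR2
    # (see watchdog_handle / watchdog_handle_ping). If no pong arrives within
    # watchdog_ping_timeout, SIGALRM is sent as the exit-signal and the process is
    # restarted (or given up on, to avoid an endless restart loop).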
signal.signal(signal.SIGUSR2, watchdog_run_pong)
proc = proc_poller = None
proc_pongs = 0
while True:
if not proc:
r, w = os.pipe()
proc_opts = list(args) + [
'--parent-pid-do-not-use', 'w{}-{}'.format(os.getpid(), w) ]
proc = subprocess.Popen(self_exec_cmd(*proc_opts))
os.close(w)
assert os.read(r, 1) == '!'
proc_poller = select.epoll()
proc_poller.register(r, select.EPOLLHUP)
watchdog_run.ping_last_ts = ts_ping = ts_timeout = None
ts = mono_time()
if ts_timeout and ts >= ts_timeout:
proc_running, proc_restart = proc.poll() is None, proc_pongs >= 2
log.debug(
'wd: !!! sending exit-signal (to %s, running: %s, restart: %s, pongs: %s) !!!',
proc.pid, proc_running, proc_restart, proc_pongs )
if proc:
if proc_running:
try:
proc.send_signal(signal.SIGALRM)
if not proc_poller.poll(min(conf.watchdog_ping_timeout, 3)):
log.debug('wd: killing stuck pid %s', proc.pid)
proc.send_signal(signal.SIGCONT)
proc.kill()
except (OSError, IOError) as err:
if err.errno not in [errno.ESRCH, errno.EINTR]: raise
proc.wait()
proc, proc_pongs = None, 0
if not proc_restart:
log.error( 'wd: main process failed without enough'
' pongs from it, not restarting to avoid endless restart-loop' )
return 1
continue
ts_ping_last = getattr(watchdog_run, 'ping_last_ts', None)
if ts_ping_last:
watchdog_run.ping_last_ts = None
proc_pongs += 1
ts_timeout = ts_ping_last + conf.watchdog_ping_timeout
if not ts_ping: ts_ping = ts_ping_last + conf.watchdog_ping_interval
if ts_ping and ts >= ts_ping:
log.debug('wd: sending ping (to %s)', proc.pid)
try: proc.send_signal(signal.SIGUSR2)
except OSError as err:
if err.errno != errno.ESRCH: raise
continue
while ts_ping <= ts: ts_ping = ts_ping + conf.watchdog_ping_interval
deadline = min(ts_ping or ts_timeout, ts_timeout or ts_ping)\
if ts_ping or ts_timeout else (ts + conf.watchdog_ping_interval)
try: proc_poller.poll(max(0.1, deadline - ts))
except IOError: pass
if proc.poll() is not None:
err = proc.wait()
if err != 0: proc = None
log.debug( 'wd: detected process exit'
' (code: %s), %s', err, 'exiting' if proc else 'restarting it' )
if proc: return err
def watchdog_run_pong(sig=None, frm=None):
watchdog_run.ping_last_ts = mono_time()
# log.debug('wd: received pong (ts: %s)', watchdog_run.ping_last_ts)
def watchdog_handle(pid):
def ping_recv(sig=None, frm=None):
# log.debug('wd-handler: received ping from wd (%s)', pid)
watchdog_handle.pong_pid = pid
def die(sig, frm):
log.debug('wd-handler: !!! received exit-signal (from %s) !!!', pid)
try: curses.endwin()
except: pass
sys.exit(1)
signal.signal(signal.SIGUSR2, ping_recv)
signal.signal(signal.SIGALRM, die)
ping_recv()
watchdog_handle_ping()
def watchdog_handle_ping():
pid = getattr(watchdog_handle, 'pong_pid', None)
if not pid: return
log.debug('wd-handler: sending pong to wd-pid (%s)', pid)
watchdog_handle.pong_pid = None
os.kill(pid, signal.SIGUSR2)
return True
def self_exec_cmd(*args):
'Returns list of [binary, args ...] to run this script with provided args.'
args = [__file__] + list(args)
if os.access(__file__, os.X_OK): return args
return [sys.executable or 'python2'] + args
def main(args=None):
conf = Conf()
conf_file = os.path.expanduser('~/.pulseaudio-mixer-cli.cfg')
try: conf_file = open(conf_file)
except (OSError, IOError) as err: pass
else: update_conf_from_file(conf, conf_file)
import argparse
parser = argparse.ArgumentParser(description='Command-line PulseAudio mixer tool.')
parser.add_argument('-a', '--adjust-step',
action='store', type=int, metavar='step', default=conf.adjust_step,
help='Adjustment for a single keypress in interactive mode (0-100%%, default: %(default)s%%).')
parser.add_argument('-l', '--max-level',
action='store', type=int, metavar='level', default=conf.max_level,
help='Value to treat as max (default: %(default)s).')
parser.add_argument('-n', '--use-media-name',
action='store_true', default=conf.use_media_name,
help='Display streams by "media.name" property, if possible.'
' Default is to prefer application name and process properties.')
parser.add_argument('-v', '--verbose',
action='store_true', default=conf.verbose,
        help="Don't close stderr to see any sort of errors (which"
' mess up curses interface, thus silenced that way by default).')
parser.add_argument('-w', '--watchdog',
action='store_true', default=conf.watchdog,
help='Run watchdog pid to restart the thing if it hangs.')
parser.add_argument('--dump-stream-params',
action='store_true', help='Dump all parameters for each stream to stderr.')
parser.add_argument('--debug', action='store_true', help='Verbose operation mode.')
parser.add_argument('--debug-pipes', action='store_true',
help='Also logs chatter between parent/child pids. Very noisy, only useful with --debug.')
parser.add_argument('--fatal', action='store_true',
        help="Don't try too hard to recover from errors. For debugging purposes only.")
parser.add_argument('--parent-pid-do-not-use', metavar='pid',
help='Used internally to spawn dbus sub-pid, should not be used directly.')
args = sys.argv[1:] if args is None else args
opts = parser.parse_args(args)
for k,v in vars(opts).viewitems(): setattr(conf, k, v)
del opts
global log, print
log_pid = os.getpid()
logging.basicConfig(
level=logging.DEBUG if conf.debug else logging.WARNING,
format='%(asctime)s :: {} %(levelname)s :: %(message)s'.format(uid_str(log_pid)),
datefmt='%Y-%m-%d %H:%M:%S' )
log = logging.getLogger()
print = ft.partial(print, file=sys.stderr) # stdout is used by curses or as a pipe (child)
log.debug('Starting script (child: %s, pid: %s)', bool(conf.parent_pid_do_not_use), log_pid)
if conf.parent_pid_do_not_use:
pid = conf.parent_pid_do_not_use
if pid.startswith('w'): conf.watchdog_opts = map(int, pid.lstrip('w').split('-', 1))
else:
dbus_bridge = PAMixerDBusBridge(core_pid=int(pid), log_pipes=conf.debug_pipes)
if conf.use_media_name: dbus_bridge.handle_proplist_updates = True
try: return dbus_bridge.child_run()
finally:
if log.isEnabledFor(logging.INFO):
log_lines( log.info,
['Last pipe traffic (child pid side):'] + list(dbus_bridge.line_debug) )
if not conf.watchdog_opts:
if conf.watchdog:
try: return watchdog_run(conf, args)
except KeyboardInterrupt: return 0
else:
pid, fd = conf.watchdog_opts
os.write(fd, '!')
watchdog_handle(pid)
dbus_bridge = ['--parent-pid-do-not-use', bytes(os.getpid())]
if conf.debug:
dbus_bridge += ['--debug']
if conf.debug_pipes: dbus_bridge += ['--debug-pipes']
if conf.use_media_name: dbus_bridge += ['--use-media-name']
dbus_bridge = PAMixerDBusBridge(
self_exec_cmd(*dbus_bridge), fatal=conf.fatal, log_pipes=conf.debug_pipes )
menu = PAMixerMenu(dbus_bridge, conf, fatal=conf.fatal)
dbus_bridge.install_signal_handler(menu.update_signal)
dbus_bridge.child_start()
with PAMixerUI(menu) as curses_ui:
# Any output will mess-up curses ui, so try to close sys.stderr if possible
if not conf.verbose and not conf.debug\
and not conf.dump_stream_params: sys.stderr.close()
log.debug('Entering curses ui loop...')
try: curses_ui.run()
except:
if log.isEnabledFor(logging.INFO):
log_lines( log.info,
['Last pipe traffic (parent pid side):'] + list(dbus_bridge.line_debug) )
raise
log.debug('Finished')
if __name__ == '__main__': sys.exit(main())
|
|
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Font(_BaseTraceHierarchyType):
# class properties
# --------------------
_parent_path_str = "heatmap.hoverlabel"
_path_str = "heatmap.hoverlabel.font"
_valid_props = {"color", "colorsrc", "family", "familysrc", "size", "sizesrc"}
# color
# -----
@property
def color(self):
"""
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
- A list or array of any of the above
Returns
-------
str|numpy.ndarray
"""
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
# colorsrc
# --------
@property
def colorsrc(self):
"""
        Sets the source reference on Chart Studio Cloud for color.
The 'colorsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["colorsrc"]
@colorsrc.setter
def colorsrc(self, val):
self["colorsrc"] = val
# family
# ------
@property
def family(self):
"""
HTML font family - the typeface that will be applied by the web
browser. The web browser will only be able to apply a font if
        it is available on the system on which it operates. Provide
multiple font families, separated by commas, to indicate the
preference in which to apply fonts if they aren't available on
the system. The Chart Studio Cloud (at https://chart-
studio.plotly.com or on-premise) generates images on a server,
where only a select number of fonts are installed and
supported. These include "Arial", "Balto", "Courier New",
"Droid Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
The 'family' property is a string and must be specified as:
- A non-empty string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["family"]
@family.setter
def family(self, val):
self["family"] = val
# familysrc
# ---------
@property
def familysrc(self):
"""
        Sets the source reference on Chart Studio Cloud for family.
The 'familysrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["familysrc"]
@familysrc.setter
def familysrc(self, val):
self["familysrc"] = val
# size
# ----
@property
def size(self):
"""
The 'size' property is a number and may be specified as:
- An int or float in the interval [1, inf]
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
int|float|numpy.ndarray
"""
return self["size"]
@size.setter
def size(self, val):
self["size"] = val
# sizesrc
# -------
@property
def sizesrc(self):
"""
        Sets the source reference on Chart Studio Cloud for size.
The 'sizesrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["sizesrc"]
@sizesrc.setter
def sizesrc(self, val):
self["sizesrc"] = val
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
color
colorsrc
Sets the source reference on Chart Studio Cloud for
            color.
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
            apply a font if it is available on the system on which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
Studio Cloud (at https://chart-studio.plotly.com or on-
premise) generates images on a server, where only a
select number of fonts are installed and supported.
These include "Arial", "Balto", "Courier New", "Droid
Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT
Sans Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on Chart Studio Cloud for
            family.
size
sizesrc
Sets the source reference on Chart Studio Cloud for
            size.
"""
def __init__(
self,
arg=None,
color=None,
colorsrc=None,
family=None,
familysrc=None,
size=None,
sizesrc=None,
**kwargs
):
"""
Construct a new Font object
Sets the font used in hover labels.
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of
:class:`plotly.graph_objs.heatmap.hoverlabel.Font`
color
colorsrc
Sets the source reference on Chart Studio Cloud for
            color.
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
            apply a font if it is available on the system on which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
Studio Cloud (at https://chart-studio.plotly.com or on-
premise) generates images on a server, where only a
select number of fonts are installed and supported.
These include "Arial", "Balto", "Courier New", "Droid
Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT
Sans Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on Chart Studio Cloud for
            family.
size
sizesrc
Sets the source reference on Chart Studio Cloud for
            size.
Returns
-------
Font
"""
super(Font, self).__init__("font")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.heatmap.hoverlabel.Font
constructor must be a dict or
an instance of :class:`plotly.graph_objs.heatmap.hoverlabel.Font`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("color", None)
_v = color if color is not None else _v
if _v is not None:
self["color"] = _v
_v = arg.pop("colorsrc", None)
_v = colorsrc if colorsrc is not None else _v
if _v is not None:
self["colorsrc"] = _v
_v = arg.pop("family", None)
_v = family if family is not None else _v
if _v is not None:
self["family"] = _v
_v = arg.pop("familysrc", None)
_v = familysrc if familysrc is not None else _v
if _v is not None:
self["familysrc"] = _v
_v = arg.pop("size", None)
_v = size if size is not None else _v
if _v is not None:
self["size"] = _v
_v = arg.pop("sizesrc", None)
_v = sizesrc if sizesrc is not None else _v
if _v is not None:
self["sizesrc"] = _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
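

# Illustrative usage sketch (an addition for clarity, not part of the generated
# class above). Assuming a standard plotly installation, the hover-label font of
# a heatmap trace can be configured either with this Font object or with a plain
# dict that plotly coerces into it. The demo only runs when this module is
# executed directly, so normal imports are unaffected.
if __name__ == "__main__":
    import plotly.graph_objects as go

    fig = go.Figure(
        go.Heatmap(
            z=[[1, 2], [3, 4]],
            # family/size/color map onto the properties documented above; the
            # *src variants only matter when values live in Chart Studio grids.
            hoverlabel=dict(font=dict(family="Courier New", size=14, color="#444444")),
        )
    )
    fig.show()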
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 VMware, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: linb, VMware
import mock
from neutron.common import config as n_config
from neutron import context
from neutron.openstack.common import uuidutils
from neutron.plugins.nicira.dbexts import vcns_db
from neutron.plugins.nicira.vshield import (
vcns_driver)
from neutron.plugins.nicira.vshield.common import (
exceptions as vcns_exc)
from neutron.tests.unit.db.loadbalancer import test_db_loadbalancer
from neutron.tests.unit.nicira import get_fake_conf
from neutron.tests.unit.nicira import VCNS_NAME
from neutron.tests.unit.nicira.vshield import fake_vcns
_uuid = uuidutils.generate_uuid
VSE_ID = 'edge-1'
POOL_MAP_INFO = {
'pool_id': None,
'edge_id': VSE_ID,
'pool_vseid': 'pool-1'}
VCNS_CONFIG_FILE = get_fake_conf("vcns.ini.test")
class VcnsDriverTestCase(test_db_loadbalancer.LoadBalancerPluginDbTestCase):
def vcns_loadbalancer_patch(self):
instance = self.mock_vcns.start()
instance.return_value.create_vip.side_effect = (
self.fc2.create_vip)
instance.return_value.get_vip.side_effect = (
self.fc2.get_vip)
instance.return_value.update_vip.side_effect = (
self.fc2.update_vip)
instance.return_value.delete_vip.side_effect = (
self.fc2.delete_vip)
instance.return_value.create_pool.side_effect = (
self.fc2.create_pool)
instance.return_value.get_pool.side_effect = (
self.fc2.get_pool)
instance.return_value.update_pool.side_effect = (
self.fc2.update_pool)
instance.return_value.delete_pool.side_effect = (
self.fc2.delete_pool)
instance.return_value.create_health_monitor.side_effect = (
self.fc2.create_health_monitor)
instance.return_value.get_health_monitor.side_effect = (
self.fc2.get_health_monitor)
instance.return_value.update_health_monitor.side_effect = (
self.fc2.update_health_monitor)
instance.return_value.delete_health_monitor.side_effect = (
self.fc2.delete_health_monitor)
instance.return_value.create_app_profile.side_effect = (
self.fc2.create_app_profile)
instance.return_value.delete_app_profile.side_effect = (
self.fc2.delete_app_profile)
self.pool_id = None
self.vip_id = None
def setUp(self):
n_config.parse(['--config-file', VCNS_CONFIG_FILE])
# mock vcns
self.fc2 = fake_vcns.FakeVcns(unique_router_name=False)
self.mock_vcns = mock.patch(VCNS_NAME, autospec=True)
self.vcns_loadbalancer_patch()
self.nvp_service_plugin_callback = mock.Mock()
self.driver = vcns_driver.VcnsDriver(self.nvp_service_plugin_callback)
super(VcnsDriverTestCase, self).setUp()
self.addCleanup(self.fc2.reset_all)
self.addCleanup(self.mock_vcns.stop)
def tearDown(self):
super(VcnsDriverTestCase, self).tearDown()
class TestEdgeLbDriver(VcnsDriverTestCase):
def test_create_and_get_vip(self):
ctx = context.get_admin_context()
with self.pool(no_delete=True) as pool:
self.pool_id = pool['pool']['id']
POOL_MAP_INFO['pool_id'] = pool['pool']['id']
vcns_db.add_vcns_edge_pool_binding(ctx.session, POOL_MAP_INFO)
with self.vip(pool=pool) as res:
vip_create = res['vip']
self.driver.create_vip(ctx, VSE_ID, vip_create)
vip_get = self.driver.get_vip(ctx, vip_create['id'])
for k, v in vip_get.iteritems():
self.assertEqual(vip_create[k], v)
def test_update_vip(self):
ctx = context.get_admin_context()
with self.pool(no_delete=True) as pool:
self.pool_id = pool['pool']['id']
POOL_MAP_INFO['pool_id'] = pool['pool']['id']
vcns_db.add_vcns_edge_pool_binding(ctx.session, POOL_MAP_INFO)
with self.vip(pool=pool) as res:
vip_create = res['vip']
self.driver.create_vip(ctx, VSE_ID, vip_create)
vip_update = {'id': vip_create['id'],
'pool_id': pool['pool']['id'],
'name': 'update_name',
'description': 'description',
'address': 'update_address',
'port_id': 'update_port_id',
'protocol_port': 'protocol_port',
'protocol': 'update_protocol'}
self.driver.update_vip(ctx, vip_update)
vip_get = self.driver.get_vip(ctx, vip_create['id'])
for k, v in vip_get.iteritems():
if k in vip_update:
self.assertEqual(vip_update[k], v)
def test_delete_vip(self):
ctx = context.get_admin_context()
with self.pool(no_delete=True) as pool:
self.pool_id = pool['pool']['id']
POOL_MAP_INFO['pool_id'] = pool['pool']['id']
vcns_db.add_vcns_edge_pool_binding(ctx.session, POOL_MAP_INFO)
with self.vip(pool=pool) as res:
vip_create = res['vip']
self.driver.create_vip(ctx, VSE_ID, vip_create)
self.driver.delete_vip(ctx, vip_create['id'])
self.assertRaises(vcns_exc.VcnsNotFound,
self.driver.get_vip,
ctx,
vip_create['id'])
#Test Pool Operation
def test_create_and_get_pool(self):
ctx = context.get_admin_context()
with self.pool(no_delete=True) as p:
self.pool_id = p['pool']['id']
pool_create = p['pool']
self.driver.create_pool(ctx, VSE_ID, pool_create, [])
pool_get = self.driver.get_pool(ctx, pool_create['id'], VSE_ID)
for k, v in pool_get.iteritems():
self.assertEqual(pool_create[k], v)
def test_update_pool(self):
ctx = context.get_admin_context()
with self.pool(no_delete=True) as p:
self.pool_id = p['pool']['id']
pool_create = p['pool']
self.driver.create_pool(ctx, VSE_ID, pool_create, [])
pool_update = {'id': pool_create['id'],
'lb_method': 'lb_method',
'name': 'update_name',
'members': [],
'health_monitors': []}
self.driver.update_pool(ctx, VSE_ID, pool_update, [])
pool_get = self.driver.get_pool(ctx, pool_create['id'], VSE_ID)
for k, v in pool_get.iteritems():
if k in pool_update:
self.assertEqual(pool_update[k], v)
def test_delete_pool(self):
ctx = context.get_admin_context()
with self.pool(no_delete=True) as p:
self.pool_id = p['pool']['id']
pool_create = p['pool']
self.driver.create_pool(ctx, VSE_ID, pool_create, [])
self.driver.delete_pool(ctx, pool_create['id'], VSE_ID)
self.assertRaises(vcns_exc.VcnsNotFound,
self.driver.get_pool,
ctx,
pool_create['id'],
VSE_ID)
def test_create_and_get_monitor(self):
ctx = context.get_admin_context()
with self.health_monitor(no_delete=True) as m:
monitor_create = m['health_monitor']
self.driver.create_health_monitor(ctx, VSE_ID, monitor_create)
monitor_get = self.driver.get_health_monitor(
ctx, monitor_create['id'], VSE_ID)
for k, v in monitor_get.iteritems():
self.assertEqual(monitor_create[k], v)
def test_update_health_monitor(self):
ctx = context.get_admin_context()
with self.health_monitor(no_delete=True) as m:
monitor_create = m['health_monitor']
self.driver.create_health_monitor(
ctx, VSE_ID, monitor_create)
monitor_update = {'id': monitor_create['id'],
'delay': 'new_delay',
'timeout': "new_timeout",
'type': 'type',
'max_retries': "max_retries"}
self.driver.update_health_monitor(
ctx, VSE_ID, monitor_create, monitor_update)
monitor_get = self.driver.get_health_monitor(
ctx, monitor_create['id'], VSE_ID)
for k, v in monitor_get.iteritems():
if k in monitor_update:
self.assertEqual(monitor_update[k], v)
def test_delete_health_monitor(self):
ctx = context.get_admin_context()
with self.health_monitor(no_delete=True) as m:
monitor_create = m['health_monitor']
self.driver.create_health_monitor(ctx, VSE_ID, monitor_create)
self.driver.delete_health_monitor(
ctx, monitor_create['id'], VSE_ID)
self.assertRaises(vcns_exc.VcnsNotFound,
self.driver.get_health_monitor,
ctx,
monitor_create['id'],
VSE_ID)
|
|
#!/usr/bin/env python3
"""Create an archive in Simple Archive Format for batch import into DSpace."""
import collections
import csv
import os
import os.path
import shutil
import tkinter
import tkinter.messagebox
import xml.etree.ElementTree as ET
class CreateArchive():
"""Class for creating archive."""
def __init__(self, gs):
"""Class variables for CreateArchive class."""
self.csv_path = gs.csv_path
self.bit_path = gs.bit_path
self.archive_path = gs.archive_path
self.file_name_list = []
self.bit_name_list = []
self.duplicate_file_name_list = []
self.duplicate_bit_name_list = []
self.missing_file_list = []
self.create_zip = gs.create_zip
self.split_zip = gs.split_zip
self.zip_size = int(gs.zip_size)
self.zip_unit = gs.zip_unit
self.create_license = gs.create_license
self.license_file = gs.license_file
self.license_bundle = gs.license_bundle
self.license_text = gs.license_text
self.restrict_access = gs.restrict_access
self.group_name = gs.group_name
self.saf_folder_list = []
def create_file_name_list(self):
"""Create a list of file names from CSV file."""
with open(self.csv_path, 'r', encoding='utf-8') as csv_file:
reader = csv.reader(csv_file)
headers = next(reader)
for row in reader:
for header, data in zip(headers, row):
if header == 'filename':
for csv_file_name in data.split('||'):
self.file_name_list.append(csv_file_name)
return self.file_name_list
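
    # Illustrative CSV shape assumed by this method (all values are examples):
    # a "filename" column may hold several bitstreams joined with "||", e.g.
    #
    #   dc.title,dc.contributor.author,filename
    #   My Thesis,"Doe, Jane",thesis.pdf||appendix.zip
    #
    # which contributes "thesis.pdf" and "appendix.zip" to file_name_list.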
def create_bit_name_list(self):
"""Create a list of file names in Location of Files directory."""
for dirpath, dirnames, filenames in os.walk(self.bit_path):
for fname in filenames:
self.bit_name_list.append(fname)
return self.bit_name_list
def duplicate_file_name(self):
"""Create a list of duplicate file names from CSV file and handle error."""
self.create_file_name_list()
self.duplicate_file_name_list = [k for k, v in collections.Counter(
self.file_name_list).items() if v > 1]
if len(self.duplicate_file_name_list) != 0:
result = tkinter.messagebox.askquestion(
'Duplicate File Name Warning',
'There are duplicate file names in your CSV file.\n'
'\n'
'Would you like to proceed?\n'
)
if result == 'yes':
pass
else:
duplicate_file_name_string = 'Duplicate file names:\n'
for fname in self.duplicate_file_name_list:
duplicate_file_name_string += '\n' + fname
tkinter.messagebox.showwarning(
'Duplicate File Name Error',
'{}\n'.format(duplicate_file_name_string)
)
raise IOError
def duplicate_bit_name(self):
"""Create a list of duplicate file names in Location of Files directory and handle error."""
self.create_bit_name_list()
self.duplicate_bit_name_list = [k for k, v in collections.Counter(
self.bit_name_list).items() if k in self.file_name_list and v > 1]
if len(self.duplicate_bit_name_list) != 0:
duplicate_bit_name_string = 'The following files have the same name:\n'
for fname in self.duplicate_bit_name_list:
duplicate_bit_name_string += '\n' + fname
tkinter.messagebox.showwarning(
'Duplicate File Error',
'{}\n'.format(duplicate_bit_name_string)
)
raise IOError
def missing_files(self):
"""Create a list of file names in CSV file that are missing in Location of Files directory."""
self.missing_file_list = set.difference(
set(self.file_name_list), set(self.bit_name_list))
if len(self.missing_file_list) != 0:
missing_file_string = 'The following files were not found:\n'
for fname in self.missing_file_list:
missing_file_string += '\n' + fname
tkinter.messagebox.showwarning(
'Missing Files Error',
'{}\n'.format(missing_file_string)
)
raise IOError
def new_dir(self, saf_dir, row_num):
"""Create new directory for each DSpace record."""
os.makedirs(os.path.join(saf_dir, ('item_{}'.format(row_num))))
def change_dir(self, saf_dir, row_num):
"""Change current working directory to newly created directory."""
os.chdir(os.path.join(saf_dir, ('item_{}'.format(row_num))))
def write_license(self, contents_file):
"""Write license information to contents file and write license text to license file."""
contents_file.write('{}' '\t' 'BUNDLE:{}' '\n'.format(self.license_file,
self.license_bundle))
with open('{}'.format(self.license_file), 'w', encoding='utf-8') as license:
license.write('{}'.format(self.license_text))
def write_contents_file(self, data):
"""Write file names to contents file and copy files to directory."""
with open('contents', 'a', encoding='utf-8') as contents_file:
for csv_file_name in data.split('||'):
if self.restrict_access:
contents_file.write('{}' '\t' 'BUNDLE:ORIGINAL' '\t' 'permissions:-r {}' '\n'.format(
csv_file_name, self.group_name))
else:
contents_file.write('{}' '\t' 'BUNDLE:ORIGINAL' '\n'.format(csv_file_name))
for dirpath, dirnames, filenames in os.walk(self.bit_path):
for fname in filenames:
if csv_file_name == fname:
shutil.copy2(os.path.join(dirpath, fname), '.')
if self.create_license:
self.write_license(contents_file)
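
    # For illustration (all names are example GUI settings): with restrict_access
    # enabled and group_name "Staff", each entry written above is a tab-separated
    # line such as
    #   thesis.pdf<TAB>BUNDLE:ORIGINAL<TAB>permissions:-r Staff
    # and, when create_license is set, write_license appends a line such as
    #   license.txt<TAB>BUNDLE:LICENSE
    # for the configured license_file/license_bundle.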
def write_dc_metadata(self, header_split, data_split):
"""Write metadata to dublin core file."""
for value in data_split:
dc_value = ET.Element('dcvalue')
dc_value.attrib['element'] = header_split[1]
if len(header_split) == 3:
dc_value.attrib['qualifier'] = header_split[2]
dc_value.text = value
if os.path.isfile('dublin_core.xml'):
with open('dublin_core.xml', 'a', encoding='utf-8') as dc_file:
dc_file.write(' {}' '\n'.format(
str(ET.tostring(dc_value, encoding='utf-8'), 'utf-8')))
else:
with open('dublin_core.xml', 'w', encoding='utf-8') as dc_file:
dc_file.write('<?xml version="1.0" encoding="UTF-8"?>' '\n')
dc_file.write('<dublin_core>' '\n')
dc_file.write(' {}' '\n'.format(
str(ET.tostring(dc_value, encoding='utf-8'), 'utf-8')))
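
    # Example of the XML this builds (illustrative values): a CSV header
    # "dc.title" with data "My Thesis||Mi Tesis" produces
    #
    #   <?xml version="1.0" encoding="UTF-8"?>
    #   <dublin_core>
    #     <dcvalue element="title">My Thesis</dcvalue>
    #     <dcvalue element="title">Mi Tesis</dcvalue>
    #   </dublin_core>
    #
    # with the closing tag added later by write_closing_tag().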
def write_schema_metadata(self, schema_file, header_split, data_split, schema):
"""Write metadata to schema files other than dublin core."""
for value in data_split:
dc_value = ET.Element('dcvalue')
dc_value.attrib['element'] = header_split[1]
if len(header_split) == 3:
dc_value.attrib['qualifier'] = header_split[2]
dc_value.text = value
if os.path.isfile(schema_file):
with open(schema_file, 'a', encoding='utf-8') as dc_file:
dc_file.write(' {}' '\n'.format(
str(ET.tostring(dc_value, encoding='utf-8'), 'utf-8')))
else:
with open(schema_file, 'a', encoding='utf-8') as dc_file:
dc_file.write('<?xml version="1.0" encoding="UTF-8"?>' '\n')
dc_file.write('<dublin_core schema="{}">' '\n'.format(schema))
dc_file.write(' {}' '\n'.format(
str(ET.tostring(dc_value, encoding='utf-8'), 'utf-8')))
def write_closing_tag(self):
"""Write closing tag to each xml file."""
for file_name in os.listdir('.'):
if file_name.endswith('xml'):
with open(file_name, 'a', encoding='utf-8') as dc_file:
dc_file.write('</dublin_core>' '\n')
def create_files(self, saf_dir, row_num, headers, row):
"""Write CSV metadata to appropriate files."""
self.new_dir(saf_dir, row_num)
self.change_dir(saf_dir, row_num)
for header, data in zip(headers, row):
header_split = header.split('.')
schema = header_split[0]
data_split = data.split('||')
schema_file = 'metadata_{}.xml'.format(header_split[0])
if data:
if header_split[0] == 'filename':
self.write_contents_file(data)
elif header_split[0] == 'dc':
self.write_dc_metadata(header_split, data_split)
else:
self.write_schema_metadata(schema_file,
header_split,
data_split,
schema)
self.write_closing_tag()
def open_csv(self):
"""Read CSV file if Split ZIP is not selected."""
saf_folder_name = 'SimpleArchiveFormat'
self.saf_folder_list.append(saf_folder_name)
saf_dir = os.path.join(self.archive_path, saf_folder_name)
with open(self.csv_path, 'r', encoding='utf-8') as csv_file:
reader = csv.reader(csv_file)
headers = next(reader)
row_num = 1
for row in reader:
self.create_files(saf_dir, row_num, headers, row)
row_num += 1
def open_csv_split(self):
"""Read CSV file if Split ZIP is selected."""
with open(self.csv_path, 'r', encoding='utf-8') as csv_file:
reader = csv.reader(csv_file)
headers = next(reader)
saf_folder_number = 1
row_num = 1
total_size = 0
if self.zip_unit == 'MB':
zip_size = self.zip_size * 1000000
else:
zip_size = self.zip_size * 1000000000
for row in reader:
saf_folder_name = 'SimpleArchiveFormat{}'.format(saf_folder_number)
saf_dir = os.path.join(self.archive_path, saf_folder_name)
self.create_files(saf_dir, row_num, headers, row)
files = [f for f in os.listdir('.') if os.path.isfile(f)]
for f in files:
total_size += os.path.getsize(f)
                if total_size >= zip_size:
                    saf_folder_number += 1
                    # Start a new SimpleArchiveFormat folder and move the
                    # over-limit item into it (recompute the name so the move
                    # does not target the folder the item is already in).
                    saf_folder_name = 'SimpleArchiveFormat{}'.format(saf_folder_number)
                    saf_dir = os.path.join(self.archive_path, saf_folder_name)
cur_dir = os.getcwd()
cur_folder = os.path.split(cur_dir)[-1]
new_dir = os.path.join(saf_dir, cur_folder)
shutil.move(cur_dir, new_dir)
total_size = 0
files = [f for f in os.listdir('.') if os.path.isfile(f)]
for f in files:
total_size += os.path.getsize(f)
if saf_folder_name not in self.saf_folder_list:
self.saf_folder_list.append(saf_folder_name)
row_num += 1
def zip_archive(self):
"""Create ZIP files for all archive directories."""
dst_folder_list = os.listdir(self.archive_path)
for folder in dst_folder_list:
folder_path = os.path.join(self.archive_path, folder)
if folder in self.saf_folder_list and os.path.isdir(folder_path):
shutil.make_archive(folder_path, 'zip', folder_path)
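

# Illustrative driver sketch (the real application passes its GUI settings object
# as `gs`; SimpleNamespace below merely stands in for it, every value shown is a
# hypothetical example, and the tkinter dialogs used for validation require a
# display). Runs only when this module is executed directly.
if __name__ == "__main__":
    from types import SimpleNamespace

    gs = SimpleNamespace(
        csv_path="metadata.csv", bit_path="files", archive_path="out",
        create_zip=True, split_zip=False, zip_size="1", zip_unit="GB",
        create_license=False, license_file="", license_bundle="", license_text="",
        restrict_access=False, group_name="",
    )
    archive = CreateArchive(gs)
    # Validation first; each check pops a dialog and raises IOError on failure.
    archive.duplicate_file_name()
    archive.duplicate_bit_name()
    archive.missing_files()
    archive.open_csv()  # or open_csv_split() when split_zip is selected
    if gs.create_zip:
        archive.zip_archive()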
|
|
#!/usr/bin/env python3
# Copyright (c) 2007-8, Playful Invention Company
# Copyright (c) 2008-14, Walter Bender
# Copyright (c) 2011 Collabora Ltd. <http://www.collabora.co.uk/>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import cairo
import getopt
import sys
import os
import os.path
import io
import errno
import configparser
import tarfile
import tempfile
import subprocess
import gi
gi.require_version("Gtk", "3.0")
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import GLib
from gi.repository import GdkPixbuf
from gi.repository import Gio
try:
# Try to use XDG Base Directory standard for config files.
import xdg.BaseDirectory
CONFIG_HOME = os.path.join(xdg.BaseDirectory.xdg_config_home, "turtleart")
except ImportError:
# Default to `.config` per the spec.
CONFIG_HOME = os.path.expanduser(os.path.join("~", ".config", "turtleart"))
argv = sys.argv[:]  # Workaround for the import-time behavior of gst in tagplay
sys.argv[1:] = []  # so that importing gst does not see '--help' or '-h'
import gettext
from gettext import gettext as _
from TurtleArt.taconstants import (
OVERLAY_LAYER,
DEFAULT_TURTLE_COLORS,
TAB_LAYER,
SUFFIX,
TMP_SVG_PATH,
TMP_ODP_PATH,
PASTE_OFFSET,
)
from TurtleArt.tautils import (
data_from_string,
get_load_name,
get_path,
get_save_name,
is_writeable,
)
from TurtleArt.tapalette import default_values
from TurtleArt.tawindow import TurtleArtWindow
from TurtleArt.taexportlogo import save_logo
from TurtleArt.taexportpython import save_python
from TurtleArt.taprimitive import PyExportError
from TurtleArt.taplugin import (
load_a_plugin,
cancel_plugin_install,
complete_plugin_install,
)
from TurtleArt.util.menubuilder import (
make_menu_item,
make_sub_menu,
make_checkmenu_item,
)
class TurtleMain:
""" Launch Turtle Art in GNOME (from outside of Sugar). """
_INSTALL_PATH = "/usr/share/sugar/activities/TurtleArt.activity"
_ALTERNATIVE_INSTALL_PATH = "/usr/local/share/sugar/activities/TurtleArt.activity"
_ICON_SUBPATH = "images/turtle.png"
_GNOME_PLUGIN_SUBPATH = "gnome_plugins"
_GIO_SETTINGS = "org.laptop.TurtleArtActivity"
_HOVER_HELP = "hover-help"
_ORIENTATION = "palette-orientation"
_COORDINATE_SCALE = "coordinate-scale"
_PLUGINS_LIST = "plugins"
def __init__(self, lib_path, share_path):
self._gio_settings_overrides = False
self._lib_path = lib_path
self._share_path = share_path
self._abspath = os.path.abspath(".")
file_activity_info = configparser.ConfigParser()
activity_info_path = os.path.join(share_path, "activity/activity.info")
file_activity_info.read(activity_info_path)
bundle_id = file_activity_info.get("Activity", "bundle_id")
self.version = file_activity_info.get("Activity", "activity_version")
self.name = file_activity_info.get("Activity", "name")
self.summary = file_activity_info.get("Activity", "summary")
self.website = file_activity_info.get("Activity", "url")
self.icon_name = file_activity_info.get("Activity", "icon")
path = os.path.join(share_path, "locale")
if os.path.isdir(path):
gettext.bindtextdomain(bundle_id, path)
gettext.textdomain(bundle_id)
global _
_ = gettext.gettext
self._HELP_MSG = (
"turtleblocks.py: "
+ _("usage is")
+ """
\tturtleblocks.py
\tturtleblocks.py project.tb
\tturtleblocks.py --output_png project.tb
\tturtleblocks.py -o project
\tturtleblocks.py --run project.tb
\tturtleblocks.py -r project"""
)
self._init_vars()
self._parse_command_line()
self._ensure_sugar_paths()
self._gnome_plugins = []
self._selected_sample = None
self._sample_window = None
if self._output_png:
            # Outputting to a file, so no need for a canvas
            self.canvas = None
            self._get_gio_settings()
self._build_window(interactive=False)
self._draw_and_quit()
else:
self._read_initial_pos()
self._init_gnome_plugins()
self._get_gio_settings()
self._setup_gtk()
self._build_window()
self._run_gnome_plugins()
self._start_gtk()
def _get_local_settings(self, activity_root):
"""return an activity-specific Gio.Settings"""
# create compiled schema file if missing from activity root
compiled = os.path.join(activity_root, "gschemas.compiled")
if not os.access(compiled, os.R_OK):
# create schemas directory if missing
path = os.path.join(get_path(None, "data"), "schemas")
if not os.access(path, os.F_OK):
os.makedirs(path)
# create compiled schema file if missing
compiled = os.path.join(path, "gschemas.compiled")
if not os.access(compiled, os.R_OK):
src = "%s.gschema.xml" % self._GIO_SETTINGS
lines = open(os.path.join(activity_root, src), "r").readlines()
open(os.path.join(path, src), "w").writelines(lines)
os.system("glib-compile-schemas %s" % path)
os.remove(os.path.join(path, src))
schemas_path = path
else:
schemas_path = activity_root
# create a local Gio.Settings based on the compiled schema
source = Gio.SettingsSchemaSource.new_from_directory(schemas_path, None, True)
schema = source.lookup(self._GIO_SETTINGS, True)
_settings = Gio.Settings.new_full(schema, None, None)
return _settings
def _get_gio_settings(self):
self._settings = self._get_local_settings(self._share_path)
def get_config_home(self):
return CONFIG_HOME
def _get_gnome_plugin_home(self):
""" Use plugin directory associated with execution path. """
if os.path.exists(os.path.join(self._lib_path, self._GNOME_PLUGIN_SUBPATH)):
return os.path.join(self._lib_path, self._GNOME_PLUGIN_SUBPATH)
else:
return None
def _get_plugin_candidates(self, path):
""" Look for plugin files in plugin directory. """
plugin_files = []
if path is not None:
candidates = os.listdir(path)
for c in candidates:
if c[-10:] == "_plugin.py" and c[0] != "#" and c[0] != ".":
plugin_files.append(c.split(".")[0])
return plugin_files
def _init_gnome_plugins(self):
""" Try launching any plugins we may have found. """
for p in self._get_plugin_candidates(self._get_gnome_plugin_home()):
P = p.capitalize()
f = (
"def f(self): from gnome_plugins.%s import %s; \
return %s(self)"
% (p, P, P)
)
plugin = {}
try:
exec(f, globals(), plugin)
self._gnome_plugins.append(list(plugin.values())[0](self))
except ImportError as e:
print("failed to import %s: %s" % (P, str(e)))
except ValueError as e:
print("failed to import %s: %s" % (P, str(e)))
def _run_gnome_plugins(self):
""" Tell the plugin about the TurtleWindow instance. """
for p in self._gnome_plugins:
p.set_tw(self.tw)
def _mkdir_p(self, path):
"""Create a directory in a fashion similar to `mkdir -p`."""
try:
os.makedirs(path)
except OSError as exc:
if exc.errno == errno.EEXIST:
pass
else:
raise
def _makepath(self, path):
""" Make a path if it doesn't previously exist """
dpath = os.path.normpath(os.path.dirname(path))
if not os.path.exists(dpath):
os.makedirs(dpath)
def _start_gtk(self):
""" Get a main window set up. """
self.win.connect("configure_event", self.tw.update_overlay_position)
self.tw.parent = self.win
self.init_complete = True
if self._ta_file is None:
self.tw.load_start()
else:
self.win.get_window().set_cursor(Gdk.Cursor.new(Gdk.CursorType.WATCH))
GLib.idle_add(self._project_loader, self._ta_file)
self._set_gio_settings_overrides()
Gtk.main()
def _project_loader(self, file_name):
self.tw.load_start(self._ta_file)
self.tw.lc.trace = 0
if self._run_on_launch:
self._do_run_cb()
self.win.get_window().set_cursor(Gdk.Cursor.new(Gdk.CursorType.LEFT_PTR))
def _draw_and_quit(self):
"""Non-interactive mode: run the project, save it to a file
and quit."""
self.tw.load_start(self._ta_file)
self.tw.lc.trace = 0
self.tw.run_button(0, running_from_button_push=True)
self.tw.save_as_image(self._ta_file)
def _build_window(self, interactive=True):
""" Initialize the TurtleWindow instance. """
if interactive:
win = self.canvas.get_window()
cr = win.cairo_create()
surface = cr.get_target()
else:
img_surface = cairo.ImageSurface(cairo.FORMAT_RGB24, 1024, 768)
cr = cairo.Context(img_surface)
surface = cr.get_target()
self.turtle_canvas = surface.create_similar(
cairo.CONTENT_COLOR,
# max(1024, Gdk.Screen.width() * 2),
# max(768, Gdk.Screen.height() * 2))
Gdk.Screen.width() * 2,
Gdk.Screen.height() * 2,
)
# Make sure the autosave directory is writeable
if is_writeable(self._share_path):
self._autosavedirname = self._share_path
else:
self._autosavedirname = os.path.expanduser("~")
self.tw = TurtleArtWindow(
self.canvas,
self._lib_path,
self._share_path,
turtle_canvas=self.turtle_canvas,
activity=self,
running_sugar=False,
)
self.tw.save_folder = self._abspath # os.path.expanduser('~')
if interactive:
if self._settings.get_int(self._HOVER_HELP) == 1:
self.tw.no_help = True
self.hover.set_active(False)
self._do_hover_help_off_cb()
if not self._settings.get_int(self._COORDINATE_SCALE) in [0, 1]:
self.tw.coord_scale = 1
else:
self.tw.coord_scale = 0
if self._settings.get_int(self._ORIENTATION) == 1:
self.tw.orientation = 1
else:
self.tw.coord_scale = 1
def _set_gio_settings_overrides(self):
if self.tw.coord_scale == 0:
self.tw.coord_scale = 1
else:
self._do_rescale_cb(None)
if self.tw.coord_scale != 1:
self._gio_settings_overrides = True
self.coords.set_active(True)
self._gio_settings_overrides = False
def _init_vars(self):
"""If we are invoked to start a project from Gnome, we should make
sure our current directory is TA's source dir."""
self._ta_file = None
self._output_png = False
self._run_on_launch = False
self.current_palette = 0
self.scale = 2.0
self.tw = None
self.init_complete = False
def _parse_command_line(self):
""" Try to make sense of the command-line arguments. """
try:
opts, args = getopt.getopt(argv[1:], "hor", ["help", "output_png", "run"])
except getopt.GetoptError as err:
print(str(err))
print(self._HELP_MSG)
sys.exit(2)
self._run_on_launch = False
for o, a in opts:
if o in ("-h", "--help"):
print(self._HELP_MSG)
sys.exit()
if o in ("-o", "--output_png"):
self._output_png = True
elif o in ("-r", "--run"):
self._run_on_launch = True
else:
assert False, _("No option action:") + " " + o
if args:
self._ta_file = args[0]
if len(args) > 1 or self._output_png and self._ta_file is None:
print(self._HELP_MSG)
sys.exit()
if self._ta_file is not None:
if not self._ta_file.endswith(SUFFIX):
self._ta_file += ".tb"
if not os.path.exists(self._ta_file):
self._ta_file = os.path.join(self._abspath, self._ta_file)
if not os.path.exists(self._ta_file):
assert False, "%s: %s" % (self._ta_file, _("File not found"))
def _ensure_sugar_paths(self):
""" Make sure Sugar paths are present. """
tapath = os.path.join(
os.environ["HOME"], ".sugar", "default", "org.laptop.TurtleArtActivity"
)
list(
map(
self._makepath,
(os.path.join(tapath, "data/"), os.path.join(tapath, "instance/")),
)
)
def _read_initial_pos(self):
""" Read saved configuration. """
try:
data_file = open(os.path.join(CONFIG_HOME, "turtleartrc"), "r")
except IOError:
# Opening the config file failed
# We'll assume it needs to be created
try:
self._mkdir_p(CONFIG_HOME)
data_file = open(os.path.join(CONFIG_HOME, "turtleartrc"), "a+")
except IOError as e:
# We can't write to the configuration file, use
# a faux file that will persist for the length of
# the session.
print(_("Configuration directory not writable: %s") % (e))
data_file = io.StringIO()
data_file.write(str(50) + "\n")
data_file.write(str(50) + "\n")
data_file.write(str(800) + "\n")
data_file.write(str(550) + "\n")
data_file.seek(0)
try:
self.x = int(data_file.readline())
self.y = int(data_file.readline())
self.width = int(data_file.readline())
self.height = int(data_file.readline())
except ValueError:
self.x = 50
self.y = 50
self.width = 800
self.height = 550
def _fixed_resize_cb(self, widget=None, rect=None):
"""If a toolbar opens or closes, we need to resize the vbox
        holding our scrolling window."""
self.vbox.set_size_request(rect.width, rect.height)
self.menu_height = self.menu_bar.get_size_request()[1]
def restore_cursor(self):
""" No longer copying or sharing, so restore standard cursor. """
self.tw.copying_blocks = False
self.tw.sharing_blocks = False
self.tw.saving_blocks = False
self.tw.deleting_blocks = False
if hasattr(self, "get_window"):
if hasattr(self.get_window(), "get_cursor"):
self.get_window().set_cursor(self._old_cursor)
else:
self.get_window().set_cursor(Gdk.Cursor.new(Gdk.CursorType.LEFT_PTR))
def _setup_gtk(self):
""" Set up a scrolled window in which to run Turtle Blocks. """
win = Gtk.Window(Gtk.WindowType.TOPLEVEL)
win.set_default_size(self.width, self.height)
win.move(self.x, self.y)
win.maximize()
win.set_title("%s %s" % (self.name, str(self.version)))
if os.path.exists(os.path.join(self._share_path, self._ICON_SUBPATH)):
win.set_icon_from_file(os.path.join(self._share_path, self._ICON_SUBPATH))
win.show()
win.connect("delete_event", self._quit_ta)
""" Create a scrolled window to contain the turtle canvas. We
add a Fixed container in order to position text Entry widgets
on top of string and number blocks."""
self.fixed = Gtk.Fixed()
self.fixed.connect("size-allocate", self._fixed_resize_cb)
width = Gdk.Screen.width() - 80
height = Gdk.Screen.height() - 80
self.fixed.set_size_request(width, height)
self.vbox = Gtk.VBox(False, 0)
self.vbox.show()
self.menu_bar = self._get_menu_bar()
self.vbox.pack_start(self.menu_bar, False, False, 0)
self.menu_bar.show()
self.menu_height = self.menu_bar.get_size_request()[1]
self.sw = Gtk.ScrolledWindow()
self.sw.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
self.sw.show()
canvas = Gtk.DrawingArea()
width = Gdk.Screen.width() * 2
height = Gdk.Screen.height() * 2
canvas.set_size_request(width, height)
self.sw.add_with_viewport(canvas)
canvas.show()
self.vbox.pack_end(self.sw, True, True, 0)
self.fixed.put(self.vbox, 0, 0)
self.fixed.show()
win.add(self.fixed)
win.show_all()
self.win = win
self.canvas = canvas
def _get_menu_bar(self):
""" Instead of Sugar toolbars, use GNOME menus. """
menu = Gtk.Menu()
make_menu_item(menu, _("New"), self._do_new_cb)
make_menu_item(menu, _("Show sample projects"), self._create_store)
make_menu_item(menu, _("Open"), self._do_open_cb)
make_menu_item(menu, _("Add project"), self._do_load_cb)
make_menu_item(menu, _("Load plugin"), self._do_load_plugin_cb)
make_menu_item(menu, _("Save"), self._do_save_cb)
make_menu_item(menu, _("Save as"), self._do_save_as_cb)
# export submenu
export_submenu = Gtk.Menu()
export_menu = make_sub_menu(export_submenu, _("Export as"))
menu.append(export_menu)
make_menu_item(export_submenu, _("image"), self._do_save_picture_cb)
make_menu_item(
export_submenu, _("image (blocks)"), self._do_save_blocks_image_cb
)
make_menu_item(export_submenu, _("SVG"), self._do_save_svg_cb)
make_menu_item(export_submenu, _("icon"), self._do_save_as_icon_cb)
# TRANS: ODP is Open Office presentation
make_menu_item(export_submenu, _("ODP"), self._do_save_as_odp_cb)
make_menu_item(export_submenu, _("Logo"), self._do_save_logo_cb)
make_menu_item(export_submenu, _("Python"), self._do_save_python_cb)
make_menu_item(menu, _("Quit"), self._quit_ta)
activity_menu = make_sub_menu(menu, _("File"))
menu = Gtk.Menu()
make_menu_item(menu, _("Cartesian coordinates"), self._do_cartesian_cb)
make_menu_item(menu, _("Polar coordinates"), self._do_polar_cb)
self.coords = make_checkmenu_item(
menu, _("Rescale coordinates"), self._do_rescale_cb, status=False
)
make_menu_item(menu, _("Grow blocks"), self._do_resize_cb, 1.5)
make_menu_item(menu, _("Shrink blocks"), self._do_resize_cb, 0.667)
make_menu_item(menu, _("Reset block size"), self._do_resize_cb, -1)
self.hover = make_checkmenu_item(
menu, _("Turn on hover help"), self._do_toggle_hover_help_cb, status=True
)
view_menu = make_sub_menu(menu, _("View"))
menu = Gtk.Menu()
make_menu_item(menu, _("Copy"), self._do_copy_cb)
make_menu_item(menu, _("Paste"), self._do_paste_cb)
make_menu_item(menu, _("Save stack"), self._do_save_macro_cb)
make_menu_item(menu, _("Delete stack"), self._do_delete_macro_cb)
edit_menu = make_sub_menu(menu, _("Edit"))
menu = Gtk.Menu()
make_menu_item(menu, _("Show palette"), self._do_palette_cb)
make_menu_item(menu, _("Hide palette"), self._do_hide_palette_cb)
make_menu_item(menu, _("Show/hide blocks"), self._do_hideshow_cb)
tool_menu = make_sub_menu(menu, _("Tools"))
menu = Gtk.Menu()
make_menu_item(menu, _("Clean"), self._do_eraser_cb)
make_menu_item(menu, _("Run"), self._do_run_cb)
make_menu_item(menu, _("Step"), self._do_step_cb)
make_menu_item(menu, _("Debug"), self._do_trace_cb)
make_menu_item(menu, _("Stop"), self._do_stop_cb)
turtle_menu = make_sub_menu(menu, _("Turtle"))
self._plugin_menu = Gtk.Menu()
plugin_men = make_sub_menu(self._plugin_menu, _("Plugins"))
menu = Gtk.Menu()
make_menu_item(menu, _("About..."), self._do_about_cb)
help_menu = make_sub_menu(menu, _("Help"))
menu_bar = Gtk.MenuBar()
menu_bar.append(activity_menu)
menu_bar.append(edit_menu)
menu_bar.append(view_menu)
menu_bar.append(tool_menu)
menu_bar.append(turtle_menu)
menu_bar.append(plugin_men)
# Add menus for plugins
for p in self._gnome_plugins:
menu_item = p.get_menu()
if menu_item is not None:
menu_bar.append(menu_item)
menu_bar.append(help_menu)
return menu_bar
def _quit_ta(self, widget=None, e=None):
""" Save changes on exit """
project_empty = self.tw.is_project_empty()
if not project_empty:
resp = self._show_save_dialog(e is None)
if resp == Gtk.ResponseType.YES:
if self.tw.is_new_project():
self._save_as()
else:
if self.tw.project_has_changed():
self._save_changes()
elif resp == Gtk.ResponseType.CANCEL:
return
if hasattr(self, "_settings"):
self._settings.set_int(self._ORIENTATION, self.tw.orientation)
for plugin in list(self.tw.turtleart_plugins.values()):
if hasattr(plugin, "quit"):
plugin.quit()
# Clean up temporary files
try:
os.remove(TMP_SVG_PATH)
except BaseException:
pass
try:
os.remove(TMP_ODP_PATH)
except BaseException:
pass
Gtk.main_quit()
exit()
def _show_save_dialog(self, add_cancel=False):
""" Dialog for save project """
dlg = Gtk.MessageDialog(
parent=None,
type=Gtk.MessageType.INFO,
buttons=Gtk.ButtonsType.YES_NO,
            message_format=_(
                "You have unsaved work. "
                "Would you like to save before quitting?"
            ),
)
dlg.set_default_response(Gtk.ResponseType.YES)
if add_cancel:
dlg.add_button(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL)
dlg.set_title(_("Save project?"))
dlg.set_property("skip-taskbar-hint", False)
resp = dlg.run()
dlg.destroy()
return resp
def _reload_plugin_alert(
self, tmp_dir, tmp_path, plugin_path, plugin_name, file_info
):
print("Already installed")
title = _("Plugin %s already installed") % plugin_name
msg = _("Do you want to reinstall %s?") % plugin_name
dlg = Gtk.MessageDialog(
parent=None,
type=Gtk.MessageType.INFO,
buttons=Gtk.ButtonsType.YES_NO,
message_format=title,
)
dlg.format_secondary_text(msg)
dlg.set_title(title)
dlg.set_property("skip-taskbar-hint", False)
resp = dlg.run()
dlg.destroy()
        # The dialog is built with Yes/No buttons, so compare against those
        # responses (run() returns an int, so use == rather than "is").
        if resp == Gtk.ResponseType.YES:
            complete_plugin_install(
                self, tmp_dir, tmp_path, plugin_path, plugin_name, file_info
            )
        else:
            # User declined (or closed the dialog): clean up the unpacked files.
            cancel_plugin_install(tmp_dir)
def _do_new_cb(self, widget):
""" Callback for new project. """
self.tw.new_project()
self.tw.load_start()
def _do_open_cb(self, widget):
""" Callback for open project. """
self.tw.load_file_from_chooser(True)
def _do_load_cb(self, widget):
""" Callback for load project (add to current project). """
self.tw.load_file_from_chooser(False)
def _do_load_plugin_cb(self, widget):
file_path, loaddir = get_load_name(".tar.gz")
if file_path is None:
return
try:
# Copy to tmp file since some systems had trouble
# with gunzip directly from datastore
datapath = get_path(None, "instance")
if not os.path.exists(datapath):
os.makedirs(datapath)
tmpfile = os.path.join(datapath, "tmpfile.tar.gz")
subprocess.call(["cp", file_path, tmpfile])
status = subprocess.call(["gunzip", tmpfile])
if status == 0:
tar_fd = tarfile.open(tmpfile[:-3], "r")
else:
tar_fd = tarfile.open(tmpfile, "r")
except BaseException:
tar_fd = tarfile.open(file_path, "r")
tmp_dir = tempfile.mkdtemp()
try:
tar_fd.extractall(tmp_dir)
load_a_plugin(self, tmp_dir)
self.restore_cursor()
except BaseException:
self.restore_cursor()
finally:
tar_fd.close()
# Remove tmpfile.tar
subprocess.call(["rm", os.path.join(datapath, "tmpfile.tar")])
def _do_save_cb(self, widget):
""" Callback for save project. """
self.tw.save_file(self._ta_file)
def _do_save_as_cb(self, widget):
""" Callback for save-as project. """
self._save_as()
def autosave(self):
""" Autosave is called each type the run button is pressed """
temp_load_save_folder = self.tw.load_save_folder
temp_save_folder = self.tw.save_folder
self.tw.load_save_folder = self._autosavedirname
self.tw.save_folder = self._autosavedirname
self.tw.save_file(file_name=os.path.join(self._autosavedirname, "autosave.tb"))
self.tw.save_folder = temp_save_folder
self.tw.load_save_folder = temp_load_save_folder
def _save_as(self):
""" Save as is called from callback and quit """
self.tw.save_file_name = self._ta_file
self.tw.save_file()
def _save_changes(self):
""" Save changes to current project """
self.tw.save_file_name = self._ta_file
self.tw.save_file(self.tw._loaded_project)
def _do_save_blocks_image_cb(self, widget):
""" Callback for save blocks as image. """
self.tw.save_blocks_as_image()
def _do_save_picture_cb(self, widget):
""" Callback for save canvas. """
self.tw.save_as_image()
def _do_save_svg_cb(self, widget):
""" Callback for save canvas as SVG. """
self.tw.save_as_image(svg=True)
def _do_save_as_icon_cb(self, widget):
""" Callback for save canvas. """
self.tw.write_svg_operation()
self.tw.save_as_icon()
def _do_save_as_odp_cb(self, widget):
""" Callback for save canvas. """
self.tw.save_as_odp()
def _do_save_logo_cb(self, widget):
""" Callback for save project to Logo. """
logocode = save_logo(self.tw)
if len(logocode) == 0:
return
save_type = ".lg"
filename, self.tw.load_save_folder = get_save_name(
save_type, None, "logosession"
)
if isinstance(filename, str):
filename = filename.encode("utf-8")
if filename is not None:
f = open(filename, "w")
f.write(logocode)
f.close()
def _do_save_python_cb(self, widget):
""" Callback for saving the project as Python code. """
# catch PyExportError and display a user-friendly message instead
try:
pythoncode = save_python(self.tw)
except PyExportError as pyee:
if pyee.block is not None:
pyee.block.highlight()
self.tw.showlabel("status", str(pyee))
print(pyee)
return
if not pythoncode:
return
# use name of TA project if it has been saved already
default_name = self.tw.save_file_name
if default_name is None:
default_name = _("myproject")
elif default_name.endswith(".ta") or default_name.endswith(".tb"):
default_name = default_name[:-3]
save_type = ".py"
filename, self.tw.load_save_folder = get_save_name(
save_type, None, default_name
)
if isinstance(filename, str):
filename = filename.encode("utf-8")
if filename is not None:
f = open(filename, "w")
f.write(pythoncode)
f.close()
def _do_resize_cb(self, widget, factor):
""" Callback to resize blocks. """
if factor == -1:
self.tw.block_scale = 2.0
else:
self.tw.block_scale *= factor
self.tw.resize_blocks()
def _do_cartesian_cb(self, button):
""" Callback to display/hide Cartesian coordinate overlay. """
self.tw.set_cartesian(True)
def _do_polar_cb(self, button):
""" Callback to display/hide Polar coordinate overlay. """
self.tw.set_polar(True)
def _do_rescale_cb(self, button):
""" Callback to rescale coordinate space. """
if self._gio_settings_overrides:
return
if self.tw.coord_scale == 1:
self.tw.coord_scale = self.tw.height / 40
self.tw.update_overlay_position()
if self.tw.cartesian is True:
self.tw.overlay_shapes["Cartesian_labeled"].hide()
self.tw.overlay_shapes["Cartesian"].set_layer(OVERLAY_LAYER)
default_values["forward"] = [10]
default_values["back"] = [10]
default_values["arc"] = [90, 10]
default_values["setpensize"] = [1]
self.tw.turtles.get_active_turtle().set_pen_size(1)
else:
self.tw.coord_scale = 1
if self.tw.cartesian is True:
self.tw.overlay_shapes["Cartesian"].hide()
self.tw.overlay_shapes["Cartesian_labeled"].set_layer(OVERLAY_LAYER)
default_values["forward"] = [100]
default_values["back"] = [100]
default_values["arc"] = [90, 100]
default_values["setpensize"] = [5]
self.tw.turtles.get_active_turtle().set_pen_size(5)
if hasattr(self, "_settings"):
self._settings.set_int(self._COORDINATE_SCALE, int(self.tw.coord_scale))
self.tw.recalculate_constants()
def _do_toggle_hover_help_cb(self, button):
""" Toggle hover help on/off """
self.tw.no_help = not button.get_active()
if self.tw.no_help:
self._do_hover_help_off_cb()
else:
self._do_hover_help_on_cb()
def _do_toggle_plugin_cb(self, button):
name = button.get_label()
if hasattr(self, "_settings"):
plugins_list = self._settings.get_string(self._PLUGINS_LIST)
plugins = plugins_list.split(",")
if button.get_active():
if name not in plugins:
plugins.append(name)
self._settings.set_string(self._PLUGINS_LIST, ",".join(plugins))
label = _("Please restart %s in order to use the plugin.") % self.name
else:
if name in plugins:
plugins.remove(name)
self._settings.set_string(self._PLUGINS_LIST, ",".join(plugins))
label = (
_("Please restart %s in order to unload the plugin.") % self.name
)
self.tw.showlabel("status", label)
def _do_hover_help_on_cb(self):
""" Turn hover help on """
if hasattr(self, "_settings"):
self._settings.set_int(self._HOVER_HELP, 0)
def _do_hover_help_off_cb(self):
""" Turn hover help off """
self.tw.last_label = None
if self.tw.status_spr is not None:
self.tw.status_spr.hide()
if hasattr(self, "_settings"):
self._settings.set_int(self._HOVER_HELP, 1)
def _do_palette_cb(self, widget):
""" Callback to show/hide palette of blocks. """
self.tw.show_palette(self.current_palette)
def _do_hide_palette_cb(self, widget):
""" Hide the palette of blocks. """
self.tw.hide_palette()
def _do_hideshow_cb(self, widget):
""" Hide/show the blocks. """
self.tw.hideshow_button()
def _do_eraser_cb(self, widget):
""" Callback for eraser button. """
self.tw.eraser_button()
return
def _do_run_cb(self, widget=None):
""" Callback for run button (rabbit). """
self.tw.lc.trace = 0
self.tw.hideblocks()
self.tw.display_coordinates(clear=True)
self.tw.toolbar_shapes["stopiton"].set_layer(TAB_LAYER)
self.tw.run_button(0, running_from_button_push=True)
return
def _do_step_cb(self, widget):
""" Callback for step button (turtle). """
self.tw.lc.trace = 1
self.tw.run_button(3, running_from_button_push=True)
return
def _do_trace_cb(self, widget):
""" Callback for debug button (bug). """
self.tw.lc.trace = 1
self.tw.run_button(9, running_from_button_push=True)
return
def _do_stop_cb(self, widget):
""" Callback for stop button. """
if self.tw.running_blocks:
self.tw.toolbar_shapes["stopiton"].hide()
if self.tw.hide:
self.tw.showblocks()
self.tw.stop_button()
self.tw.display_coordinates()
def _do_save_macro_cb(self, widget):
""" Callback for save stack button. """
self.tw.copying_blocks = False
self.tw.deleting_blocks = False
if self.tw.saving_blocks:
self.win.get_window().set_cursor(Gdk.Cursor.new(Gdk.CursorType.LEFT_PTR))
self.tw.saving_blocks = False
else:
self.win.get_window().set_cursor(Gdk.Cursor.new(Gdk.CursorType.HAND1))
self.tw.saving_blocks = True
def _do_delete_macro_cb(self, widget):
""" Callback for delete stack button. """
self.tw.copying_blocks = False
self.tw.saving_blocks = False
if self.tw.deleting_blocks:
self.win.get_window().set_cursor(Gdk.Cursor.new(Gdk.CursorType.LEFT_PTR))
self.tw.deleting_blocks = False
else:
self.win.get_window().set_cursor(Gdk.Cursor.new(Gdk.CursorType.HAND1))
self.tw.deleting_blocks = True
def _do_copy_cb(self, button):
""" Callback for copy button. """
self.tw.saving_blocks = False
self.tw.deleting_blocks = False
if self.tw.copying_blocks:
self.win.get_window().set_cursor(Gdk.Cursor.new(Gdk.CursorType.LEFT_PTR))
self.tw.copying_blocks = False
else:
self.win.get_window().set_cursor(Gdk.Cursor.new(Gdk.CursorType.HAND1))
self.tw.copying_blocks = True
def _do_paste_cb(self, button):
""" Callback for paste button. """
self.tw.copying_blocks = False
self.tw.saving_blocks = False
self.tw.deleting_blocks = False
self.win.get_window().set_cursor(Gdk.Cursor.new(Gdk.CursorType.LEFT_PTR))
clipboard = Gtk.Clipboard.get(Gdk.SELECTION_CLIPBOARD)
text = clipboard.wait_for_text()
if text is not None:
if (
self.tw.selected_blk is not None
and self.tw.selected_blk.name == "string"
and text[0:2] != "[["
): # Don't paste block data into a string
self.tw.paste_text_in_block_label(text)
self.tw.selected_blk.resize()
else:
self.tw.process_data(data_from_string(text), self.tw.paste_offset)
self.tw.paste_offset += PASTE_OFFSET
def _do_about_cb(self, widget):
about = Gtk.AboutDialog()
about.set_program_name(_(self.name))
about.set_version(self.version)
about.set_comments(_(self.summary))
about.set_website(self.website)
logo_path = os.path.join(self._share_path, "activity", self.icon_name + ".svg")
about.set_logo(GdkPixbuf.Pixbuf.new_from_file(logo_path))
about.run()
about.destroy()
def _window_event(self, event, data):
""" Callback for resize event. """
data_file = open(".turtleartrc", "w")
data_file.write(str(data.x) + "\n")
data_file.write(str(data.y) + "\n")
data_file.write(str(data.width) + "\n")
data_file.write(str(data.height) + "\n")
def nick_changed(self, nick):
""" TODO: Rename default turtle in dictionary """
pass
def color_changed(self, colors):
""" Reskin turtle with collaboration colors """
turtle = self.tw.turtles.get_turtle(self.tw._default_turtle_name)
try:
turtle.colors = colors.split(",")
except BaseException:
turtle.colors = DEFAULT_TURTLE_COLORS
turtle.custom_shapes = True # Force regeneration of shapes
turtle.reset_shapes()
turtle.show()
def _get_execution_dir(self):
""" From whence is the program being executed? """
dirname = os.path.dirname(__file__)
if dirname == "":
if os.path.exists(os.path.join("~", "Activities", "TurtleArt.activity")):
return os.path.join("~", "Activities", "TurtleArt.activity")
elif os.path.exists(self._INSTALL_PATH):
return self._INSTALL_PATH
elif os.path.exists(self._ALTERNATIVE_INSTALL_PATH):
return self._ALTERNATIVE_INSTALL_PATH
else:
return os.path.abspath(".")
else:
return os.path.abspath(dirname)
def restore_state(self):
""" Anything that needs restoring after a clear screen can go here """
pass
def hide_store(self, widget=None):
if self._sample_window is not None:
self._sample_box.hide()
def _create_store(self, widget=None):
if self._sample_window is None:
self._sample_box = Gtk.EventBox()
self._sample_window = Gtk.ScrolledWindow()
self._sample_window.set_policy(
Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC
)
width = Gdk.Screen.width() / 2
height = Gdk.Screen.height() / 2
self._sample_window.set_size_request(width, height)
self._sample_window.show()
store = Gtk.ListStore(GdkPixbuf.Pixbuf, str)
icon_view = Gtk.IconView()
icon_view.set_model(store)
icon_view.set_selection_mode(Gtk.SelectionMode.SINGLE)
icon_view.connect("selection-changed", self._sample_selected, store)
icon_view.set_pixbuf_column(0)
icon_view.grab_focus()
self._sample_window.add_with_viewport(icon_view)
icon_view.show()
self._fill_samples_list(store)
width = Gdk.Screen.width() / 4
height = Gdk.Screen.height() / 4
self._sample_box.add(self._sample_window)
self.fixed.put(self._sample_box, width, height)
self._sample_window.show()
self._sample_box.show()
def _get_selected_path(self, widget, store):
try:
iter_ = store.get_iter(widget.get_selected_items()[0])
image_path = store.get(iter_, 1)[0]
return image_path, iter_
except BaseException:
return None
def _sample_selected(self, widget, store):
selected = self._get_selected_path(widget, store)
if selected is None:
self._selected_sample = None
self._sample_window.hide()
return
        image_path, _iter = selected
        self._selected_sample = image_path
self._sample_window.hide()
self.win.get_window().set_cursor(Gdk.Cursor.new(Gdk.CursorType.WATCH))
GLib.idle_add(self._sample_loader)
def _sample_loader(self):
# Convert from thumbnail path to sample path
basename = os.path.basename(self._selected_sample)[:-4]
for suffix in [".ta", ".tb"]:
file_path = os.path.join(self._share_path, "samples", basename + suffix)
if os.path.exists(file_path):
self.tw.load_files(file_path)
break
self.win.get_window().set_cursor(Gdk.Cursor.new(Gdk.CursorType.LEFT_PTR))
def _fill_samples_list(self, store):
"""
Append images from the artwork_paths to the store.
"""
for filepath in self._scan_for_samples():
pixbuf = None
pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_size(filepath, 100, 100)
store.append([pixbuf, filepath])
def _scan_for_samples(self):
path = os.path.join(self._share_path, "samples", "thumbnails")
samples = []
for name in os.listdir(path):
if name.endswith(".png"):
samples.append(os.path.join(path, name))
samples.sort()
return samples
|
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/thumbor/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com timehome@corp.globo.com
from os.path import abspath, exists
import tornado
from concurrent.futures import ThreadPoolExecutor, Future
import functools
from thumbor.filters import FiltersFactory
from thumbor.metrics.logger_metrics import Metrics
from thumbor.url import Url
class Context:
'''
Class responsible for containing:
* Server Configuration Parameters (port, ip, key, etc);
* Configurations read from config file (or defaults);
* Importer with imported modules (engine, filters, detectors, etc);
* Request Parameters (width, height, smart, meta, etc).
Each instance of this class MUST be unique per request. This class should not be cached in the server.
'''
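    # Construction sketch (argument names below are illustrative, not part of
    # this module): a fresh Context is built for each request, e.g.
    #   context = Context(server=server_params, config=config,
    #                     importer=importer, request_handler=handler)
    # and discarded once the response has been written.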
def __init__(self, server=None, config=None, importer=None, request_handler=None):
self.server = server
self.config = config
if importer:
self.modules = ContextImporter(self, importer)
if importer.metrics:
self.metrics = importer.metrics(config)
else:
self.metrics = Metrics(config)
else:
self.modules = None
self.metrics = Metrics(config)
self.filters_factory = FiltersFactory(self.modules.filters if self.modules else [])
self.request_handler = request_handler
self.statsd_client = self.metrics # TODO statsd_client is deprecated, remove me on next minor version bump
self.thread_pool = ThreadPool.instance(getattr(config, 'ENGINE_THREADPOOL_SIZE', 0))
self.headers = {}
class ServerParameters(object):
def __init__(self, port, ip, config_path, keyfile, log_level, app_class, fd=None, gifsicle_path=None):
self.port = port
self.ip = ip
self.config_path = config_path
self.keyfile = keyfile
self.log_level = log_level
self.app_class = app_class
self._security_key = None
self.fd = fd
self.load_security_key()
self.gifsicle_path = gifsicle_path
@property
def security_key(self):
return self._security_key
@security_key.setter
def security_key(self, key):
if isinstance(key, unicode):
key = key.encode('utf-8')
self._security_key = key
def load_security_key(self):
if not self.keyfile:
return
path = abspath(self.keyfile)
if not exists(path):
raise ValueError('Could not find security key file at %s. Please verify the keypath argument.' % path)
with open(path, 'r') as f:
security_key = f.read().strip()
self.security_key = security_key
class RequestParameters:
def __init__(self,
debug=False,
meta=False,
trim=None,
crop_left=None,
crop_top=None,
crop_right=None,
crop_bottom=None,
crop=None,
adaptive=False,
full=False,
fit_in=False,
width=0,
height=0,
horizontal_flip=False,
vertical_flip=False,
halign='center',
valign='middle',
filters=None,
smart=False,
quality=80,
image=None,
url=None,
extension=None,
buffer=None,
focal_points=None,
unsafe=False,
hash=None,
accepts_webp=False,
request=None,
max_age=None):
self.debug = bool(debug)
self.meta = bool(meta)
self.trim = trim
if trim is not None:
trim_parts = trim.split(':')
self.trim_pos = trim_parts[1] if len(trim_parts) > 1 else "top-left"
self.trim_tolerance = int(trim_parts[2]) if len(trim_parts) > 2 else 0
if crop is not None:
self.crop = crop
else:
self.crop = {
'left': self.int_or_0(crop_left),
'right': self.int_or_0(crop_right),
'top': self.int_or_0(crop_top),
'bottom': self.int_or_0(crop_bottom)
}
self.should_crop = \
self.crop['left'] > 0 or \
self.crop['top'] > 0 or \
self.crop['right'] > 0 or \
self.crop['bottom'] > 0
self.adaptive = bool(adaptive)
self.full = bool(full)
self.fit_in = bool(fit_in)
        self.width = "orig" if width == "orig" else self.int_or_0(width)
        self.height = "orig" if height == "orig" else self.int_or_0(height)
self.horizontal_flip = bool(horizontal_flip)
self.vertical_flip = bool(vertical_flip)
self.halign = halign or 'center'
self.valign = valign or 'middle'
self.smart = bool(smart)
if filters is None:
filters = []
self.filters = filters
self.image_url = image
self.url = url
self.detection_error = None
self.quality = quality
self.buffer = None
if focal_points is None:
focal_points = []
self.focal_points = focal_points
self.hash = hash
self.prevent_result_storage = False
self.unsafe = unsafe == 'unsafe' or unsafe is True
self.format = None
self.accepts_webp = accepts_webp
self.max_bytes = None
self.max_age = max_age
if request:
if request.query:
self.image_url += '?%s' % request.query
self.url = request.path
self.accepts_webp = 'image/webp' in request.headers.get('Accept', '')
self.image_url = Url.encode_url(self.image_url.encode('utf-8'))
def int_or_0(self, value):
return 0 if value is None else int(value)
class ContextImporter:
def __init__(self, context, importer):
self.context = context
self.importer = importer
self.engine = None
if importer.engine:
self.engine = importer.engine(context)
self.gif_engine = None
if importer.gif_engine:
self.gif_engine = importer.gif_engine(context)
self.storage = None
if importer.storage:
self.storage = importer.storage(context)
self.result_storage = None
if importer.result_storage:
self.result_storage = importer.result_storage(context)
self.upload_photo_storage = None
if importer.upload_photo_storage:
self.upload_photo_storage = importer.upload_photo_storage(context)
self.loader = importer.loader
self.detectors = importer.detectors
self.filters = importer.filters
self.optimizers = importer.optimizers
self.url_signer = importer.url_signer
class ThreadPool(object):
@classmethod
def instance(cls, size):
"""
Cache threadpool since context is
recreated for each request
"""
if not hasattr(cls, "_instance"):
cls._instance = ThreadPool(size)
return cls._instance
def __init__(self, thread_pool_size):
if thread_pool_size:
self.pool = ThreadPoolExecutor(thread_pool_size)
else:
self.pool = None
def _execute_in_foreground(self, operation, callback):
result = Future()
result.set_result(operation())
callback(result)
def _execute_in_pool(self, operation, callback):
task = self.pool.submit(operation)
task.add_done_callback(
lambda future: tornado.ioloop.IOLoop.instance().add_callback(
functools.partial(callback, future)
)
)
def queue(self, operation, callback):
if not self.pool:
self._execute_in_foreground(operation, callback)
else:
self._execute_in_pool(operation, callback)
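    # Usage sketch (operation and callback are hypothetical): queue a blocking
    # call and resume on the IOLoop with its Future once it completes, e.g.
    #   pool.queue(lambda: engine.read(), lambda future: finish(future.result()))
    # If ENGINE_THREADPOOL_SIZE is 0 (no pool), the operation runs in the foreground.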
def cleanup(self):
if self.pool:
print "Joining threads...."
self.pool.shutdown()
|
|
#
# Copyright 2013 Radware LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Avishay Balderman, Radware
from oslo.config import cfg
from neutron.db import api as qdbapi
from neutron.db.loadbalancer import loadbalancer_db
from neutron.openstack.common import importutils
from neutron.openstack.common import log as logging
from neutron.plugins.common import constants
LOG = logging.getLogger(__name__)
DEFAULT_DRIVER = ("neutron.services.loadbalancer.drivers.haproxy"
".plugin_driver.HaproxyOnHostPluginDriver")
lbaas_plugin_opts = [
cfg.StrOpt('driver_fqn',
default=DEFAULT_DRIVER,
help=_('LBaaS driver Fully Qualified Name'))
]
cfg.CONF.register_opts(lbaas_plugin_opts, "LBAAS")
class LoadBalancerPlugin(loadbalancer_db.LoadBalancerPluginDb):
"""Implementation of the Neutron Loadbalancer Service Plugin.
This class manages the workflow of LBaaS request/response.
Most DB related works are implemented in class
loadbalancer_db.LoadBalancerPluginDb.
"""
supported_extension_aliases = ["lbaas"]
def __init__(self):
"""Initialization for the loadbalancer service plugin."""
qdbapi.register_models()
self._load_drivers()
def _load_drivers(self):
"""Loads plugin-driver from configuration.
That method will later leverage service type framework
"""
try:
self.driver = importutils.import_object(
cfg.CONF.LBAAS.driver_fqn, self
)
except ImportError:
LOG.exception(_("Error loading LBaaS driver %s"),
cfg.CONF.LBAAS.driver_fqn)
def get_plugin_type(self):
return constants.LOADBALANCER
def get_plugin_description(self):
return "Neutron LoadBalancer Service Plugin"
def create_vip(self, context, vip):
v = super(LoadBalancerPlugin, self).create_vip(context, vip)
self.driver.create_vip(context, v)
return v
def update_vip(self, context, id, vip):
if 'status' not in vip['vip']:
vip['vip']['status'] = constants.PENDING_UPDATE
old_vip = self.get_vip(context, id)
v = super(LoadBalancerPlugin, self).update_vip(context, id, vip)
self.driver.update_vip(context, old_vip, v)
return v
def _delete_db_vip(self, context, id):
# proxy the call until plugin inherits from DBPlugin
super(LoadBalancerPlugin, self).delete_vip(context, id)
def delete_vip(self, context, id):
self.update_status(context, loadbalancer_db.Vip,
id, constants.PENDING_DELETE)
v = self.get_vip(context, id)
self.driver.delete_vip(context, v)
def create_pool(self, context, pool):
p = super(LoadBalancerPlugin, self).create_pool(context, pool)
self.driver.create_pool(context, p)
return p
def update_pool(self, context, id, pool):
if 'status' not in pool['pool']:
pool['pool']['status'] = constants.PENDING_UPDATE
old_pool = self.get_pool(context, id)
p = super(LoadBalancerPlugin, self).update_pool(context, id, pool)
self.driver.update_pool(context, old_pool, p)
return p
def _delete_db_pool(self, context, id):
# proxy the call until plugin inherits from DBPlugin
super(LoadBalancerPlugin, self).delete_pool(context, id)
def delete_pool(self, context, id):
self.update_status(context, loadbalancer_db.Pool,
id, constants.PENDING_DELETE)
p = self.get_pool(context, id)
self.driver.delete_pool(context, p)
def create_member(self, context, member):
m = super(LoadBalancerPlugin, self).create_member(context, member)
self.driver.create_member(context, m)
return m
def update_member(self, context, id, member):
if 'status' not in member['member']:
member['member']['status'] = constants.PENDING_UPDATE
old_member = self.get_member(context, id)
m = super(LoadBalancerPlugin, self).update_member(context, id, member)
self.driver.update_member(context, old_member, m)
return m
def _delete_db_member(self, context, id):
# proxy the call until plugin inherits from DBPlugin
super(LoadBalancerPlugin, self).delete_member(context, id)
def delete_member(self, context, id):
self.update_status(context, loadbalancer_db.Member,
id, constants.PENDING_DELETE)
m = self.get_member(context, id)
self.driver.delete_member(context, m)
def create_health_monitor(self, context, health_monitor):
        # no PENDING_CREATE status since healthmon is a shared DB object
hm = super(LoadBalancerPlugin, self).create_health_monitor(
context,
health_monitor
)
self.driver.create_health_monitor(context, hm)
return hm
def update_health_monitor(self, context, id, health_monitor):
if 'status' not in health_monitor['health_monitor']:
health_monitor['health_monitor']['status'] = (
constants.PENDING_UPDATE
)
old_hm = self.get_health_monitor(context, id)
hm = super(LoadBalancerPlugin, self).update_health_monitor(
context,
id,
health_monitor
)
with context.session.begin(subtransactions=True):
qry = context.session.query(
loadbalancer_db.PoolMonitorAssociation
).filter_by(monitor_id=hm['id'])
for assoc in qry:
self.driver.update_health_monitor(context, old_hm,
hm, assoc['pool_id'])
return hm
def _delete_db_pool_health_monitor(self, context, hm_id, pool_id):
super(LoadBalancerPlugin, self).delete_pool_health_monitor(context,
hm_id,
pool_id)
def delete_health_monitor(self, context, id):
with context.session.begin(subtransactions=True):
hm = self.get_health_monitor(context, id)
qry = context.session.query(
loadbalancer_db.PoolMonitorAssociation
).filter_by(monitor_id=id)
for assoc in qry:
self.driver.delete_pool_health_monitor(context,
hm,
assoc['pool_id'])
def create_pool_health_monitor(self, context, health_monitor, pool_id):
retval = super(LoadBalancerPlugin, self).create_pool_health_monitor(
context,
health_monitor,
pool_id
)
# open issue: PoolMonitorAssociation has no status field
        # so we can't set the status to pending and let the driver
# set the real status of the association
self.driver.create_pool_health_monitor(
context, health_monitor, pool_id)
return retval
def delete_pool_health_monitor(self, context, id, pool_id):
hm = self.get_health_monitor(context, id)
self.driver.delete_pool_health_monitor(
context, hm, pool_id)
def stats(self, context, pool_id):
stats_data = self.driver.stats(context, pool_id)
# if we get something from the driver -
# update the db and return the value from db
# else - return what we have in db
if stats_data:
super(LoadBalancerPlugin, self)._update_pool_stats(
context,
pool_id,
stats_data
)
return super(LoadBalancerPlugin, self).stats(context,
pool_id)
def populate_vip_graph(self, context, vip):
"""Populate the vip with: pool, members, healthmonitors."""
pool = self.get_pool(context, vip['pool_id'])
vip['pool'] = pool
vip['members'] = [
self.get_member(context, member_id)
for member_id in pool['members']]
vip['health_monitors'] = [
self.get_health_monitor(context, hm_id)
for hm_id in pool['health_monitors']]
return vip
|
|
"""Support for MQTT message handling."""
import asyncio
from functools import partial, wraps
import inspect
from itertools import groupby
import json
import logging
from operator import attrgetter
import os
import ssl
import time
from typing import Any, Callable, List, Optional, Union
import attr
import certifi
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components import websocket_api
from homeassistant.const import (
CONF_CLIENT_ID,
CONF_DEVICE,
CONF_NAME,
CONF_PASSWORD,
CONF_PAYLOAD,
CONF_PORT,
CONF_PROTOCOL,
CONF_USERNAME,
CONF_VALUE_TEMPLATE,
EVENT_HOMEASSISTANT_STARTED,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.const import CONF_UNIQUE_ID # noqa: F401
from homeassistant.core import CoreState, Event, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError, Unauthorized
from homeassistant.helpers import config_validation as cv, event, template
from homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import ConfigType, HomeAssistantType, ServiceDataType
from homeassistant.loader import bind_hass
from homeassistant.util import dt as dt_util
from homeassistant.util.async_ import run_callback_threadsafe
from homeassistant.util.logging import catch_log_exception
# Loading the config flow file will register the flow
from . import config_flow # noqa: F401 pylint: disable=unused-import
from . import debug_info, discovery
from .const import (
ATTR_DISCOVERY_HASH,
ATTR_DISCOVERY_PAYLOAD,
ATTR_DISCOVERY_TOPIC,
ATTR_PAYLOAD,
ATTR_QOS,
ATTR_RETAIN,
ATTR_TOPIC,
CONF_BIRTH_MESSAGE,
CONF_BROKER,
CONF_DISCOVERY,
CONF_QOS,
CONF_RETAIN,
CONF_STATE_TOPIC,
CONF_WILL_MESSAGE,
DEFAULT_BIRTH,
DEFAULT_DISCOVERY,
DEFAULT_PAYLOAD_AVAILABLE,
DEFAULT_PAYLOAD_NOT_AVAILABLE,
DEFAULT_PREFIX,
DEFAULT_QOS,
DEFAULT_RETAIN,
DEFAULT_WILL,
MQTT_CONNECTED,
MQTT_DISCONNECTED,
PROTOCOL_311,
)
from .debug_info import log_messages
from .discovery import (
LAST_DISCOVERY,
MQTT_DISCOVERY_UPDATED,
clear_discovery_hash,
set_discovery_hash,
)
from .models import Message, MessageCallbackType, PublishPayloadType
from .subscription import async_subscribe_topics, async_unsubscribe_topics
from .util import _VALID_QOS_SCHEMA, valid_publish_topic, valid_subscribe_topic
_LOGGER = logging.getLogger(__name__)
DOMAIN = "mqtt"
DATA_MQTT = "mqtt"
DATA_MQTT_CONFIG = "mqtt_config"
SERVICE_PUBLISH = "publish"
SERVICE_DUMP = "dump"
CONF_DISCOVERY_PREFIX = "discovery_prefix"
CONF_KEEPALIVE = "keepalive"
CONF_CERTIFICATE = "certificate"
CONF_CLIENT_KEY = "client_key"
CONF_CLIENT_CERT = "client_cert"
CONF_TLS_INSECURE = "tls_insecure"
CONF_TLS_VERSION = "tls_version"
CONF_COMMAND_TOPIC = "command_topic"
CONF_TOPIC = "topic"
CONF_AVAILABILITY = "availability"
CONF_AVAILABILITY_TOPIC = "availability_topic"
CONF_PAYLOAD_AVAILABLE = "payload_available"
CONF_PAYLOAD_NOT_AVAILABLE = "payload_not_available"
CONF_JSON_ATTRS_TOPIC = "json_attributes_topic"
CONF_JSON_ATTRS_TEMPLATE = "json_attributes_template"
CONF_IDENTIFIERS = "identifiers"
CONF_CONNECTIONS = "connections"
CONF_MANUFACTURER = "manufacturer"
CONF_MODEL = "model"
CONF_SW_VERSION = "sw_version"
CONF_VIA_DEVICE = "via_device"
CONF_DEPRECATED_VIA_HUB = "via_hub"
PROTOCOL_31 = "3.1"
DEFAULT_PORT = 1883
DEFAULT_KEEPALIVE = 60
DEFAULT_PROTOCOL = PROTOCOL_311
DEFAULT_TLS_PROTOCOL = "auto"
ATTR_PAYLOAD_TEMPLATE = "payload_template"
MAX_RECONNECT_WAIT = 300 # seconds
CONNECTION_SUCCESS = "connection_success"
CONNECTION_FAILED = "connection_failed"
CONNECTION_FAILED_RECOVERABLE = "connection_failed_recoverable"
DISCOVERY_COOLDOWN = 2
TIMEOUT_ACK = 1
def validate_device_has_at_least_one_identifier(value: ConfigType) -> ConfigType:
"""Validate that a device info entry has at least one identifying value."""
if not value.get(CONF_IDENTIFIERS) and not value.get(CONF_CONNECTIONS):
raise vol.Invalid(
"Device must have at least one identifying value in "
"'identifiers' and/or 'connections'"
)
return value
CLIENT_KEY_AUTH_MSG = (
"client_key and client_cert must both be present in "
"the MQTT broker configuration"
)
MQTT_WILL_BIRTH_SCHEMA = vol.Schema(
{
vol.Inclusive(ATTR_TOPIC, "topic_payload"): valid_publish_topic,
vol.Inclusive(ATTR_PAYLOAD, "topic_payload"): cv.string,
vol.Optional(ATTR_QOS, default=DEFAULT_QOS): _VALID_QOS_SCHEMA,
vol.Optional(ATTR_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
},
required=True,
)
def embedded_broker_deprecated(value):
"""Warn user that embedded MQTT broker is deprecated."""
_LOGGER.warning(
"The embedded MQTT broker has been deprecated and will stop working"
"after June 5th, 2019. Use an external broker instead. For"
"instructions, see https://www.home-assistant.io/docs/mqtt/broker"
)
return value
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
cv.deprecated(CONF_TLS_VERSION, invalidation_version="0.115"),
vol.Schema(
{
vol.Optional(CONF_CLIENT_ID): cv.string,
vol.Optional(CONF_KEEPALIVE, default=DEFAULT_KEEPALIVE): vol.All(
vol.Coerce(int), vol.Range(min=15)
),
vol.Optional(CONF_BROKER): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD): cv.string,
vol.Optional(CONF_CERTIFICATE): vol.Any("auto", cv.isfile),
vol.Inclusive(
CONF_CLIENT_KEY, "client_key_auth", msg=CLIENT_KEY_AUTH_MSG
): cv.isfile,
vol.Inclusive(
CONF_CLIENT_CERT, "client_key_auth", msg=CLIENT_KEY_AUTH_MSG
): cv.isfile,
vol.Optional(CONF_TLS_INSECURE): cv.boolean,
vol.Optional(
CONF_TLS_VERSION, default=DEFAULT_TLS_PROTOCOL
): vol.Any("auto", "1.0", "1.1", "1.2"),
vol.Optional(CONF_PROTOCOL, default=DEFAULT_PROTOCOL): vol.All(
cv.string, vol.In([PROTOCOL_31, PROTOCOL_311])
),
vol.Optional(
CONF_WILL_MESSAGE, default=DEFAULT_WILL
): MQTT_WILL_BIRTH_SCHEMA,
vol.Optional(
CONF_BIRTH_MESSAGE, default=DEFAULT_BIRTH
): MQTT_WILL_BIRTH_SCHEMA,
vol.Optional(CONF_DISCOVERY, default=DEFAULT_DISCOVERY): cv.boolean,
# discovery_prefix must be a valid publish topic because if no
# state topic is specified, it will be created with the given prefix.
vol.Optional(
CONF_DISCOVERY_PREFIX, default=DEFAULT_PREFIX
): valid_publish_topic,
}
),
)
},
extra=vol.ALLOW_EXTRA,
)
SCHEMA_BASE = {vol.Optional(CONF_QOS, default=DEFAULT_QOS): _VALID_QOS_SCHEMA}
MQTT_AVAILABILITY_SINGLE_SCHEMA = vol.Schema(
{
vol.Exclusive(CONF_AVAILABILITY_TOPIC, "availability"): valid_subscribe_topic,
vol.Optional(
CONF_PAYLOAD_AVAILABLE, default=DEFAULT_PAYLOAD_AVAILABLE
): cv.string,
vol.Optional(
CONF_PAYLOAD_NOT_AVAILABLE, default=DEFAULT_PAYLOAD_NOT_AVAILABLE
): cv.string,
}
)
MQTT_AVAILABILITY_LIST_SCHEMA = vol.Schema(
{
vol.Exclusive(CONF_AVAILABILITY, "availability"): vol.All(
cv.ensure_list,
[
{
vol.Optional(CONF_TOPIC): valid_subscribe_topic,
vol.Optional(
CONF_PAYLOAD_AVAILABLE, default=DEFAULT_PAYLOAD_AVAILABLE
): cv.string,
vol.Optional(
CONF_PAYLOAD_NOT_AVAILABLE,
default=DEFAULT_PAYLOAD_NOT_AVAILABLE,
): cv.string,
}
],
),
}
)
MQTT_AVAILABILITY_SCHEMA = MQTT_AVAILABILITY_SINGLE_SCHEMA.extend(
MQTT_AVAILABILITY_LIST_SCHEMA.schema
)
MQTT_ENTITY_DEVICE_INFO_SCHEMA = vol.All(
cv.deprecated(CONF_DEPRECATED_VIA_HUB, CONF_VIA_DEVICE),
vol.Schema(
{
vol.Optional(CONF_IDENTIFIERS, default=list): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_CONNECTIONS, default=list): vol.All(
cv.ensure_list, [vol.All(vol.Length(2), [cv.string])]
),
vol.Optional(CONF_MANUFACTURER): cv.string,
vol.Optional(CONF_MODEL): cv.string,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_SW_VERSION): cv.string,
vol.Optional(CONF_VIA_DEVICE): cv.string,
}
),
validate_device_has_at_least_one_identifier,
)
MQTT_JSON_ATTRS_SCHEMA = vol.Schema(
{
vol.Optional(CONF_JSON_ATTRS_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_JSON_ATTRS_TEMPLATE): cv.template,
}
)
MQTT_BASE_PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend(SCHEMA_BASE)
# Sensor type platforms subscribe to MQTT events
MQTT_RO_PLATFORM_SCHEMA = MQTT_BASE_PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
}
)
# Switch type platforms publish to MQTT and may subscribe
MQTT_RW_PLATFORM_SCHEMA = MQTT_BASE_PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_COMMAND_TOPIC): valid_publish_topic,
vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
vol.Optional(CONF_STATE_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
}
)
# Service call validation schema
MQTT_PUBLISH_SCHEMA = vol.Schema(
{
vol.Required(ATTR_TOPIC): valid_publish_topic,
vol.Exclusive(ATTR_PAYLOAD, CONF_PAYLOAD): object,
vol.Exclusive(ATTR_PAYLOAD_TEMPLATE, CONF_PAYLOAD): cv.string,
vol.Optional(ATTR_QOS, default=DEFAULT_QOS): _VALID_QOS_SCHEMA,
vol.Optional(ATTR_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
},
required=True,
)
SubscribePayloadType = Union[str, bytes] # Only bytes if encoding is None
def _build_publish_data(topic: Any, qos: int, retain: bool) -> ServiceDataType:
"""Build the arguments for the publish service without the payload."""
data = {ATTR_TOPIC: topic}
if qos is not None:
data[ATTR_QOS] = qos
if retain is not None:
data[ATTR_RETAIN] = retain
return data
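# For illustration, assuming the usual constant values (ATTR_TOPIC == "topic",
# ATTR_QOS == "qos", ATTR_RETAIN == "retain"):
#   _build_publish_data("home/light/set", 1, True)
#   -> {"topic": "home/light/set", "qos": 1, "retain": True}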
@bind_hass
def publish(hass: HomeAssistantType, topic, payload, qos=None, retain=None) -> None:
"""Publish message to an MQTT topic."""
hass.add_job(async_publish, hass, topic, payload, qos, retain)
@callback
@bind_hass
def async_publish(
hass: HomeAssistantType, topic: Any, payload, qos=None, retain=None
) -> None:
"""Publish message to an MQTT topic."""
data = _build_publish_data(topic, qos, retain)
data[ATTR_PAYLOAD] = payload
hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_PUBLISH, data))
@bind_hass
def publish_template(
hass: HomeAssistantType, topic, payload_template, qos=None, retain=None
) -> None:
"""Publish message to an MQTT topic."""
hass.add_job(async_publish_template, hass, topic, payload_template, qos, retain)
@bind_hass
def async_publish_template(
hass: HomeAssistantType, topic, payload_template, qos=None, retain=None
) -> None:
"""Publish message to an MQTT topic using a template payload."""
data = _build_publish_data(topic, qos, retain)
data[ATTR_PAYLOAD_TEMPLATE] = payload_template
hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_PUBLISH, data))
def wrap_msg_callback(msg_callback: MessageCallbackType) -> MessageCallbackType:
"""Wrap an MQTT message callback to support deprecated signature."""
# Check for partials to properly determine if coroutine function
check_func = msg_callback
while isinstance(check_func, partial):
check_func = check_func.func
wrapper_func = None
if asyncio.iscoroutinefunction(check_func):
@wraps(msg_callback)
async def async_wrapper(msg: Any) -> None:
"""Call with deprecated signature."""
await msg_callback(msg.topic, msg.payload, msg.qos)
wrapper_func = async_wrapper
else:
@wraps(msg_callback)
def wrapper(msg: Any) -> None:
"""Call with deprecated signature."""
msg_callback(msg.topic, msg.payload, msg.qos)
wrapper_func = wrapper
return wrapper_func
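# The two callback signatures involved, sketched with an illustrative name:
#   current style:    def message_received(msg: Message) -> None: ...
#   deprecated style: def message_received(topic, payload, qos) -> None: ...
# wrap_msg_callback adapts the deprecated form to the current one.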
@bind_hass
async def async_subscribe(
hass: HomeAssistantType,
topic: str,
msg_callback: MessageCallbackType,
qos: int = DEFAULT_QOS,
encoding: Optional[str] = "utf-8",
):
"""Subscribe to an MQTT topic.
Call the return value to unsubscribe.
"""
# Count callback parameters which don't have a default value
non_default = 0
if msg_callback:
non_default = sum(
p.default == inspect.Parameter.empty
for _, p in inspect.signature(msg_callback).parameters.items()
)
wrapped_msg_callback = msg_callback
# If we have 3 parameters with no default value, wrap the callback
if non_default == 3:
_LOGGER.warning(
"Signature of MQTT msg_callback '%s.%s' is deprecated",
inspect.getmodule(msg_callback).__name__,
msg_callback.__name__,
)
wrapped_msg_callback = wrap_msg_callback(msg_callback)
async_remove = await hass.data[DATA_MQTT].async_subscribe(
topic,
catch_log_exception(
wrapped_msg_callback,
lambda msg: (
f"Exception in {msg_callback.__name__} when handling msg on "
f"'{msg.topic}': '{msg.payload}'"
),
),
qos,
encoding,
)
return async_remove
@bind_hass
def subscribe(
hass: HomeAssistantType,
topic: str,
msg_callback: MessageCallbackType,
qos: int = DEFAULT_QOS,
encoding: str = "utf-8",
) -> Callable[[], None]:
"""Subscribe to an MQTT topic."""
async_remove = asyncio.run_coroutine_threadsafe(
async_subscribe(hass, topic, msg_callback, qos, encoding), hass.loop
).result()
def remove():
"""Remove listener convert."""
run_callback_threadsafe(hass.loop, async_remove).result()
return remove
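# Usage sketch (topic and handler are hypothetical): the awaited return value is
# the unsubscribe callable, e.g.
#   unsub = await async_subscribe(hass, "sensor/kitchen/state", message_received)
#   ...
#   unsub()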
async def _async_setup_discovery(
hass: HomeAssistantType, conf: ConfigType, config_entry
) -> bool:
"""Try to start the discovery of MQTT devices.
This method is a coroutine.
"""
success: bool = await discovery.async_start(
hass, conf[CONF_DISCOVERY_PREFIX], config_entry
)
return success
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
"""Start the MQTT protocol service."""
conf: Optional[ConfigType] = config.get(DOMAIN)
websocket_api.async_register_command(hass, websocket_subscribe)
websocket_api.async_register_command(hass, websocket_remove_device)
websocket_api.async_register_command(hass, websocket_mqtt_info)
if conf is None:
# If we have a config entry, setup is done by that config entry.
# If there is no config entry, this should fail.
return bool(hass.config_entries.async_entries(DOMAIN))
conf = dict(conf)
hass.data[DATA_MQTT_CONFIG] = conf
# Only import if we haven't before.
if not hass.config_entries.async_entries(DOMAIN):
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data={}
)
)
return True
def _merge_config(entry, conf):
"""Merge configuration.yaml config with config entry."""
return {**conf, **entry.data}
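# For illustration (values are hypothetical): with conf == {"broker": "a", "port": 1883}
# and entry.data == {"broker": "b"}, the result is {"broker": "b", "port": 1883};
# config entry data wins on shared keys.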
async def async_setup_entry(hass, entry):
"""Load a config entry."""
conf = hass.data.get(DATA_MQTT_CONFIG)
# Config entry was created because user had configuration.yaml entry
# They removed that, so remove entry.
if conf is None and entry.source == config_entries.SOURCE_IMPORT:
hass.async_create_task(hass.config_entries.async_remove(entry.entry_id))
return False
# If user didn't have configuration.yaml config, generate defaults
if conf is None:
conf = CONFIG_SCHEMA({DOMAIN: dict(entry.data)})[DOMAIN]
elif any(key in conf for key in entry.data):
shared_keys = conf.keys() & entry.data.keys()
override = {k: entry.data[k] for k in shared_keys}
if CONF_PASSWORD in override:
override[CONF_PASSWORD] = "********"
_LOGGER.info(
"Data in your configuration entry is going to override your "
"configuration.yaml: %s",
override,
)
conf = _merge_config(entry, conf)
hass.data[DATA_MQTT] = MQTT(hass, entry, conf,)
await hass.data[DATA_MQTT].async_connect()
async def async_stop_mqtt(_event: Event):
"""Stop MQTT component."""
await hass.data[DATA_MQTT].async_disconnect()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, async_stop_mqtt)
async def async_publish_service(call: ServiceCall):
"""Handle MQTT publish service calls."""
msg_topic: str = call.data[ATTR_TOPIC]
payload = call.data.get(ATTR_PAYLOAD)
payload_template = call.data.get(ATTR_PAYLOAD_TEMPLATE)
qos: int = call.data[ATTR_QOS]
retain: bool = call.data[ATTR_RETAIN]
if payload_template is not None:
try:
payload = template.Template(payload_template, hass).async_render()
except template.jinja2.TemplateError as exc:
_LOGGER.error(
"Unable to publish to %s: rendering payload template of "
"%s failed because %s",
msg_topic,
payload_template,
exc,
)
return
await hass.data[DATA_MQTT].async_publish(msg_topic, payload, qos, retain)
hass.services.async_register(
DOMAIN, SERVICE_PUBLISH, async_publish_service, schema=MQTT_PUBLISH_SCHEMA
)
async def async_dump_service(call: ServiceCall):
"""Handle MQTT dump service calls."""
messages = []
@callback
def collect_msg(msg):
messages.append((msg.topic, msg.payload.replace("\n", "")))
unsub = await async_subscribe(hass, call.data["topic"], collect_msg)
def write_dump():
with open(hass.config.path("mqtt_dump.txt"), "wt") as fp:
for msg in messages:
fp.write(",".join(msg) + "\n")
async def finish_dump(_):
"""Write dump to file."""
unsub()
await hass.async_add_executor_job(write_dump)
event.async_call_later(hass, call.data["duration"], finish_dump)
hass.services.async_register(
DOMAIN,
SERVICE_DUMP,
async_dump_service,
schema=vol.Schema(
{
vol.Required("topic"): valid_subscribe_topic,
vol.Optional("duration", default=5): int,
}
),
)
if conf.get(CONF_DISCOVERY):
await _async_setup_discovery(hass, conf, entry)
return True
@attr.s(slots=True, frozen=True)
class Subscription:
"""Class to hold data about an active subscription."""
topic: str = attr.ib()
callback: MessageCallbackType = attr.ib()
qos: int = attr.ib(default=0)
encoding: str = attr.ib(default="utf-8")
class MQTT:
"""Home Assistant MQTT client."""
def __init__(self, hass: HomeAssistantType, config_entry, conf,) -> None:
"""Initialize Home Assistant MQTT client."""
# We don't import on the top because some integrations
# should be able to optionally rely on MQTT.
import paho.mqtt.client as mqtt # pylint: disable=import-outside-toplevel
self.hass = hass
self.config_entry = config_entry
self.conf = conf
self.subscriptions: List[Subscription] = []
self.connected = False
self._ha_started = asyncio.Event()
self._last_subscribe = time.time()
self._mqttc: mqtt.Client = None
self._paho_lock = asyncio.Lock()
self._pending_operations = {}
if self.hass.state == CoreState.running:
self._ha_started.set()
else:
@callback
def ha_started(_):
self._ha_started.set()
self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, ha_started)
self.init_client()
self.config_entry.add_update_listener(self.async_config_entry_updated)
@staticmethod
async def async_config_entry_updated(hass, entry) -> None:
"""Handle signals of config entry being updated.
        This is a static method because a bound method cannot be used with weak references.
        A typical cause is the config entry options changing.
"""
self = hass.data[DATA_MQTT]
conf = hass.data.get(DATA_MQTT_CONFIG)
if conf is None:
conf = CONFIG_SCHEMA({DOMAIN: dict(entry.data)})[DOMAIN]
self.conf = _merge_config(entry, conf)
await self.async_disconnect()
self.init_client()
await self.async_connect()
await discovery.async_stop(hass)
if self.conf.get(CONF_DISCOVERY):
await _async_setup_discovery(hass, self.conf, entry)
def init_client(self):
"""Initialize paho client."""
# We don't import on the top because some integrations
# should be able to optionally rely on MQTT.
import paho.mqtt.client as mqtt # pylint: disable=import-outside-toplevel
if self.conf[CONF_PROTOCOL] == PROTOCOL_31:
proto: int = mqtt.MQTTv31
else:
proto = mqtt.MQTTv311
client_id = self.conf.get(CONF_CLIENT_ID)
if client_id is None:
self._mqttc = mqtt.Client(protocol=proto)
else:
self._mqttc = mqtt.Client(client_id, protocol=proto)
# Enable logging
self._mqttc.enable_logger()
username = self.conf.get(CONF_USERNAME)
password = self.conf.get(CONF_PASSWORD)
if username is not None:
self._mqttc.username_pw_set(username, password)
certificate = self.conf.get(CONF_CERTIFICATE)
# For cloudmqtt.com, secured connection, auto fill in certificate
if (
certificate is None
and 19999 < self.conf[CONF_PORT] < 30000
and self.conf[CONF_BROKER].endswith(".cloudmqtt.com")
):
certificate = os.path.join(
os.path.dirname(__file__), "addtrustexternalcaroot.crt"
)
# When the certificate is set to auto, use bundled certs from certifi
elif certificate == "auto":
certificate = certifi.where()
client_key = self.conf.get(CONF_CLIENT_KEY)
client_cert = self.conf.get(CONF_CLIENT_CERT)
tls_insecure = self.conf.get(CONF_TLS_INSECURE)
if certificate is not None:
self._mqttc.tls_set(
certificate,
certfile=client_cert,
keyfile=client_key,
tls_version=ssl.PROTOCOL_TLS,
)
if tls_insecure is not None:
self._mqttc.tls_insecure_set(tls_insecure)
self._mqttc.on_connect = self._mqtt_on_connect
self._mqttc.on_disconnect = self._mqtt_on_disconnect
self._mqttc.on_message = self._mqtt_on_message
self._mqttc.on_publish = self._mqtt_on_callback
self._mqttc.on_subscribe = self._mqtt_on_callback
self._mqttc.on_unsubscribe = self._mqtt_on_callback
if (
CONF_WILL_MESSAGE in self.conf
and ATTR_TOPIC in self.conf[CONF_WILL_MESSAGE]
):
will_message = Message(**self.conf[CONF_WILL_MESSAGE])
else:
will_message = None
if will_message is not None:
self._mqttc.will_set( # pylint: disable=no-value-for-parameter
topic=will_message.topic,
payload=will_message.payload,
qos=will_message.qos,
retain=will_message.retain,
)
async def async_publish(
self, topic: str, payload: PublishPayloadType, qos: int, retain: bool
) -> None:
"""Publish a MQTT message."""
async with self._paho_lock:
msg_info = await self.hass.async_add_executor_job(
self._mqttc.publish, topic, payload, qos, retain
)
_LOGGER.debug(
"Transmitting message on %s: '%s', mid: %s",
topic,
payload,
msg_info.mid,
)
_raise_on_error(msg_info.rc)
await self._wait_for_mid(msg_info.mid)
async def async_connect(self) -> str:
"""Connect to the host. Does not process messages yet."""
# pylint: disable=import-outside-toplevel
import paho.mqtt.client as mqtt
result: int = None
try:
result = await self.hass.async_add_executor_job(
self._mqttc.connect,
self.conf[CONF_BROKER],
self.conf[CONF_PORT],
self.conf[CONF_KEEPALIVE],
)
except OSError as err:
_LOGGER.error("Failed to connect to MQTT server due to exception: %s", err)
if result is not None and result != 0:
_LOGGER.error(
"Failed to connect to MQTT server: %s", mqtt.error_string(result)
)
self._mqttc.loop_start()
async def async_disconnect(self):
"""Stop the MQTT client."""
def stop():
"""Stop the MQTT client."""
# Do not disconnect, we want the broker to always publish will
self._mqttc.loop_stop()
await self.hass.async_add_executor_job(stop)
async def async_subscribe(
self,
topic: str,
msg_callback: MessageCallbackType,
qos: int,
encoding: Optional[str] = None,
) -> Callable[[], None]:
"""Set up a subscription to a topic with the provided qos.
This method is a coroutine.
"""
if not isinstance(topic, str):
raise HomeAssistantError("Topic needs to be a string!")
subscription = Subscription(topic, msg_callback, qos, encoding)
self.subscriptions.append(subscription)
# Only subscribe if currently connected.
if self.connected:
self._last_subscribe = time.time()
await self._async_perform_subscription(topic, qos)
@callback
def async_remove() -> None:
"""Remove subscription."""
if subscription not in self.subscriptions:
raise HomeAssistantError("Can't remove subscription twice")
self.subscriptions.remove(subscription)
if any(other.topic == topic for other in self.subscriptions):
# Other subscriptions on topic remaining - don't unsubscribe.
return
# Only unsubscribe if currently connected.
if self.connected:
self.hass.async_create_task(self._async_unsubscribe(topic))
return async_remove
async def _async_unsubscribe(self, topic: str) -> None:
"""Unsubscribe from a topic.
This method is a coroutine.
"""
async with self._paho_lock:
result: int = None
result, mid = await self.hass.async_add_executor_job(
self._mqttc.unsubscribe, topic
)
_LOGGER.debug("Unsubscribing from %s, mid: %s", topic, mid)
_raise_on_error(result)
await self._wait_for_mid(mid)
async def _async_perform_subscription(self, topic: str, qos: int) -> None:
"""Perform a paho-mqtt subscription."""
async with self._paho_lock:
result: int = None
result, mid = await self.hass.async_add_executor_job(
self._mqttc.subscribe, topic, qos
)
_LOGGER.debug("Subscribing to %s, mid: %s", topic, mid)
_raise_on_error(result)
await self._wait_for_mid(mid)
def _mqtt_on_connect(self, _mqttc, _userdata, _flags, result_code: int) -> None:
"""On connect callback.
Resubscribe to all topics we were subscribed to and publish birth
message.
"""
# pylint: disable=import-outside-toplevel
import paho.mqtt.client as mqtt
if result_code != mqtt.CONNACK_ACCEPTED:
_LOGGER.error(
"Unable to connect to the MQTT broker: %s",
mqtt.connack_string(result_code),
)
return
self.connected = True
dispatcher_send(self.hass, MQTT_CONNECTED)
_LOGGER.info(
"Connected to MQTT server %s:%s (%s)",
self.conf[CONF_BROKER],
self.conf[CONF_PORT],
result_code,
)
# Group subscriptions to only re-subscribe once for each topic.
keyfunc = attrgetter("topic")
for topic, subs in groupby(sorted(self.subscriptions, key=keyfunc), keyfunc):
# Re-subscribe with the highest requested qos
max_qos = max(subscription.qos for subscription in subs)
self.hass.add_job(self._async_perform_subscription, topic, max_qos)
if (
CONF_BIRTH_MESSAGE in self.conf
and ATTR_TOPIC in self.conf[CONF_BIRTH_MESSAGE]
):
async def publish_birth_message(birth_message):
await self._ha_started.wait() # Wait for Home Assistant to start
await self._discovery_cooldown() # Wait for MQTT discovery to cool down
await self.async_publish( # pylint: disable=no-value-for-parameter
topic=birth_message.topic,
payload=birth_message.payload,
qos=birth_message.qos,
retain=birth_message.retain,
)
birth_message = Message(**self.conf[CONF_BIRTH_MESSAGE])
self.hass.loop.create_task(publish_birth_message(birth_message))
def _mqtt_on_message(self, _mqttc, _userdata, msg) -> None:
"""Message received callback."""
self.hass.add_job(self._mqtt_handle_message, msg)
@callback
def _mqtt_handle_message(self, msg) -> None:
_LOGGER.debug(
"Received message on %s%s: %s",
msg.topic,
" (retained)" if msg.retain else "",
msg.payload,
)
timestamp = dt_util.utcnow()
for subscription in self.subscriptions:
if not _match_topic(subscription.topic, msg.topic):
continue
payload: SubscribePayloadType = msg.payload
if subscription.encoding is not None:
try:
payload = msg.payload.decode(subscription.encoding)
except (AttributeError, UnicodeDecodeError):
_LOGGER.warning(
"Can't decode payload %s on %s with encoding %s (for %s)",
msg.payload,
msg.topic,
subscription.encoding,
subscription.callback,
)
continue
self.hass.async_run_job(
subscription.callback,
Message(
msg.topic,
payload,
msg.qos,
msg.retain,
subscription.topic,
timestamp,
),
)
def _mqtt_on_callback(self, _mqttc, _userdata, mid, _granted_qos=None) -> None:
"""Publish / Subscribe / Unsubscribe callback."""
self.hass.add_job(self._mqtt_handle_mid, mid)
@callback
def _mqtt_handle_mid(self, mid) -> None:
# Create the mid event if not created, either _mqtt_handle_mid or _wait_for_mid
# may be executed first.
if mid not in self._pending_operations:
self._pending_operations[mid] = asyncio.Event()
self._pending_operations[mid].set()
def _mqtt_on_disconnect(self, _mqttc, _userdata, result_code: int) -> None:
"""Disconnected callback."""
self.connected = False
dispatcher_send(self.hass, MQTT_DISCONNECTED)
_LOGGER.warning(
"Disconnected from MQTT server %s:%s (%s)",
self.conf[CONF_BROKER],
self.conf[CONF_PORT],
result_code,
)
async def _wait_for_mid(self, mid):
"""Wait for ACK from broker."""
# Create the mid event if not created, either _mqtt_handle_mid or _wait_for_mid
# may be executed first.
if mid not in self._pending_operations:
self._pending_operations[mid] = asyncio.Event()
try:
await asyncio.wait_for(self._pending_operations[mid].wait(), TIMEOUT_ACK)
except asyncio.TimeoutError:
_LOGGER.error("Timed out waiting for mid %s", mid)
finally:
del self._pending_operations[mid]
async def _discovery_cooldown(self):
now = time.time()
# Reset discovery and subscribe cooldowns
self.hass.data[LAST_DISCOVERY] = now
self._last_subscribe = now
last_discovery = self.hass.data[LAST_DISCOVERY]
last_subscribe = self._last_subscribe
wait_until = max(
last_discovery + DISCOVERY_COOLDOWN, last_subscribe + DISCOVERY_COOLDOWN
)
while now < wait_until:
await asyncio.sleep(wait_until - now)
now = time.time()
last_discovery = self.hass.data[LAST_DISCOVERY]
last_subscribe = self._last_subscribe
wait_until = max(
last_discovery + DISCOVERY_COOLDOWN, last_subscribe + DISCOVERY_COOLDOWN
)
def _raise_on_error(result_code: int) -> None:
"""Raise error if error result."""
# pylint: disable=import-outside-toplevel
import paho.mqtt.client as mqtt
if result_code != 0:
raise HomeAssistantError(
f"Error talking to MQTT: {mqtt.error_string(result_code)}"
)
def _match_topic(subscription: str, topic: str) -> bool:
"""Test if topic matches subscription."""
# pylint: disable=import-outside-toplevel
from paho.mqtt.matcher import MQTTMatcher
matcher = MQTTMatcher()
matcher[subscription] = True
try:
next(matcher.iter_match(topic))
return True
except StopIteration:
return False
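# Standard MQTT wildcard semantics as applied above (topics are illustrative):
#   _match_topic("sensor/+/state", "sensor/kitchen/state")    -> True
#   _match_topic("sensor/#", "sensor/kitchen/temperature")    -> True
#   _match_topic("sensor/+/state", "sensor/kitchen/battery")  -> False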
class MqttAttributes(Entity):
"""Mixin used for platforms that support JSON attributes."""
def __init__(self, config: dict) -> None:
"""Initialize the JSON attributes mixin."""
self._attributes = None
self._attributes_sub_state = None
self._attributes_config = config
async def async_added_to_hass(self) -> None:
"""Subscribe MQTT events."""
await super().async_added_to_hass()
await self._attributes_subscribe_topics()
async def attributes_discovery_update(self, config: dict):
"""Handle updated discovery message."""
self._attributes_config = config
await self._attributes_subscribe_topics()
async def _attributes_subscribe_topics(self):
"""(Re)Subscribe to topics."""
attr_tpl = self._attributes_config.get(CONF_JSON_ATTRS_TEMPLATE)
if attr_tpl is not None:
attr_tpl.hass = self.hass
@callback
@log_messages(self.hass, self.entity_id)
def attributes_message_received(msg: Message) -> None:
try:
payload = msg.payload
if attr_tpl is not None:
payload = attr_tpl.async_render_with_possible_json_value(payload)
json_dict = json.loads(payload)
if isinstance(json_dict, dict):
self._attributes = json_dict
self.async_write_ha_state()
else:
_LOGGER.warning("JSON result was not a dictionary")
self._attributes = None
except ValueError:
_LOGGER.warning("Erroneous JSON: %s", payload)
self._attributes = None
self._attributes_sub_state = await async_subscribe_topics(
self.hass,
self._attributes_sub_state,
{
CONF_JSON_ATTRS_TOPIC: {
"topic": self._attributes_config.get(CONF_JSON_ATTRS_TOPIC),
"msg_callback": attributes_message_received,
"qos": self._attributes_config.get(CONF_QOS),
}
},
)
async def async_will_remove_from_hass(self):
"""Unsubscribe when removed."""
self._attributes_sub_state = await async_unsubscribe_topics(
self.hass, self._attributes_sub_state
)
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._attributes
class MqttAvailability(Entity):
"""Mixin used for platforms that report availability."""
def __init__(self, config: dict) -> None:
"""Initialize the availability mixin."""
self._availability_sub_state = None
self._available = False
self._availability_setup_from_config(config)
async def async_added_to_hass(self) -> None:
"""Subscribe MQTT events."""
await super().async_added_to_hass()
await self._availability_subscribe_topics()
self.async_on_remove(
async_dispatcher_connect(self.hass, MQTT_CONNECTED, self.async_mqtt_connect)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass, MQTT_DISCONNECTED, self.async_mqtt_connect
)
)
async def availability_discovery_update(self, config: dict):
"""Handle updated discovery message."""
self._availability_setup_from_config(config)
await self._availability_subscribe_topics()
def _availability_setup_from_config(self, config):
"""(Re)Setup."""
self._avail_topics = {}
if CONF_AVAILABILITY_TOPIC in config:
self._avail_topics[config[CONF_AVAILABILITY_TOPIC]] = {
CONF_PAYLOAD_AVAILABLE: config[CONF_PAYLOAD_AVAILABLE],
CONF_PAYLOAD_NOT_AVAILABLE: config[CONF_PAYLOAD_NOT_AVAILABLE],
}
if CONF_AVAILABILITY in config:
for avail in config[CONF_AVAILABILITY]:
self._avail_topics[avail[CONF_TOPIC]] = {
CONF_PAYLOAD_AVAILABLE: avail[CONF_PAYLOAD_AVAILABLE],
CONF_PAYLOAD_NOT_AVAILABLE: avail[CONF_PAYLOAD_NOT_AVAILABLE],
}
self._avail_config = config
async def _availability_subscribe_topics(self):
"""(Re)Subscribe to topics."""
@callback
@log_messages(self.hass, self.entity_id)
def availability_message_received(msg: Message) -> None:
"""Handle a new received MQTT availability message."""
topic = msg.topic
if msg.payload == self._avail_topics[topic][CONF_PAYLOAD_AVAILABLE]:
self._available = True
elif msg.payload == self._avail_topics[topic][CONF_PAYLOAD_NOT_AVAILABLE]:
self._available = False
self.async_write_ha_state()
topics = {}
for topic in self._avail_topics:
topics[f"availability_{topic}"] = {
"topic": topic,
"msg_callback": availability_message_received,
"qos": self._avail_config[CONF_QOS],
}
self._availability_sub_state = await async_subscribe_topics(
self.hass, self._availability_sub_state, topics,
)
@callback
def async_mqtt_connect(self):
"""Update state on connection/disconnection to MQTT broker."""
if not self.hass.is_stopping:
self.async_write_ha_state()
async def async_will_remove_from_hass(self):
"""Unsubscribe when removed."""
self._availability_sub_state = await async_unsubscribe_topics(
self.hass, self._availability_sub_state
)
@property
def available(self) -> bool:
"""Return if the device is available."""
if not self.hass.data[DATA_MQTT].connected and not self.hass.is_stopping:
return False
return not self._avail_topics or self._available
async def cleanup_device_registry(hass, device_id):
"""Remove device registry entry if there are no remaining entities or triggers."""
# Local import to avoid circular dependencies
# pylint: disable=import-outside-toplevel
from . import device_trigger
device_registry = await hass.helpers.device_registry.async_get_registry()
entity_registry = await hass.helpers.entity_registry.async_get_registry()
if (
device_id
and not hass.helpers.entity_registry.async_entries_for_device(
entity_registry, device_id
)
and not await device_trigger.async_get_triggers(hass, device_id)
):
device_registry.async_remove_device(device_id)
class MqttDiscoveryUpdate(Entity):
"""Mixin used to handle updated discovery message."""
def __init__(self, discovery_data, discovery_update=None) -> None:
"""Initialize the discovery update mixin."""
self._discovery_data = discovery_data
self._discovery_update = discovery_update
self._remove_signal = None
self._removed_from_hass = False
async def async_added_to_hass(self) -> None:
"""Subscribe to discovery updates."""
await super().async_added_to_hass()
self._removed_from_hass = False
discovery_hash = (
self._discovery_data[ATTR_DISCOVERY_HASH] if self._discovery_data else None
)
async def _async_remove_state_and_registry_entry(self) -> None:
"""Remove entity's state and entity registry entry.
            Remove the entity from the entity registry if it is registered; this also removes the state.
If the entity is not in the entity registry, just remove the state.
"""
entity_registry = (
await self.hass.helpers.entity_registry.async_get_registry()
)
if entity_registry.async_is_registered(self.entity_id):
entity_entry = entity_registry.async_get(self.entity_id)
entity_registry.async_remove(self.entity_id)
await cleanup_device_registry(self.hass, entity_entry.device_id)
else:
await self.async_remove()
@callback
async def discovery_callback(payload):
"""Handle discovery update."""
_LOGGER.info(
"Got update for entity with hash: %s '%s'", discovery_hash, payload,
)
old_payload = self._discovery_data[ATTR_DISCOVERY_PAYLOAD]
debug_info.update_entity_discovery_data(self.hass, payload, self.entity_id)
if not payload:
# Empty payload: Remove component
_LOGGER.info("Removing component: %s", self.entity_id)
self._cleanup_discovery_on_remove()
await _async_remove_state_and_registry_entry(self)
elif self._discovery_update:
if old_payload != self._discovery_data[ATTR_DISCOVERY_PAYLOAD]:
# Non-empty, changed payload: Notify component
_LOGGER.info("Updating component: %s", self.entity_id)
await self._discovery_update(payload)
else:
# Non-empty, unchanged payload: Ignore to avoid changing states
_LOGGER.info("Ignoring unchanged update for: %s", self.entity_id)
if discovery_hash:
debug_info.add_entity_discovery_data(
self.hass, self._discovery_data, self.entity_id
)
# Set in case the entity has been removed and is re-added
set_discovery_hash(self.hass, discovery_hash)
self._remove_signal = async_dispatcher_connect(
self.hass,
MQTT_DISCOVERY_UPDATED.format(discovery_hash),
discovery_callback,
)
async def async_removed_from_registry(self) -> None:
"""Clear retained discovery topic in broker."""
if not self._removed_from_hass:
discovery_topic = self._discovery_data[ATTR_DISCOVERY_TOPIC]
publish(
self.hass, discovery_topic, "", retain=True,
)
async def async_will_remove_from_hass(self) -> None:
"""Stop listening to signal and cleanup discovery data.."""
self._cleanup_discovery_on_remove()
def _cleanup_discovery_on_remove(self) -> None:
"""Stop listening to signal and cleanup discovery data."""
if self._discovery_data and not self._removed_from_hass:
debug_info.remove_entity_data(self.hass, self.entity_id)
clear_discovery_hash(self.hass, self._discovery_data[ATTR_DISCOVERY_HASH])
self._removed_from_hass = True
if self._remove_signal:
self._remove_signal()
self._remove_signal = None
def device_info_from_config(config):
"""Return a device description for device registry."""
if not config:
return None
info = {
"identifiers": {(DOMAIN, id_) for id_ in config[CONF_IDENTIFIERS]},
"connections": {tuple(x) for x in config[CONF_CONNECTIONS]},
}
if CONF_MANUFACTURER in config:
info["manufacturer"] = config[CONF_MANUFACTURER]
if CONF_MODEL in config:
info["model"] = config[CONF_MODEL]
if CONF_NAME in config:
info["name"] = config[CONF_NAME]
if CONF_SW_VERSION in config:
info["sw_version"] = config[CONF_SW_VERSION]
if CONF_VIA_DEVICE in config:
info["via_device"] = (DOMAIN, config[CONF_VIA_DEVICE])
return info
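# For illustration (values are hypothetical):
#   device_info_from_config({"identifiers": ["abc123"], "connections": [],
#                            "manufacturer": "Acme"})
#   -> {"identifiers": {("mqtt", "abc123")}, "connections": set(),
#       "manufacturer": "Acme"}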
class MqttEntityDeviceInfo(Entity):
"""Mixin used for mqtt platforms that support the device registry."""
def __init__(self, device_config: Optional[ConfigType], config_entry=None) -> None:
"""Initialize the device mixin."""
self._device_config = device_config
self._config_entry = config_entry
async def device_info_discovery_update(self, config: dict):
"""Handle updated discovery message."""
self._device_config = config.get(CONF_DEVICE)
device_registry = await self.hass.helpers.device_registry.async_get_registry()
config_entry_id = self._config_entry.entry_id
device_info = self.device_info
if config_entry_id is not None and device_info is not None:
device_info["config_entry_id"] = config_entry_id
device_registry.async_get_or_create(**device_info)
@property
def device_info(self):
"""Return a device description for device registry."""
return device_info_from_config(self._device_config)
@websocket_api.websocket_command(
{vol.Required("type"): "mqtt/device/debug_info", vol.Required("device_id"): str}
)
@websocket_api.async_response
async def websocket_mqtt_info(hass, connection, msg):
"""Get MQTT debug info for device."""
device_id = msg["device_id"]
mqtt_info = await debug_info.info_for_device(hass, device_id)
connection.send_result(msg["id"], mqtt_info)
@websocket_api.websocket_command(
{vol.Required("type"): "mqtt/device/remove", vol.Required("device_id"): str}
)
@websocket_api.async_response
async def websocket_remove_device(hass, connection, msg):
"""Delete device."""
device_id = msg["device_id"]
dev_registry = await hass.helpers.device_registry.async_get_registry()
device = dev_registry.async_get(device_id)
if not device:
connection.send_error(
msg["id"], websocket_api.const.ERR_NOT_FOUND, "Device not found"
)
return
for config_entry in device.config_entries:
config_entry = hass.config_entries.async_get_entry(config_entry)
# Only delete the device if it belongs to an MQTT device entry
if config_entry.domain == DOMAIN:
dev_registry.async_remove_device(device_id)
connection.send_message(websocket_api.result_message(msg["id"]))
return
connection.send_error(
msg["id"], websocket_api.const.ERR_NOT_FOUND, "Non MQTT device"
)
@websocket_api.async_response
@websocket_api.websocket_command(
{
vol.Required("type"): "mqtt/subscribe",
vol.Required("topic"): valid_subscribe_topic,
}
)
async def websocket_subscribe(hass, connection, msg):
"""Subscribe to a MQTT topic."""
if not connection.user.is_admin:
raise Unauthorized
async def forward_messages(mqttmsg: Message):
"""Forward events to websocket."""
connection.send_message(
websocket_api.event_message(
msg["id"],
{
"topic": mqttmsg.topic,
"payload": mqttmsg.payload,
"qos": mqttmsg.qos,
"retain": mqttmsg.retain,
},
)
)
connection.subscriptions[msg["id"]] = await async_subscribe(
hass, msg["topic"], forward_messages
)
connection.send_message(websocket_api.result_message(msg["id"]))
|
|
#!/usr/bin/env python
# coding=utf-8
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import sys
import os
import mxnet as mx
import numpy as np
import argparse
import logging
import time
from mxnet import random
from mxnet.initializer import Xavier, Initializer
import data_helpers
fmt = '%(asctime)s:filename %(filename)s: lineno %(lineno)d:%(levelname)s:%(message)s'
logging.basicConfig(format=fmt, filemode='a+', filename='./cnn_text_classification.log', level=logging.DEBUG)
logger = logging.getLogger(__name__)
parser = argparse.ArgumentParser(description="CNN for text classification",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--pretrained-embedding', action='store_true',
                    help='use pre-trained word2vec')
parser.add_argument('--num-embed', type=int, default=300,
help='embedding layer size')
parser.add_argument('--gpus', type=str, default='',
help='list of gpus to run, e.g. 0 or 0,2,5. empty means using cpu. ')
parser.add_argument('--kv-store', type=str, default='local',
help='key-value store type')
parser.add_argument('--num-epochs', type=int, default=150,
help='max num of epochs')
parser.add_argument('--batch-size', type=int, default=50,
help='the batch size.')
parser.add_argument('--optimizer', type=str, default='rmsprop',
help='the optimizer type')
parser.add_argument('--lr', type=float, default=0.0005,
help='initial learning rate')
parser.add_argument('--dropout', type=float, default=0.0,
help='dropout rate')
parser.add_argument('--disp-batches', type=int, default=50,
help='show progress for every n batches')
parser.add_argument('--save-period', type=int, default=10,
help='save checkpoint for every n epochs')
def save_model():
if not os.path.exists("checkpoint"):
os.mkdir("checkpoint")
return mx.callback.do_checkpoint("checkpoint/checkpoint", args.save_period)
def highway(data):
_data = data
high_weight = mx.sym.Variable('high_weight')
high_bias = mx.sym.Variable('high_bias')
high_fc = mx.sym.FullyConnected(data=data, weight=high_weight, bias=high_bias, num_hidden=300, name='high_fc')
high_relu = mx.sym.Activation(high_fc, act_type='relu')
high_trans_weight = mx.sym.Variable('high_trans_weight')
high_trans_bias = mx.sym.Variable('high_trans_bias')
high_trans_fc = mx.sym.FullyConnected(data=_data, weight=high_trans_weight, bias=high_trans_bias, num_hidden=300,
name='high_trans_sigmoid')
high_trans_sigmoid = mx.sym.Activation(high_trans_fc, act_type='sigmoid')
return high_relu * high_trans_sigmoid + _data * (1 - high_trans_sigmoid)
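# The highway block above computes (symbols are for illustration only):
#   y = g * relu(W_h * x + b_h) + (1 - g) * x, with gate g = sigmoid(W_t * x + b_t),
# i.e. the transform gate decides how much of the non-linear path versus the
# identity path reaches the output.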
def data_iter(batch_size, num_embed, pre_trained_word2vec=False):
logger.info('Loading data...')
if pre_trained_word2vec:
word2vec = data_helpers.load_pretrained_word2vec('data/rt.vec')
x, y = data_helpers.load_data_with_word2vec(word2vec)
        # reshape for convolution input
x = np.reshape(x, (x.shape[0], 1, x.shape[1], x.shape[2]))
embed_size = x.shape[-1]
sentence_size = x.shape[2]
vocab_size = -1
else:
x, y, vocab, vocab_inv = data_helpers.load_data()
embed_size = num_embed
sentence_size = x.shape[1]
vocab_size = len(vocab)
# randomly shuffle data
np.random.seed(10)
shuffle_indices = np.random.permutation(np.arange(len(y)))
x_shuffled = x[shuffle_indices]
y_shuffled = y[shuffle_indices]
    # split train/valid set: hold out the last 1000 samples for validation
x_train, x_dev = x_shuffled[:-1000], x_shuffled[-1000:]
y_train, y_dev = y_shuffled[:-1000], y_shuffled[-1000:]
logger.info('Train/Valid split: %d/%d' % (len(y_train), len(y_dev)))
logger.info('train shape: %(shape)s', {'shape': x_train.shape})
logger.info('valid shape: %(shape)s', {'shape': x_dev.shape})
logger.info('sentence max words: %(shape)s', {'shape': sentence_size})
logger.info('embedding size: %(msg)s', {'msg': embed_size})
logger.info('vocab size: %(msg)s', {'msg': vocab_size})
train = mx.io.NDArrayIter(
x_train, y_train, batch_size, shuffle=True)
valid = mx.io.NDArrayIter(
x_dev, y_dev, batch_size)
return (train, valid, sentence_size, embed_size, vocab_size)
def sym_gen(batch_size, sentence_size, num_embed, vocab_size,
num_label=2, filter_list=[3, 4, 5], num_filter=100,
dropout=0.0, pre_trained_word2vec=False):
input_x = mx.sym.Variable('data')
input_y = mx.sym.Variable('softmax_label')
# embedding layer
if not pre_trained_word2vec:
embed_layer = mx.sym.Embedding(data=input_x, input_dim=vocab_size, output_dim=num_embed, name='vocab_embed')
conv_input = mx.sym.Reshape(data=embed_layer, target_shape=(batch_size, 1, sentence_size, num_embed))
else:
conv_input = input_x
# create convolution + (max) pooling layer for each filter operation
pooled_outputs = []
for i, filter_size in enumerate(filter_list):
convi = mx.sym.Convolution(data=conv_input, kernel=(filter_size, num_embed), num_filter=num_filter)
relui = mx.sym.Activation(data=convi, act_type='relu')
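        # max-pool over the full valid length (sentence_size - filter_size + 1)
        # so each filter yields a single feature per sentence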
pooli = mx.sym.Pooling(data=relui, pool_type='max', kernel=(sentence_size - filter_size + 1, 1), stride=(1, 1))
pooled_outputs.append(pooli)
# combine all pooled outputs
total_filters = num_filter * len(filter_list)
concat = mx.sym.Concat(*pooled_outputs, dim=1)
h_pool = mx.sym.Reshape(data=concat, target_shape=(batch_size, total_filters))
# highway network
h_pool = highway(h_pool)
# dropout layer
if dropout > 0.0:
h_drop = mx.sym.Dropout(data=h_pool, p=dropout)
else:
h_drop = h_pool
# fully connected
cls_weight = mx.sym.Variable('cls_weight')
cls_bias = mx.sym.Variable('cls_bias')
fc = mx.sym.FullyConnected(data=h_drop, weight=cls_weight, bias=cls_bias, num_hidden=num_label)
# softmax output
sm = mx.sym.SoftmaxOutput(data=fc, label=input_y, name='softmax')
return sm, ('data',), ('softmax_label',)
def train(symbol, train_iter, valid_iter, data_names, label_names):
    devs = mx.cpu() if args.gpus is None or args.gpus == '' else [
mx.gpu(int(i)) for i in args.gpus.split(',')]
module = mx.mod.Module(symbol, data_names=data_names, label_names=label_names, context=devs)
    init_params = {
        'vocab_embed_weight': {'uniform': 0.1},
        'convolution0_weight': {'uniform': 0.1}, 'convolution0_bias': {'constant': 0},
        'convolution1_weight': {'uniform': 0.1}, 'convolution1_bias': {'constant': 0},
        'convolution2_weight': {'uniform': 0.1}, 'convolution2_bias': {'constant': 0},
        'high_weight': {'uniform': 0.1}, 'high_bias': {'constant': 0},
        'high_trans_weight': {'uniform': 0.1}, 'high_trans_bias': {'constant': -2},
        'cls_weight': {'uniform': 0.1}, 'cls_bias': {'constant': 0},
    }
# custom init_params
module.bind(data_shapes=train_iter.provide_data, label_shapes=train_iter.provide_label)
module.init_params(CustomInit(init_params))
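    # decay the learning rate by a factor of 0.999 every 25,000 update steps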
lr_sch = mx.lr_scheduler.FactorScheduler(step=25000, factor=0.999)
module.init_optimizer(
optimizer='rmsprop', optimizer_params={'learning_rate': 0.0005, 'lr_scheduler': lr_sch})
def norm_stat(d):
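        # root-mean-square of the array's elements; printed by the monitor every 25,000 batches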
return mx.nd.norm(d) / np.sqrt(d.size)
mon = mx.mon.Monitor(25000, norm_stat)
module.fit(train_data=train_iter,
eval_data=valid_iter,
eval_metric='acc',
kvstore=args.kv_store,
monitor=mon,
num_epoch=args.num_epochs,
batch_end_callback=mx.callback.Speedometer(args.batch_size, args.disp_batches),
epoch_end_callback=save_model())
@mx.init.register
class CustomInit(Initializer):
"""
https://mxnet.incubator.apache.org/api/python/optimization.html#mxnet.initializer.register
    Create and register a custom initializer that initializes
    weights and biases according to the per-parameter settings passed to it.
"""
weightMethods = ["normal", "uniform", "orthogonal", "xavier"]
    biasMethods = ["constant"]
def __init__(self, kwargs):
self._kwargs = kwargs
super(CustomInit, self).__init__(**kwargs)
def _init_weight(self, name, arr):
if name in self._kwargs.keys():
init_params = self._kwargs[name]
for (k, v) in init_params.items():
if k.lower() == "normal":
random.normal(0, v, out=arr)
elif k.lower() == "uniform":
random.uniform(-v, v, out=arr)
elif k.lower() == "orthogonal":
raise NotImplementedError("Not support at the moment")
elif k.lower() == "xavier":
xa = Xavier(v[0], v[1], v[2])
xa(name, arr)
else:
raise NotImplementedError("Not support")
def _init_bias(self, name, arr):
if name in self._kwargs.keys():
init_params = self._kwargs[name]
for (k, v) in init_params.items():
if k.lower() == "costant":
arr[:] = v
else:
raise NotImplementedError("Not support")
if __name__ == '__main__':
# parse args
args = parser.parse_args()
# data iter
train_iter, valid_iter, sentence_size, embed_size, vocab_size = data_iter(args.batch_size,
args.num_embed,
args.pretrained_embedding)
# network symbol
symbol, data_names, label_names = sym_gen(args.batch_size,
sentence_size,
embed_size,
vocab_size,
num_label=2, filter_list=[3, 4, 5], num_filter=100,
dropout=args.dropout, pre_trained_word2vec=args.pretrained_embedding)
# train cnn model
train(symbol, train_iter, valid_iter, data_names, label_names)
|
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import unittest
from unittest import mock
from unittest.mock import MagicMock
from airflow.providers.apache.beam.operators.beam import (
BeamRunGoPipelineOperator,
BeamRunJavaPipelineOperator,
BeamRunPythonPipelineOperator,
)
from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration
from airflow.version import version
TASK_ID = 'test-beam-operator'
DEFAULT_RUNNER = "DirectRunner"
JOB_NAME = 'test-dataflow-pipeline-name'
JOB_ID = 'test-dataflow-pipeline-id'
JAR_FILE = 'gs://my-bucket/example/test.jar'
JOB_CLASS = 'com.test.NotMain'
PY_FILE = 'gs://my-bucket/my-object.py'
PY_INTERPRETER = 'python3'
PY_OPTIONS = ['-m']
GO_FILE = 'gs://my-bucket/example/main.go'
DEFAULT_OPTIONS_PYTHON = DEFAULT_OPTIONS_JAVA = {
'project': 'test',
'stagingLocation': 'gs://test/staging',
}
ADDITIONAL_OPTIONS = {'output': 'gs://test/output', 'labels': {'foo': 'bar'}}
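# Airflow version rendered as a label-safe value (dots and '+' replaced with '-')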
TEST_VERSION = f"v{version.replace('.', '-').replace('+', '-')}"
EXPECTED_ADDITIONAL_OPTIONS = {
'output': 'gs://test/output',
'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION},
}
class TestBeamRunPythonPipelineOperator(unittest.TestCase):
def setUp(self):
self.operator = BeamRunPythonPipelineOperator(
task_id=TASK_ID,
py_file=PY_FILE,
py_options=PY_OPTIONS,
default_pipeline_options=DEFAULT_OPTIONS_PYTHON,
pipeline_options=ADDITIONAL_OPTIONS,
)
def test_init(self):
"""Test BeamRunPythonPipelineOperator instance is properly initialized."""
self.assertEqual(self.operator.task_id, TASK_ID)
self.assertEqual(self.operator.py_file, PY_FILE)
self.assertEqual(self.operator.runner, DEFAULT_RUNNER)
self.assertEqual(self.operator.py_options, PY_OPTIONS)
self.assertEqual(self.operator.py_interpreter, PY_INTERPRETER)
self.assertEqual(self.operator.default_pipeline_options, DEFAULT_OPTIONS_PYTHON)
self.assertEqual(self.operator.pipeline_options, EXPECTED_ADDITIONAL_OPTIONS)
@mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook')
@mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook')
def test_exec_direct_runner(self, gcs_hook, beam_hook_mock):
"""Test BeamHook is created and the right args are passed to
        start_python_pipeline.
"""
start_python_hook = beam_hook_mock.return_value.start_python_pipeline
gcs_provide_file = gcs_hook.return_value.provide_file
self.operator.execute(None)
beam_hook_mock.assert_called_once_with(runner=DEFAULT_RUNNER)
expected_options = {
'project': 'test',
'staging_location': 'gs://test/staging',
'output': 'gs://test/output',
'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION},
}
gcs_provide_file.assert_called_once_with(object_url=PY_FILE)
start_python_hook.assert_called_once_with(
variables=expected_options,
py_file=gcs_provide_file.return_value.__enter__.return_value.name,
py_options=PY_OPTIONS,
py_interpreter=PY_INTERPRETER,
py_requirements=None,
py_system_site_packages=False,
process_line_callback=None,
)
@mock.patch('airflow.providers.apache.beam.operators.beam.DataflowJobLink.persist')
@mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook')
@mock.patch('airflow.providers.apache.beam.operators.beam.DataflowHook')
@mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook')
def test_exec_dataflow_runner(self, gcs_hook, dataflow_hook_mock, beam_hook_mock, persist_link_mock):
"""Test DataflowHook is created and the right args are passed to
        start_python_pipeline when running on the DataflowRunner.
"""
dataflow_config = DataflowConfiguration()
self.operator.runner = "DataflowRunner"
self.operator.dataflow_config = dataflow_config
gcs_provide_file = gcs_hook.return_value.provide_file
self.operator.execute(None)
job_name = dataflow_hook_mock.build_dataflow_job_name.return_value
dataflow_hook_mock.assert_called_once_with(
gcp_conn_id=dataflow_config.gcp_conn_id,
delegate_to=dataflow_config.delegate_to,
poll_sleep=dataflow_config.poll_sleep,
impersonation_chain=dataflow_config.impersonation_chain,
drain_pipeline=dataflow_config.drain_pipeline,
cancel_timeout=dataflow_config.cancel_timeout,
wait_until_finished=dataflow_config.wait_until_finished,
)
expected_options = {
'project': dataflow_hook_mock.return_value.project_id,
'job_name': job_name,
'staging_location': 'gs://test/staging',
'output': 'gs://test/output',
'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION},
'region': 'us-central1',
}
gcs_provide_file.assert_called_once_with(object_url=PY_FILE)
persist_link_mock.assert_called_once_with(
self.operator,
None,
expected_options['project'],
expected_options['region'],
self.operator.dataflow_job_id,
)
beam_hook_mock.return_value.start_python_pipeline.assert_called_once_with(
variables=expected_options,
py_file=gcs_provide_file.return_value.__enter__.return_value.name,
py_options=PY_OPTIONS,
py_interpreter=PY_INTERPRETER,
py_requirements=None,
py_system_site_packages=False,
process_line_callback=mock.ANY,
)
dataflow_hook_mock.return_value.wait_for_done.assert_called_once_with(
job_id=self.operator.dataflow_job_id,
job_name=job_name,
location='us-central1',
multiple_jobs=False,
)
dataflow_hook_mock.return_value.provide_authorized_gcloud.assert_called_once_with()
@mock.patch('airflow.providers.apache.beam.operators.beam.DataflowJobLink.persist')
@mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook')
@mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook')
@mock.patch('airflow.providers.apache.beam.operators.beam.DataflowHook')
def test_on_kill_dataflow_runner(self, dataflow_hook_mock, _, __, ___):
self.operator.runner = "DataflowRunner"
dataflow_cancel_job = dataflow_hook_mock.return_value.cancel_job
self.operator.execute(None)
self.operator.dataflow_job_id = JOB_ID
self.operator.on_kill()
dataflow_cancel_job.assert_called_once_with(
job_id=JOB_ID, project_id=self.operator.dataflow_config.project_id
)
@mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook')
@mock.patch('airflow.providers.apache.beam.operators.beam.DataflowHook')
@mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook')
def test_on_kill_direct_runner(self, _, dataflow_mock, __):
dataflow_cancel_job = dataflow_mock.return_value.cancel_job
self.operator.execute(None)
self.operator.on_kill()
dataflow_cancel_job.assert_not_called()
class TestBeamRunJavaPipelineOperator(unittest.TestCase):
def setUp(self):
self.operator = BeamRunJavaPipelineOperator(
task_id=TASK_ID,
jar=JAR_FILE,
job_class=JOB_CLASS,
default_pipeline_options=DEFAULT_OPTIONS_JAVA,
pipeline_options=ADDITIONAL_OPTIONS,
)
def test_init(self):
"""Test BeamRunJavaPipelineOperator instance is properly initialized."""
self.assertEqual(self.operator.task_id, TASK_ID)
self.assertEqual(self.operator.runner, DEFAULT_RUNNER)
self.assertEqual(self.operator.default_pipeline_options, DEFAULT_OPTIONS_JAVA)
self.assertEqual(self.operator.job_class, JOB_CLASS)
self.assertEqual(self.operator.jar, JAR_FILE)
self.assertEqual(self.operator.pipeline_options, ADDITIONAL_OPTIONS)
@mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook')
@mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook')
def test_exec_direct_runner(self, gcs_hook, beam_hook_mock):
"""Test BeamHook is created and the right args are passed to
        start_java_pipeline.
"""
start_java_hook = beam_hook_mock.return_value.start_java_pipeline
gcs_provide_file = gcs_hook.return_value.provide_file
self.operator.execute(None)
beam_hook_mock.assert_called_once_with(runner=DEFAULT_RUNNER)
gcs_provide_file.assert_called_once_with(object_url=JAR_FILE)
start_java_hook.assert_called_once_with(
variables={**DEFAULT_OPTIONS_JAVA, **ADDITIONAL_OPTIONS},
jar=gcs_provide_file.return_value.__enter__.return_value.name,
job_class=JOB_CLASS,
process_line_callback=None,
)
@mock.patch('airflow.providers.apache.beam.operators.beam.DataflowJobLink.persist')
@mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook')
@mock.patch('airflow.providers.apache.beam.operators.beam.DataflowHook')
@mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook')
def test_exec_dataflow_runner(self, gcs_hook, dataflow_hook_mock, beam_hook_mock, persist_link_mock):
"""Test DataflowHook is created and the right args are passed to
        start_java_pipeline when running on the DataflowRunner.
"""
dataflow_config = DataflowConfiguration()
self.operator.runner = "DataflowRunner"
self.operator.dataflow_config = dataflow_config
gcs_provide_file = gcs_hook.return_value.provide_file
dataflow_hook_mock.return_value.is_job_dataflow_running.return_value = False
self.operator.execute(None)
job_name = dataflow_hook_mock.build_dataflow_job_name.return_value
dataflow_hook_mock.assert_called_once_with(
gcp_conn_id=dataflow_config.gcp_conn_id,
delegate_to=dataflow_config.delegate_to,
poll_sleep=dataflow_config.poll_sleep,
impersonation_chain=dataflow_config.impersonation_chain,
drain_pipeline=dataflow_config.drain_pipeline,
cancel_timeout=dataflow_config.cancel_timeout,
wait_until_finished=dataflow_config.wait_until_finished,
)
gcs_provide_file.assert_called_once_with(object_url=JAR_FILE)
expected_options = {
'project': dataflow_hook_mock.return_value.project_id,
'jobName': job_name,
'stagingLocation': 'gs://test/staging',
'region': 'us-central1',
'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION},
'output': 'gs://test/output',
}
persist_link_mock.assert_called_once_with(
self.operator,
None,
expected_options['project'],
expected_options['region'],
self.operator.dataflow_job_id,
)
beam_hook_mock.return_value.start_java_pipeline.assert_called_once_with(
variables=expected_options,
jar=gcs_provide_file.return_value.__enter__.return_value.name,
job_class=JOB_CLASS,
process_line_callback=mock.ANY,
)
dataflow_hook_mock.return_value.wait_for_done.assert_called_once_with(
job_id=self.operator.dataflow_job_id,
job_name=job_name,
location='us-central1',
multiple_jobs=False,
project_id=dataflow_hook_mock.return_value.project_id,
)
@mock.patch('airflow.providers.apache.beam.operators.beam.DataflowJobLink.persist')
@mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook')
@mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook')
@mock.patch('airflow.providers.apache.beam.operators.beam.DataflowHook')
def test_on_kill_dataflow_runner(self, dataflow_hook_mock, _, __, ___):
self.operator.runner = "DataflowRunner"
dataflow_hook_mock.return_value.is_job_dataflow_running.return_value = False
dataflow_cancel_job = dataflow_hook_mock.return_value.cancel_job
self.operator.execute(None)
self.operator.dataflow_job_id = JOB_ID
self.operator.on_kill()
dataflow_cancel_job.assert_called_once_with(
job_id=JOB_ID, project_id=self.operator.dataflow_config.project_id
)
@mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook')
@mock.patch('airflow.providers.apache.beam.operators.beam.DataflowHook')
@mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook')
def test_on_kill_direct_runner(self, _, dataflow_mock, __):
dataflow_cancel_job = dataflow_mock.return_value.cancel_job
self.operator.execute(None)
self.operator.on_kill()
dataflow_cancel_job.assert_not_called()
class TestBeamRunGoPipelineOperator(unittest.TestCase):
def setUp(self):
self.operator = BeamRunGoPipelineOperator(
task_id=TASK_ID,
go_file=GO_FILE,
default_pipeline_options=DEFAULT_OPTIONS_PYTHON,
pipeline_options=ADDITIONAL_OPTIONS,
)
def test_init(self):
"""Test BeamRunGoPipelineOperator instance is properly initialized."""
self.assertEqual(self.operator.task_id, TASK_ID)
self.assertEqual(self.operator.go_file, GO_FILE)
self.assertEqual(self.operator.runner, DEFAULT_RUNNER)
self.assertEqual(self.operator.default_pipeline_options, DEFAULT_OPTIONS_PYTHON)
self.assertEqual(self.operator.pipeline_options, EXPECTED_ADDITIONAL_OPTIONS)
@mock.patch(
"tempfile.TemporaryDirectory",
return_value=MagicMock(__enter__=MagicMock(return_value='/tmp/apache-beam-go')),
)
@mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook')
@mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook')
def test_exec_direct_runner(self, gcs_hook, beam_hook_mock, _):
"""Test BeamHook is created and the right args are passed to
        start_go_pipeline.
"""
start_go_pipeline_method = beam_hook_mock.return_value.start_go_pipeline
gcs_provide_file_method = gcs_hook.return_value.provide_file
self.operator.execute(None)
beam_hook_mock.assert_called_once_with(runner=DEFAULT_RUNNER)
expected_options = {
'project': 'test',
'staging_location': 'gs://test/staging',
'output': 'gs://test/output',
'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION},
}
gcs_provide_file_method.assert_called_once_with(object_url=GO_FILE, dir="/tmp/apache-beam-go")
start_go_pipeline_method.assert_called_once_with(
variables=expected_options,
go_file=gcs_provide_file_method.return_value.__enter__.return_value.name,
process_line_callback=None,
should_init_module=True,
)
@mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook')
@mock.patch('airflow.providers.google.go_module_utils.init_module')
def test_exec_source_on_local_path(self, init_module, beam_hook_mock):
"""
        Check that start_go_pipeline is called without initializing the Go module when the source is a local path.
"""
local_go_file_path = '/tmp/file/path/example.go'
operator = BeamRunGoPipelineOperator(
task_id=TASK_ID,
go_file=local_go_file_path,
)
start_go_pipeline_method = beam_hook_mock.return_value.start_go_pipeline
operator.execute(None)
beam_hook_mock.assert_called_once_with(runner=DEFAULT_RUNNER)
init_module.assert_not_called()
start_go_pipeline_method.assert_called_once_with(
variables={'labels': {'airflow-version': TEST_VERSION}},
go_file=local_go_file_path,
process_line_callback=None,
should_init_module=False,
)
@mock.patch('airflow.providers.apache.beam.operators.beam.DataflowJobLink.persist')
@mock.patch(
"tempfile.TemporaryDirectory",
return_value=MagicMock(__enter__=MagicMock(return_value='/tmp/apache-beam-go')),
)
@mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook')
@mock.patch('airflow.providers.apache.beam.operators.beam.DataflowHook')
@mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook')
def test_exec_dataflow_runner(self, gcs_hook, dataflow_hook_mock, beam_hook_mock, _, persist_link_mock):
"""Test DataflowHook is created and the right args are passed to
        start_go_pipeline when running on the DataflowRunner.
"""
dataflow_config = DataflowConfiguration()
self.operator.runner = "DataflowRunner"
self.operator.dataflow_config = dataflow_config
gcs_provide_file = gcs_hook.return_value.provide_file
self.operator.execute(None)
job_name = dataflow_hook_mock.build_dataflow_job_name.return_value
dataflow_hook_mock.assert_called_once_with(
gcp_conn_id=dataflow_config.gcp_conn_id,
delegate_to=dataflow_config.delegate_to,
poll_sleep=dataflow_config.poll_sleep,
impersonation_chain=dataflow_config.impersonation_chain,
drain_pipeline=dataflow_config.drain_pipeline,
cancel_timeout=dataflow_config.cancel_timeout,
wait_until_finished=dataflow_config.wait_until_finished,
)
expected_options = {
'project': dataflow_hook_mock.return_value.project_id,
'job_name': job_name,
'staging_location': 'gs://test/staging',
'output': 'gs://test/output',
'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION},
'region': 'us-central1',
}
persist_link_mock.assert_called_once_with(
self.operator,
None,
expected_options['project'],
expected_options['region'],
self.operator.dataflow_job_id,
)
gcs_provide_file.assert_called_once_with(object_url=GO_FILE, dir='/tmp/apache-beam-go')
beam_hook_mock.return_value.start_go_pipeline.assert_called_once_with(
variables=expected_options,
go_file=gcs_provide_file.return_value.__enter__.return_value.name,
process_line_callback=mock.ANY,
should_init_module=True,
)
dataflow_hook_mock.return_value.wait_for_done.assert_called_once_with(
job_id=self.operator.dataflow_job_id,
job_name=job_name,
location='us-central1',
multiple_jobs=False,
)
dataflow_hook_mock.return_value.provide_authorized_gcloud.assert_called_once_with()
@mock.patch('airflow.providers.apache.beam.operators.beam.DataflowJobLink.persist')
@mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook')
@mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook')
@mock.patch('airflow.providers.apache.beam.operators.beam.DataflowHook')
def test_on_kill_dataflow_runner(self, dataflow_hook_mock, _, __, ___):
self.operator.runner = "DataflowRunner"
dataflow_cancel_job = dataflow_hook_mock.return_value.cancel_job
self.operator.execute(None)
self.operator.dataflow_job_id = JOB_ID
self.operator.on_kill()
dataflow_cancel_job.assert_called_once_with(
job_id=JOB_ID, project_id=self.operator.dataflow_config.project_id
)
@mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook')
@mock.patch('airflow.providers.apache.beam.operators.beam.DataflowHook')
@mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook')
def test_on_kill_direct_runner(self, _, dataflow_mock, __):
dataflow_cancel_job = dataflow_mock.return_value.cancel_job
self.operator.execute(None)
self.operator.on_kill()
dataflow_cancel_job.assert_not_called()
|
|
import csv
import glob
import json
import os
from collections import namedtuple
import unittest
import tempfile
import shutil
import numpy as np
from ray.cloudpickle import cloudpickle
from ray.tune.logger import (
CSVLoggerCallback,
JsonLoggerCallback,
JsonLogger,
CSVLogger,
TBXLoggerCallback,
TBXLogger,
)
from ray.tune.result import (
EXPR_PARAM_FILE,
EXPR_PARAM_PICKLE_FILE,
EXPR_PROGRESS_FILE,
EXPR_RESULT_FILE,
)
class Trial(namedtuple("MockTrial", ["evaluated_params", "trial_id", "logdir"])):
@property
def config(self):
return self.evaluated_params
def init_logdir(self):
return
def __hash__(self):
return hash(self.trial_id)
def result(t, rew, **kwargs):
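    """Build a minimal Tune-style result dict for the given time and reward."""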
results = dict(
time_total_s=t,
episode_reward_mean=rew,
mean_accuracy=rew * 2,
training_iteration=int(t),
)
results.update(kwargs)
return results
class LoggerSuite(unittest.TestCase):
"""Test built-in loggers."""
def setUp(self):
self.test_dir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.test_dir, ignore_errors=True)
def testLegacyCSV(self):
config = {"a": 2, "b": 5, "c": {"c": {"D": 123}, "e": None}}
t = Trial(evaluated_params=config, trial_id="csv", logdir=self.test_dir)
logger = CSVLogger(config=config, logdir=self.test_dir, trial=t)
logger.on_result(result(2, 4))
logger.on_result(result(2, 5))
logger.on_result(result(2, 6, score=[1, 2, 3], hello={"world": 1}))
logger.close()
self._validate_csv_result()
def testCSV(self):
config = {"a": 2, "b": 5, "c": {"c": {"D": 123}, "e": None}}
t = Trial(evaluated_params=config, trial_id="csv", logdir=self.test_dir)
logger = CSVLoggerCallback()
logger.on_trial_result(0, [], t, result(0, 4))
logger.on_trial_result(1, [], t, result(1, 5))
logger.on_trial_result(
2, [], t, result(2, 6, score=[1, 2, 3], hello={"world": 1})
)
logger.on_trial_complete(3, [], t)
self._validate_csv_result()
def _validate_csv_result(self):
results = []
result_file = os.path.join(self.test_dir, EXPR_PROGRESS_FILE)
with open(result_file, "rt") as fp:
reader = csv.DictReader(fp)
for row in reader:
results.append(row)
self.assertEqual(len(results), 3)
self.assertSequenceEqual(
[int(row["episode_reward_mean"]) for row in results], [4, 5, 6]
)
def testJSONLegacyLogger(self):
config = {"a": 2, "b": 5, "c": {"c": {"D": 123}, "e": None}}
t = Trial(evaluated_params=config, trial_id="json", logdir=self.test_dir)
logger = JsonLogger(config=config, logdir=self.test_dir, trial=t)
logger.on_result(result(0, 4))
logger.on_result(result(1, 5))
logger.on_result(result(2, 6, score=[1, 2, 3], hello={"world": 1}))
logger.close()
self._validate_json_result(config)
def testJSON(self):
config = {"a": 2, "b": 5, "c": {"c": {"D": 123}, "e": None}}
t = Trial(evaluated_params=config, trial_id="json", logdir=self.test_dir)
logger = JsonLoggerCallback()
logger.on_trial_result(0, [], t, result(0, 4))
logger.on_trial_result(1, [], t, result(1, 5))
logger.on_trial_result(
2, [], t, result(2, 6, score=[1, 2, 3], hello={"world": 1})
)
logger.on_trial_complete(3, [], t)
self._validate_json_result(config)
def _validate_json_result(self, config):
# Check result logs
results = []
result_file = os.path.join(self.test_dir, EXPR_RESULT_FILE)
with open(result_file, "rt") as fp:
for row in fp.readlines():
results.append(json.loads(row))
self.assertEqual(len(results), 3)
self.assertSequenceEqual(
[int(row["episode_reward_mean"]) for row in results], [4, 5, 6]
)
# Check json saved config file
config_file = os.path.join(self.test_dir, EXPR_PARAM_FILE)
with open(config_file, "rt") as fp:
loaded_config = json.load(fp)
self.assertEqual(loaded_config, config)
# Check pickled config file
config_file = os.path.join(self.test_dir, EXPR_PARAM_PICKLE_FILE)
with open(config_file, "rb") as fp:
loaded_config = cloudpickle.load(fp)
self.assertEqual(loaded_config, config)
def testLegacyTBX(self):
config = {
"a": 2,
"b": [1, 2],
"c": {"c": {"D": 123}},
"d": np.int64(1),
"e": np.bool8(True),
"f": None,
}
t = Trial(evaluated_params=config, trial_id="tbx", logdir=self.test_dir)
logger = TBXLogger(config=config, logdir=self.test_dir, trial=t)
logger.on_result(result(0, 4))
logger.on_result(result(1, 5))
logger.on_result(result(2, 6, score=[1, 2, 3], hello={"world": 1}))
logger.close()
self._validate_tbx_result()
def testTBX(self):
config = {
"a": 2,
"b": [1, 2],
"c": {"c": {"D": 123}},
"int32": np.int32(1),
"int64": np.int64(2),
"bool8": np.bool8(True),
"float32": np.float32(3),
"float64": np.float64(4),
"bad": np.float128(4),
}
t = Trial(evaluated_params=config, trial_id="tbx", logdir=self.test_dir)
logger = TBXLoggerCallback()
logger.on_trial_result(0, [], t, result(0, 4))
logger.on_trial_result(1, [], t, result(1, 5))
logger.on_trial_result(
2, [], t, result(2, 6, score=[1, 2, 3], hello={"world": 1})
)
logger.on_trial_complete(3, [], t)
self._validate_tbx_result(
params=(b"float32", b"float64", b"int32", b"int64", b"bool8"),
excluded_params=(b"bad",),
)
def _validate_tbx_result(self, params=None, excluded_params=None):
try:
from tensorflow.python.summary.summary_iterator import summary_iterator
except ImportError:
print("Skipping rest of test as tensorflow is not installed.")
return
events_file = list(glob.glob(f"{self.test_dir}/events*"))[0]
results = []
excluded_params = excluded_params or []
for event in summary_iterator(events_file):
for v in event.summary.value:
if v.tag == "ray/tune/episode_reward_mean":
results.append(v.simple_value)
elif v.tag == "_hparams_/experiment" and params:
for key in params:
self.assertIn(key, v.metadata.plugin_data.content)
for key in excluded_params:
self.assertNotIn(key, v.metadata.plugin_data.content)
elif v.tag == "_hparams_/session_start_info" and params:
for key in params:
self.assertIn(key, v.metadata.plugin_data.content)
for key in excluded_params:
self.assertNotIn(key, v.metadata.plugin_data.content)
self.assertEqual(len(results), 3)
self.assertSequenceEqual([int(res) for res in results], [4, 5, 6])
def testLegacyBadTBX(self):
config = {"b": (1, 2, 3)}
t = Trial(evaluated_params=config, trial_id="tbx", logdir=self.test_dir)
logger = TBXLogger(config=config, logdir=self.test_dir, trial=t)
logger.on_result(result(0, 4))
logger.on_result(result(2, 4, score=[1, 2, 3], hello={"world": 1}))
with self.assertLogs("ray.tune.logger", level="INFO") as cm:
logger.close()
assert "INFO" in cm.output[0]
def testBadTBX(self):
config = {"b": (1, 2, 3)}
t = Trial(evaluated_params=config, trial_id="tbx", logdir=self.test_dir)
logger = TBXLoggerCallback()
logger.on_trial_result(0, [], t, result(0, 4))
logger.on_trial_result(1, [], t, result(1, 5))
logger.on_trial_result(
2, [], t, result(2, 6, score=[1, 2, 3], hello={"world": 1})
)
with self.assertLogs("ray.tune.logger", level="INFO") as cm:
logger.on_trial_complete(3, [], t)
assert "INFO" in cm.output[0]
if __name__ == "__main__":
import pytest
import sys
sys.exit(pytest.main(["-v", __file__] + sys.argv[1:]))
|
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import io
import unittest
from unittest.mock import call, patch
from airflow import AirflowException
from airflow.models import Connection
from airflow.providers.apache.spark.hooks.spark_submit import SparkSubmitHook
from airflow.utils import db
class TestSparkSubmitHook(unittest.TestCase):
_spark_job_file = 'test_application.py'
_config = {
'conf': {
'parquet.compression': 'SNAPPY'
},
'conn_id': 'default_spark',
'files': 'hive-site.xml',
'py_files': 'sample_library.py',
'archives': 'sample_archive.zip#SAMPLE',
'jars': 'parquet.jar',
'packages': 'com.databricks:spark-avro_2.11:3.2.0',
'exclude_packages': 'org.bad.dependency:1.0.0',
'repositories': 'http://myrepo.org',
'total_executor_cores': 4,
'executor_cores': 4,
'executor_memory': '22g',
'keytab': 'privileged_user.keytab',
'principal': 'user/spark@airflow.org',
'proxy_user': 'sample_user',
'name': 'spark-job',
'num_executors': 10,
'verbose': True,
'driver_memory': '3g',
'java_class': 'com.foo.bar.AppMain',
'application_args': [
'-f', 'foo',
'--bar', 'bar',
            '--with-spaces', 'args should keep embedded spaces',
'baz'
]
}
@staticmethod
def cmd_args_to_dict(list_cmd):
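        """Map each '--flag' in a spark-submit command list to the value that follows it."""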
return_dict = {}
for arg in list_cmd:
if arg.startswith("--"):
pos = list_cmd.index(arg)
return_dict[arg] = list_cmd[pos + 1]
return return_dict
def setUp(self):
db.merge_conn(
Connection(
conn_id='spark_yarn_cluster', conn_type='spark',
host='yarn://yarn-master',
extra='{"queue": "root.etl", "deploy-mode": "cluster"}')
)
db.merge_conn(
Connection(
conn_id='spark_k8s_cluster', conn_type='spark',
host='k8s://https://k8s-master',
extra='{"spark-home": "/opt/spark", ' +
'"deploy-mode": "cluster", ' +
'"namespace": "mynamespace"}')
)
db.merge_conn(
Connection(
conn_id='spark_default_mesos', conn_type='spark',
host='mesos://host', port=5050)
)
db.merge_conn(
Connection(
conn_id='spark_home_set', conn_type='spark',
host='yarn://yarn-master',
extra='{"spark-home": "/opt/myspark"}')
)
db.merge_conn(
Connection(
conn_id='spark_home_not_set', conn_type='spark',
host='yarn://yarn-master')
)
db.merge_conn(
Connection(
conn_id='spark_binary_set', conn_type='spark',
host='yarn', extra='{"spark-binary": "custom-spark-submit"}')
)
db.merge_conn(
Connection(
conn_id='spark_binary_and_home_set', conn_type='spark',
host='yarn',
extra='{"spark-home": "/path/to/spark_home", ' +
'"spark-binary": "custom-spark-submit"}')
)
db.merge_conn(
Connection(
conn_id='spark_standalone_cluster', conn_type='spark',
host='spark://spark-standalone-master:6066',
extra='{"spark-home": "/path/to/spark_home", "deploy-mode": "cluster"}')
)
db.merge_conn(
Connection(
conn_id='spark_standalone_cluster_client_mode', conn_type='spark',
host='spark://spark-standalone-master:6066',
extra='{"spark-home": "/path/to/spark_home", "deploy-mode": "client"}')
)
def test_build_spark_submit_command(self):
# Given
hook = SparkSubmitHook(**self._config)
# When
cmd = hook._build_spark_submit_command(self._spark_job_file)
# Then
expected_build_cmd = [
'spark-submit',
'--master', 'yarn',
'--conf', 'parquet.compression=SNAPPY',
'--files', 'hive-site.xml',
'--py-files', 'sample_library.py',
'--archives', 'sample_archive.zip#SAMPLE',
'--jars', 'parquet.jar',
'--packages', 'com.databricks:spark-avro_2.11:3.2.0',
'--exclude-packages', 'org.bad.dependency:1.0.0',
'--repositories', 'http://myrepo.org',
'--num-executors', '10',
'--total-executor-cores', '4',
'--executor-cores', '4',
'--executor-memory', '22g',
'--driver-memory', '3g',
'--keytab', 'privileged_user.keytab',
'--principal', 'user/spark@airflow.org',
'--proxy-user', 'sample_user',
'--name', 'spark-job',
'--class', 'com.foo.bar.AppMain',
'--verbose',
'test_application.py',
'-f', 'foo',
'--bar', 'bar',
            '--with-spaces', 'args should keep embedded spaces',
'baz'
]
self.assertEqual(expected_build_cmd, cmd)
def test_build_track_driver_status_command(self):
        # note: this is only relevant for a Spark setup matching the condition below:
# 'spark://' in self._connection['master'] and self._connection['deploy_mode'] == 'cluster'
# Given
hook_spark_standalone_cluster = SparkSubmitHook(
conn_id='spark_standalone_cluster')
hook_spark_standalone_cluster._driver_id = 'driver-20171128111416-0001'
hook_spark_yarn_cluster = SparkSubmitHook(
conn_id='spark_yarn_cluster')
hook_spark_yarn_cluster._driver_id = 'driver-20171128111417-0001'
# When
build_track_driver_status_spark_standalone_cluster = \
hook_spark_standalone_cluster._build_track_driver_status_command()
build_track_driver_status_spark_yarn_cluster = \
hook_spark_yarn_cluster._build_track_driver_status_command()
# Then
expected_spark_standalone_cluster = [
'/usr/bin/curl',
'--max-time',
'30',
'http://spark-standalone-master:6066/v1/submissions/status/driver-20171128111416-0001']
expected_spark_yarn_cluster = [
'spark-submit', '--master', 'yarn://yarn-master', '--status', 'driver-20171128111417-0001']
assert expected_spark_standalone_cluster == build_track_driver_status_spark_standalone_cluster
assert expected_spark_yarn_cluster == build_track_driver_status_spark_yarn_cluster
@patch('airflow.providers.apache.spark.hooks.spark_submit.subprocess.Popen')
def test_spark_process_runcmd(self, mock_popen):
# Given
mock_popen.return_value.stdout = io.StringIO('stdout')
mock_popen.return_value.stderr = io.StringIO('stderr')
mock_popen.return_value.wait.return_value = 0
# When
hook = SparkSubmitHook(conn_id='')
hook.submit()
# Then
self.assertEqual(mock_popen.mock_calls[0],
call(['spark-submit', '--master', 'yarn',
'--name', 'default-name', ''],
stderr=-2, stdout=-1, universal_newlines=True, bufsize=-1))
def test_resolve_should_track_driver_status(self):
# Given
hook_default = SparkSubmitHook(conn_id='')
hook_spark_yarn_cluster = SparkSubmitHook(conn_id='spark_yarn_cluster')
hook_spark_k8s_cluster = SparkSubmitHook(conn_id='spark_k8s_cluster')
hook_spark_default_mesos = SparkSubmitHook(conn_id='spark_default_mesos')
hook_spark_home_set = SparkSubmitHook(conn_id='spark_home_set')
hook_spark_home_not_set = SparkSubmitHook(conn_id='spark_home_not_set')
hook_spark_binary_set = SparkSubmitHook(conn_id='spark_binary_set')
hook_spark_binary_and_home_set = SparkSubmitHook(
conn_id='spark_binary_and_home_set')
hook_spark_standalone_cluster = SparkSubmitHook(
conn_id='spark_standalone_cluster')
# When
should_track_driver_status_default = hook_default \
._resolve_should_track_driver_status()
should_track_driver_status_spark_yarn_cluster = hook_spark_yarn_cluster \
._resolve_should_track_driver_status()
should_track_driver_status_spark_k8s_cluster = hook_spark_k8s_cluster \
._resolve_should_track_driver_status()
should_track_driver_status_spark_default_mesos = hook_spark_default_mesos \
._resolve_should_track_driver_status()
should_track_driver_status_spark_home_set = hook_spark_home_set \
._resolve_should_track_driver_status()
should_track_driver_status_spark_home_not_set = hook_spark_home_not_set \
._resolve_should_track_driver_status()
should_track_driver_status_spark_binary_set = hook_spark_binary_set \
._resolve_should_track_driver_status()
should_track_driver_status_spark_binary_and_home_set = \
hook_spark_binary_and_home_set._resolve_should_track_driver_status()
should_track_driver_status_spark_standalone_cluster = \
hook_spark_standalone_cluster._resolve_should_track_driver_status()
# Then
self.assertEqual(should_track_driver_status_default, False)
self.assertEqual(should_track_driver_status_spark_yarn_cluster, False)
self.assertEqual(should_track_driver_status_spark_k8s_cluster, False)
self.assertEqual(should_track_driver_status_spark_default_mesos, False)
self.assertEqual(should_track_driver_status_spark_home_set, False)
self.assertEqual(should_track_driver_status_spark_home_not_set, False)
self.assertEqual(should_track_driver_status_spark_binary_set, False)
self.assertEqual(should_track_driver_status_spark_binary_and_home_set, False)
self.assertEqual(should_track_driver_status_spark_standalone_cluster, True)
def test_resolve_connection_yarn_default(self):
# Given
hook = SparkSubmitHook(conn_id='')
# When
connection = hook._resolve_connection()
cmd = hook._build_spark_submit_command(self._spark_job_file)
# Then
dict_cmd = self.cmd_args_to_dict(cmd)
expected_spark_connection = {"master": "yarn",
"spark_binary": "spark-submit",
"deploy_mode": None,
"queue": None,
"spark_home": None,
"namespace": None}
self.assertEqual(connection, expected_spark_connection)
self.assertEqual(dict_cmd["--master"], "yarn")
def test_resolve_connection_yarn_default_connection(self):
# Given
hook = SparkSubmitHook(conn_id='spark_default')
# When
connection = hook._resolve_connection()
cmd = hook._build_spark_submit_command(self._spark_job_file)
# Then
dict_cmd = self.cmd_args_to_dict(cmd)
expected_spark_connection = {"master": "yarn",
"spark_binary": "spark-submit",
"deploy_mode": None,
"queue": "root.default",
"spark_home": None,
"namespace": None}
self.assertEqual(connection, expected_spark_connection)
self.assertEqual(dict_cmd["--master"], "yarn")
self.assertEqual(dict_cmd["--queue"], "root.default")
def test_resolve_connection_mesos_default_connection(self):
# Given
hook = SparkSubmitHook(conn_id='spark_default_mesos')
# When
connection = hook._resolve_connection()
cmd = hook._build_spark_submit_command(self._spark_job_file)
# Then
dict_cmd = self.cmd_args_to_dict(cmd)
expected_spark_connection = {"master": "mesos://host:5050",
"spark_binary": "spark-submit",
"deploy_mode": None,
"queue": None,
"spark_home": None,
"namespace": None}
self.assertEqual(connection, expected_spark_connection)
self.assertEqual(dict_cmd["--master"], "mesos://host:5050")
def test_resolve_connection_spark_yarn_cluster_connection(self):
# Given
hook = SparkSubmitHook(conn_id='spark_yarn_cluster')
# When
connection = hook._resolve_connection()
cmd = hook._build_spark_submit_command(self._spark_job_file)
# Then
dict_cmd = self.cmd_args_to_dict(cmd)
expected_spark_connection = {"master": "yarn://yarn-master",
"spark_binary": "spark-submit",
"deploy_mode": "cluster",
"queue": "root.etl",
"spark_home": None,
"namespace": None}
self.assertEqual(connection, expected_spark_connection)
self.assertEqual(dict_cmd["--master"], "yarn://yarn-master")
self.assertEqual(dict_cmd["--queue"], "root.etl")
self.assertEqual(dict_cmd["--deploy-mode"], "cluster")
def test_resolve_connection_spark_k8s_cluster_connection(self):
# Given
hook = SparkSubmitHook(conn_id='spark_k8s_cluster')
# When
connection = hook._resolve_connection()
cmd = hook._build_spark_submit_command(self._spark_job_file)
# Then
dict_cmd = self.cmd_args_to_dict(cmd)
expected_spark_connection = {"spark_home": "/opt/spark",
"queue": None,
"spark_binary": "spark-submit",
"master": "k8s://https://k8s-master",
"deploy_mode": "cluster",
"namespace": "mynamespace"}
self.assertEqual(connection, expected_spark_connection)
self.assertEqual(dict_cmd["--master"], "k8s://https://k8s-master")
self.assertEqual(dict_cmd["--deploy-mode"], "cluster")
def test_resolve_connection_spark_home_set_connection(self):
# Given
hook = SparkSubmitHook(conn_id='spark_home_set')
# When
connection = hook._resolve_connection()
cmd = hook._build_spark_submit_command(self._spark_job_file)
# Then
expected_spark_connection = {"master": "yarn://yarn-master",
"spark_binary": "spark-submit",
"deploy_mode": None,
"queue": None,
"spark_home": "/opt/myspark",
"namespace": None}
self.assertEqual(connection, expected_spark_connection)
self.assertEqual(cmd[0], '/opt/myspark/bin/spark-submit')
def test_resolve_connection_spark_home_not_set_connection(self):
# Given
hook = SparkSubmitHook(conn_id='spark_home_not_set')
# When
connection = hook._resolve_connection()
cmd = hook._build_spark_submit_command(self._spark_job_file)
# Then
expected_spark_connection = {"master": "yarn://yarn-master",
"spark_binary": "spark-submit",
"deploy_mode": None,
"queue": None,
"spark_home": None,
"namespace": None}
self.assertEqual(connection, expected_spark_connection)
self.assertEqual(cmd[0], 'spark-submit')
def test_resolve_connection_spark_binary_set_connection(self):
# Given
hook = SparkSubmitHook(conn_id='spark_binary_set')
# When
connection = hook._resolve_connection()
cmd = hook._build_spark_submit_command(self._spark_job_file)
# Then
expected_spark_connection = {"master": "yarn",
"spark_binary": "custom-spark-submit",
"deploy_mode": None,
"queue": None,
"spark_home": None,
"namespace": None}
self.assertEqual(connection, expected_spark_connection)
self.assertEqual(cmd[0], 'custom-spark-submit')
def test_resolve_connection_spark_binary_default_value_override(self):
# Given
hook = SparkSubmitHook(conn_id='spark_binary_set',
spark_binary='another-custom-spark-submit')
# When
connection = hook._resolve_connection()
cmd = hook._build_spark_submit_command(self._spark_job_file)
# Then
expected_spark_connection = {"master": "yarn",
"spark_binary": "another-custom-spark-submit",
"deploy_mode": None,
"queue": None,
"spark_home": None,
"namespace": None}
self.assertEqual(connection, expected_spark_connection)
self.assertEqual(cmd[0], 'another-custom-spark-submit')
def test_resolve_connection_spark_binary_default_value(self):
# Given
hook = SparkSubmitHook(conn_id='spark_default')
# When
connection = hook._resolve_connection()
cmd = hook._build_spark_submit_command(self._spark_job_file)
# Then
expected_spark_connection = {"master": "yarn",
"spark_binary": "spark-submit",
"deploy_mode": None,
"queue": 'root.default',
"spark_home": None,
"namespace": None}
self.assertEqual(connection, expected_spark_connection)
self.assertEqual(cmd[0], 'spark-submit')
def test_resolve_connection_spark_binary_and_home_set_connection(self):
# Given
hook = SparkSubmitHook(conn_id='spark_binary_and_home_set')
# When
connection = hook._resolve_connection()
cmd = hook._build_spark_submit_command(self._spark_job_file)
# Then
expected_spark_connection = {"master": "yarn",
"spark_binary": "custom-spark-submit",
"deploy_mode": None,
"queue": None,
"spark_home": "/path/to/spark_home",
"namespace": None}
self.assertEqual(connection, expected_spark_connection)
self.assertEqual(cmd[0], '/path/to/spark_home/bin/custom-spark-submit')
def test_resolve_connection_spark_standalone_cluster_connection(self):
# Given
hook = SparkSubmitHook(conn_id='spark_standalone_cluster')
# When
connection = hook._resolve_connection()
cmd = hook._build_spark_submit_command(self._spark_job_file)
# Then
expected_spark_connection = {"master": "spark://spark-standalone-master:6066",
"spark_binary": "spark-submit",
"deploy_mode": "cluster",
"queue": None,
"spark_home": "/path/to/spark_home",
"namespace": None}
self.assertEqual(connection, expected_spark_connection)
self.assertEqual(cmd[0], '/path/to/spark_home/bin/spark-submit')
def test_resolve_spark_submit_env_vars_standalone_client_mode(self):
# Given
hook = SparkSubmitHook(conn_id='spark_standalone_cluster_client_mode',
env_vars={"bar": "foo"})
# When
hook._build_spark_submit_command(self._spark_job_file)
# Then
self.assertEqual(hook._env, {"bar": "foo"})
def test_resolve_spark_submit_env_vars_standalone_cluster_mode(self):
def env_vars_exception_in_standalone_cluster_mode():
# Given
hook = SparkSubmitHook(conn_id='spark_standalone_cluster',
env_vars={"bar": "foo"})
# When
hook._build_spark_submit_command(self._spark_job_file)
# Then
self.assertRaises(AirflowException,
env_vars_exception_in_standalone_cluster_mode)
def test_resolve_spark_submit_env_vars_yarn(self):
# Given
hook = SparkSubmitHook(conn_id='spark_yarn_cluster',
env_vars={"bar": "foo"})
# When
cmd = hook._build_spark_submit_command(self._spark_job_file)
# Then
self.assertEqual(cmd[4], "spark.yarn.appMasterEnv.bar=foo")
self.assertEqual(hook._env, {"bar": "foo"})
def test_resolve_spark_submit_env_vars_k8s(self):
# Given
hook = SparkSubmitHook(conn_id='spark_k8s_cluster',
env_vars={"bar": "foo"})
# When
cmd = hook._build_spark_submit_command(self._spark_job_file)
# Then
self.assertEqual(cmd[4], "spark.kubernetes.driverEnv.bar=foo")
def test_process_spark_submit_log_yarn(self):
# Given
hook = SparkSubmitHook(conn_id='spark_yarn_cluster')
log_lines = [
'SPARK_MAJOR_VERSION is set to 2, using Spark2',
'WARN NativeCodeLoader: Unable to load native-hadoop library for your ' +
'platform... using builtin-java classes where applicable',
'WARN DomainSocketFactory: The short-circuit local reads feature cannot '
'be used because libhadoop cannot be loaded.',
'INFO Client: Requesting a new application from cluster with 10 NodeManagers',
'INFO Client: Submitting application application_1486558679801_1820 ' +
'to ResourceManager'
]
# When
hook._process_spark_submit_log(log_lines)
# Then
self.assertEqual(hook._yarn_application_id, 'application_1486558679801_1820')
def test_process_spark_submit_log_k8s(self):
# Given
hook = SparkSubmitHook(conn_id='spark_k8s_cluster')
log_lines = [
'INFO LoggingPodStatusWatcherImpl:54 - State changed, new state:' +
'pod name: spark-pi-edf2ace37be7353a958b38733a12f8e6-driver' +
'namespace: default' +
'labels: spark-app-selector -> spark-465b868ada474bda82ccb84ab2747fcd,' +
'spark-role -> driver' +
'pod uid: ba9c61f6-205f-11e8-b65f-d48564c88e42' +
'creation time: 2018-03-05T10:26:55Z' +
'service account name: spark' +
'volumes: spark-init-properties, download-jars-volume,' +
'download-files-volume, spark-token-2vmlm' +
'node name: N/A' +
'start time: N/A' +
'container images: N/A' +
'phase: Pending' +
'status: []' +
'2018-03-05 11:26:56 INFO LoggingPodStatusWatcherImpl:54 - State changed,' +
' new state:' +
'pod name: spark-pi-edf2ace37be7353a958b38733a12f8e6-driver' +
'namespace: default' +
'Exit code: 999'
]
# When
hook._process_spark_submit_log(log_lines)
# Then
self.assertEqual(hook._kubernetes_driver_pod,
'spark-pi-edf2ace37be7353a958b38733a12f8e6-driver')
self.assertEqual(hook._spark_exit_code, 999)
def test_process_spark_submit_log_standalone_cluster(self):
# Given
hook = SparkSubmitHook(conn_id='spark_standalone_cluster')
log_lines = [
'Running Spark using the REST application submission protocol.',
'17/11/28 11:14:15 INFO RestSubmissionClient: Submitting a request '
'to launch an application in spark://spark-standalone-master:6066',
'17/11/28 11:14:15 INFO RestSubmissionClient: Submission successfully ' +
'created as driver-20171128111415-0001. Polling submission state...'
]
# When
hook._process_spark_submit_log(log_lines)
# Then
self.assertEqual(hook._driver_id, 'driver-20171128111415-0001')
def test_process_spark_driver_status_log(self):
# Given
hook = SparkSubmitHook(conn_id='spark_standalone_cluster')
log_lines = [
'Submitting a request for the status of submission ' +
'driver-20171128111415-0001 in spark://spark-standalone-master:6066',
'17/11/28 11:15:37 INFO RestSubmissionClient: Server responded with ' +
'SubmissionStatusResponse:',
'{',
'"action" : "SubmissionStatusResponse",',
'"driverState" : "RUNNING",',
'"serverSparkVersion" : "1.6.0",',
'"submissionId" : "driver-20171128111415-0001",',
'"success" : true,',
'"workerHostPort" : "172.18.0.7:38561",',
'"workerId" : "worker-20171128110741-172.18.0.7-38561"',
'}'
]
# When
hook._process_spark_status_log(log_lines)
# Then
self.assertEqual(hook._driver_status, 'RUNNING')
@patch('airflow.providers.apache.spark.hooks.spark_submit.subprocess.Popen')
def test_yarn_process_on_kill(self, mock_popen):
# Given
mock_popen.return_value.stdout = io.StringIO('stdout')
mock_popen.return_value.stderr = io.StringIO('stderr')
mock_popen.return_value.poll.return_value = None
mock_popen.return_value.wait.return_value = 0
log_lines = [
'SPARK_MAJOR_VERSION is set to 2, using Spark2',
'WARN NativeCodeLoader: Unable to load native-hadoop library for your ' +
'platform... using builtin-java classes where applicable',
'WARN DomainSocketFactory: The short-circuit local reads feature cannot ' +
'be used because libhadoop cannot be loaded.',
'INFO Client: Requesting a new application from cluster with 10 ' +
'NodeManagerapplication_1486558679801_1820s',
'INFO Client: Submitting application application_1486558679801_1820 ' +
'to ResourceManager'
]
hook = SparkSubmitHook(conn_id='spark_yarn_cluster')
hook._process_spark_submit_log(log_lines)
hook.submit()
# When
hook.on_kill()
# Then
self.assertIn(call(['yarn', 'application', '-kill',
'application_1486558679801_1820'],
stderr=-1, stdout=-1),
mock_popen.mock_calls)
def test_standalone_cluster_process_on_kill(self):
# Given
log_lines = [
'Running Spark using the REST application submission protocol.',
'17/11/28 11:14:15 INFO RestSubmissionClient: Submitting a request ' +
'to launch an application in spark://spark-standalone-master:6066',
'17/11/28 11:14:15 INFO RestSubmissionClient: Submission successfully ' +
'created as driver-20171128111415-0001. Polling submission state...'
]
hook = SparkSubmitHook(conn_id='spark_standalone_cluster')
hook._process_spark_submit_log(log_lines)
# When
kill_cmd = hook._build_spark_driver_kill_command()
# Then
self.assertEqual(kill_cmd[0], '/path/to/spark_home/bin/spark-submit')
self.assertEqual(kill_cmd[1], '--master')
self.assertEqual(kill_cmd[2], 'spark://spark-standalone-master:6066')
self.assertEqual(kill_cmd[3], '--kill')
self.assertEqual(kill_cmd[4], 'driver-20171128111415-0001')
@patch('airflow.kubernetes.kube_client.get_kube_client')
@patch('airflow.providers.apache.spark.hooks.spark_submit.subprocess.Popen')
def test_k8s_process_on_kill(self, mock_popen, mock_client_method):
# Given
mock_popen.return_value.stdout = io.StringIO('stdout')
mock_popen.return_value.stderr = io.StringIO('stderr')
mock_popen.return_value.poll.return_value = None
mock_popen.return_value.wait.return_value = 0
client = mock_client_method.return_value
hook = SparkSubmitHook(conn_id='spark_k8s_cluster')
log_lines = [
'INFO LoggingPodStatusWatcherImpl:54 - State changed, new state:' +
'pod name: spark-pi-edf2ace37be7353a958b38733a12f8e6-driver' +
'namespace: default' +
'labels: spark-app-selector -> spark-465b868ada474bda82ccb84ab2747fcd,' +
'spark-role -> driver' +
'pod uid: ba9c61f6-205f-11e8-b65f-d48564c88e42' +
'creation time: 2018-03-05T10:26:55Z' +
'service account name: spark' +
'volumes: spark-init-properties, download-jars-volume,' +
'download-files-volume, spark-token-2vmlm' +
'node name: N/A' +
'start time: N/A' +
'container images: N/A' +
'phase: Pending' +
'status: []' +
'2018-03-05 11:26:56 INFO LoggingPodStatusWatcherImpl:54 - State changed,' +
' new state:' +
'pod name: spark-pi-edf2ace37be7353a958b38733a12f8e6-driver' +
'namespace: default' +
'Exit code: 0'
]
hook._process_spark_submit_log(log_lines)
hook.submit()
# When
hook.on_kill()
# Then
import kubernetes
kwargs = {'pretty': True, 'body': kubernetes.client.V1DeleteOptions()}
client.delete_namespaced_pod.assert_called_once_with(
'spark-pi-edf2ace37be7353a958b38733a12f8e6-driver',
'mynamespace', **kwargs)
if __name__ == '__main__':
unittest.main()
|
|
from .exceptions import QueryParameterError
from .utils import ESRange, EqualityComparableUsingAttributeDictionary
from .es import json
class Filter(EqualityComparableUsingAttributeDictionary):
_extra_properties = ("_cache", "cache_key", "_name")
def __init__(self, **kwargs):
self._extra_values = {key: kwargs.pop(key)
for key in self._extra_properties
if kwargs.get(key) is not None}
if kwargs:
raise ValueError("Unknown properties: %s" % kwargs.keys())
def serialize(self):
data = self._serialize()
if self._extra_values:
data.update(self._extra_values)
return {self._internal_name: data}
def _serialize(self):
raise NotImplementedError
@property
def _internal_name(self):
raise NotImplementedError
class FilterList(Filter):
def __init__(self, filters, **kwargs):
super(FilterList, self).__init__(**kwargs)
self.filters = filters
def _serialize(self):
if not self.filters:
raise RuntimeError("A least a filter must be declared")
serialized = [filter.serialize() for filter in self.filters]
if self._extra_values:
serialized = {"filters": serialized}
return serialized
def __iter__(self):
return iter(self.filters)
class ANDFilter(FilterList):
"""
A filter that matches combinations of other filters using the AND operator
Example:
t1 = TermFilter('name', 'john')
t2 = TermFilter('name', 'smith')
f = ANDFilter([t1, t2])
q = FilteredQuery(MatchAllQuery(), f)
results = conn.search(q)
"""
_internal_name = "and"
class ORFilter(FilterList):
"""
A filter that matches combinations of other filters using the OR operator
Example:
t1 = TermFilter('name', 'john')
t2 = TermFilter('name', 'smith')
f = ORFilter([t1, t2])
q = FilteredQuery(MatchAllQuery(), f)
results = conn.search(q)
"""
_internal_name = "or"
class BoolFilter(Filter):
"""
A filter that matches documents matching boolean combinations of other
queries. Similar in concept to Boolean query, except that the clauses are
other filters. Can be placed within queries that accept a filter.
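    Example:
    t1 = TermFilter('name', 'john')
    t2 = TermFilter('name', 'smith')
    f = BoolFilter(must=[t1], must_not=[t2])
    q = FilteredQuery(MatchAllQuery(), f)
    results = conn.search(q)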
"""
_internal_name = "bool"
def __init__(self, must=None, must_not=None, should=None,
**kwargs):
super(BoolFilter, self).__init__(**kwargs)
self._must = []
self._must_not = []
self._should = []
if must:
self.add_must(must)
if must_not:
self.add_must_not(must_not)
if should:
self.add_should(should)
def add_must(self, queries):
if isinstance(queries, list):
self._must.extend(queries)
else:
self._must.append(queries)
def add_must_not(self, queries):
if isinstance(queries, list):
self._must_not.extend(queries)
else:
self._must_not.append(queries)
def add_should(self, queries):
if isinstance(queries, list):
self._should.extend(queries)
else:
self._should.append(queries)
def is_empty(self):
return not any([self._must, self._must_not, self._should])
def _serialize(self):
filters = {}
if self._must:
filters["must"] = [f.serialize() for f in self._must]
if self._must_not:
filters["must_not"] = [f.serialize() for f in self._must_not]
if self._should:
filters["should"] = [f.serialize() for f in self._should]
if not filters:
raise RuntimeError("A least a filter must be declared")
return filters
class NotFilter(Filter):
_internal_name = "not"
def __init__(self, filter, **kwargs):
super(NotFilter, self).__init__(**kwargs)
self.filter = filter
def _serialize(self):
if not isinstance(self.filter, Filter):
raise RuntimeError("NotFilter argument should be a Filter")
return {"filter": self.filter.serialize()}
class RangeFilter(Filter):
_internal_name = "range"
def __init__(self, qrange=None, **kwargs):
super(RangeFilter, self).__init__(**kwargs)
self.ranges = []
if qrange:
self.add(qrange)
def add(self, qrange):
if isinstance(qrange, list):
self.ranges.extend(qrange)
elif isinstance(qrange, ESRange):
self.ranges.append(qrange)
def negate(self):
"""Negate some ranges: useful to resolve a NotFilter(RangeFilter(**))"""
for r in self.ranges:
r.negate()
def _serialize(self):
if not self.ranges:
raise RuntimeError("A least a range must be declared")
return dict([r.serialize() for r in self.ranges])
NumericRangeFilter = RangeFilter
class PrefixFilter(Filter):
_internal_name = "prefix"
def __init__(self, field=None, prefix=None, **kwargs):
super(PrefixFilter, self).__init__(**kwargs)
self._values = {}
if field is not None and prefix is not None:
self.add(field, prefix)
def add(self, field, prefix):
self._values[field] = prefix
def _serialize(self):
if not self._values:
raise RuntimeError("A least a field/prefix pair must be added")
return self._values
class ScriptFilter(Filter):
_internal_name = "script"
def __init__(self, script, params=None, lang=None, **kwargs):
super(ScriptFilter, self).__init__(**kwargs)
self.script = script
self.params = params
self.lang = lang
def add(self, field, value):
self.params[field] = {"value": value}
def _serialize(self):
data = {"script": self.script}
if self.params is not None:
data["params"] = self.params
if self.lang is not None:
data["lang"] = self.lang
return data
class TermFilter(Filter):
_internal_name = "term"
def __init__(self, field=None, value=None, **kwargs):
super(TermFilter, self).__init__(**kwargs)
self._values = {}
if field is not None and value is not None:
self.add(field, value)
def add(self, field, value):
self._values[field] = value
def _serialize(self):
if not self._values:
raise RuntimeError("A least a field/value pair must be added")
return self._values
class TypeFilter(Filter):
_internal_name = "type"
def __init__(self, type, **kwargs):
super(TypeFilter, self).__init__(**kwargs)
self.type = type
def _serialize(self):
return {"value": self.type}
class ExistsFilter(Filter):
_internal_name = "exists"
def __init__(self, field, **kwargs):
super(ExistsFilter, self).__init__(**kwargs)
self.field = field
def _serialize(self):
return {"field": self.field}
class MissingFilter(Filter):
_internal_name = "missing"
def __init__(self, field, **kwargs):
super(MissingFilter, self).__init__(**kwargs)
self.field = field
def _serialize(self):
return {"field": self.field}
class RegexTermFilter(Filter):
_internal_name = "regex_term"
def __init__(self, field=None, value=None, ignorecase=False, **kwargs):
super(RegexTermFilter, self).__init__(**kwargs)
self._values = {}
self.ignorecase = ignorecase
if field is not None and value is not None:
self.add(field, value, ignorecase=ignorecase)
def add(self, field, value, ignorecase=False):
if ignorecase:
self._values[field] = {"term":value, "ignorecase":ignorecase}
else:
self._values[field] = value
def _serialize(self):
if not self._values:
raise RuntimeError("A least a field/value pair must be added")
return self._values
class LimitFilter(Filter):
_internal_name = "limit"
def __init__(self, value=100, **kwargs):
super(LimitFilter, self).__init__(**kwargs)
self.value = value
def _serialize(self):
return {"value": self.value}
class TermsFilter(Filter):
_internal_name = "terms"
def __init__(self, field=None, values=None, execution=None, **kwargs):
super(TermsFilter, self).__init__(**kwargs)
self._values = {}
self.execution = execution
if field is not None and values is not None:
self.add(field, values)
def add(self, field, values):
self._values[field] = values
def _serialize(self):
if not self._values:
raise RuntimeError("A least a field/value pair must be added")
data = self._values.copy()
if self.execution:
data["execution"] = self.execution
return data
class QueryFilter(Filter):
_internal_name = "query"
def __init__(self, query, **kwargs):
super(QueryFilter, self).__init__(**kwargs)
self._query = query
def _serialize(self):
if not self._query:
raise RuntimeError("A least a field/value pair must be added")
return self._query.serialize()
#
#--- Geo Queries
#http://www.elasticsearch.com/blog/2010/08/16/geo_location_and_search.html
class GeoDistanceFilter(Filter):
"""http://github.com/elasticsearch/elasticsearch/issues/279"""
_internal_name = "geo_distance"
def __init__(self, field, location, distance, distance_type="arc", distance_unit=None, **kwargs):
super(GeoDistanceFilter, self).__init__(**kwargs)
self.field = field
self.location = location
self.distance = distance
self.distance_type = distance_type
self.distance_unit = distance_unit
def _serialize(self):
if self.distance_type not in ["arc", "plane"]:
raise QueryParameterError("Invalid distance_type")
params = {"distance": self.distance, self.field: self.location}
if self.distance_type != "arc":
params["distance_type"] = self.distance_type
if self.distance_unit:
if self.distance_unit not in ["km", "mi", "miles"]:
raise QueryParameterError("Invalid distance_unit")
params["distance_unit"] = self.distance_unit
return params
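# Illustrative sketch (assumed usage; the field name and location format follow
# the geo_distance post linked above and are not guaranteed by this module):
#
#     f = GeoDistanceFilter('pin.location', {'lat': 40.73, 'lon': -74.1}, '300km')
#     f.serialize()
#     # -> {'geo_distance': {'distance': '300km',
#     #                      'pin.location': {'lat': 40.73, 'lon': -74.1}}}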
class GeoBoundingBoxFilter(Filter):
"""http://github.com/elasticsearch/elasticsearch/issues/290"""
_internal_name = "geo_bounding_box"
def __init__(self, field, location_tl, location_br, **kwargs):
super(GeoBoundingBoxFilter, self).__init__(**kwargs)
self.field = field
self.location_tl = location_tl
self.location_br = location_br
def _serialize(self):
return {self.field: {"top_left": self.location_tl,
"bottom_right": self.location_br}}
class GeoPolygonFilter(Filter):
"""http://github.com/elasticsearch/elasticsearch/issues/294"""
_internal_name = "geo_polygon"
def __init__(self, field, points, **kwargs):
super(GeoPolygonFilter, self).__init__(**kwargs)
self.field = field
self.points = points
def _serialize(self):
return {self.field: {"points": self.points}}
class MatchAllFilter(Filter):
"""A filter that matches on all documents"""
_internal_name = "match_all"
def __init__(self, **kwargs):
super(MatchAllFilter, self).__init__(**kwargs)
def _serialize(self):
return {}
class HasFilter(Filter):
def __init__(self, type, query, _scope=None, **kwargs):
super(HasFilter, self).__init__(**kwargs)
self.query = query
self.type = type
self._scope = _scope
def _serialize(self):
data = {"query": self.query.serialize(), "type": self.type}
if self._scope is not None:
data["_scope"] = self._scope
return data
class HasChildFilter(HasFilter):
"""
The has_child filter accepts a query and the child type to run against,
and results in parent documents that have child docs matching the query
"""
_internal_name = "has_child"
class HasParentFilter(HasFilter):
"""
The has_parent filter accepts a query and the parent type to run against,
and results in child documents that have parent docs matching the query
"""
_internal_name = "has_parent"
class NestedFilter(Filter):
"""
A nested filter, works in a similar fashion to the nested query, except
used as a filter. It follows exactly the same structure, but also allows
to cache the results (set _cache to true), and have it named
(set the _name value).
"""
_internal_name = "nested"
def __init__(self, path, filter, **kwargs):
super(NestedFilter, self).__init__(**kwargs)
self.path = path
self.filter = filter
def _serialize(self):
return {"path": self.path, "query": self.filter.serialize()}
class IdsFilter(Filter):
_internal_name = "ids"
def __init__(self, values, type=None, **kwargs):
super(IdsFilter, self).__init__(**kwargs)
self.type = type
self.values = values
def _serialize(self):
data = {}
if self.type:
data["type"] = self.type
if isinstance(self.values, basestring):
data["values"] = [self.values]
else:
data["values"] = self.values
return data
class RawFilter(Filter):
"""Uses exactly the filter provided as an ES filter."""
def __init__(self, filter_text_or_dict, **kwargs):
super(RawFilter, self).__init__(**kwargs)
if isinstance(filter_text_or_dict, basestring):
self._filter = json.loads(filter_text_or_dict)
else:
self._filter = filter_text_or_dict
def serialize(self):
return self._filter
|
|
"""FDTD Postprocessing.
"""
from __future__ import print_function
from builtins import str
from builtins import range
from builtins import object
import os
import numpy
import EMpy.utils
import pylab
__author__ = "Lorenzo Bolla"
class Input(object):
"""Data structure to handle input files."""
def __init__(self, filename):
"""Set the input filename."""
self.filename = filename
def __str__(self):
"""Return a representation of the input file."""
dftmon_str = "%g ! #timemonitors \n" % len(self.dftmonitors)
if len(self.dftmonitors) > 0:
dftmon_str += "".join(
[
"%g %g %g %g %g %g\n%g %g\n"
% (
dm[0][0],
dm[0][1],
dm[0][2],
dm[0][3],
dm[0][4],
dm[0][5],
dm[1][0],
dm[1][1],
)
for dm in self.dftmonitors
]
)
timemon_str = "%g ! #timemonitors \n" % len(self.dftmonitors)
if len(timemon_str) > 0:
timemon_str += "%g %g \n %s" % (
self.timemonitors_time_interval[0],
self.timemonitors_time_interval[1],
"".join(
[
"%g %g %g ! time_monitor #%d\n" % (s[0], s[1], s[2], iss)
for iss, s in enumerate(self.timemonitors)
]
),
)
return (
"%g %g %g %g ! dx dy dz cfl \n"
"%g %g %g %g %g %g %s %g %g ! xmax ymax zmax pmlx pmly pmlz pmltype pmlsmooth pmlref \n"
"%g %g %g %g ! xmax ymax zmax pmlx pmly pmlz \n"
"%g ! output3deps? \n"
"%g ! number diel slices \n"
"%s \n"
"%g ! number field slices \n"
"%s \n"
"%g %g %g ! #dielobjs, index of bg, conductivity of bg \n"
"%s"
"%g ! smoothing method \n"
"%g ! #sources \n"
"%s"
"%g %g %g ! lambdamin, lambdamax, dlambda \n"
"%s"
"%s"
% (
self.dx,
self.dy,
self.dz,
self.cfl,
self.xmax,
self.ymax,
self.zmax,
self.pmlx,
self.pmly,
self.pmlz,
self.pmltype,
self.pmlsmooth,
self.pmlref,
self.start,
self.end,
self.slides,
self.snapshot,
self.output3deps,
len(self.dielslices),
"\n".join(
[
"%g %g %g ! dielslice #%d" % (d[0], d[1], d[2], dd)
for (dd, d) in enumerate(self.dielslices)
]
),
len(self.fieldslices),
"\n".join(
[
"%g %g %g ! fieldslice #%d" % (f[0], f[1], f[2], ff)
for (ff, f) in enumerate(self.fieldslices)
]
),
len(self.dielobjs),
self.bgrix,
self.bgsigma,
"".join(["%s %s\n" % obj for obj in self.dielobjs]),
self.smoothing_method,
len(self.sources),
"".join(["%s\n%s\n%s\n%s\n" % src for src in self.sources]),
self.lambdamin,
self.lambdamax,
self.dlambda,
dftmon_str,
timemon_str,
)
)
def tofile(self, filename=None):
"""Save the input data to the input file."""
if filename is None:
filename = self.filename
f = open(filename, "w")
f.write(self.__str__())
f.close()
class Param(object):
"""Data structure to handle the param file."""
def __str__(self):
"""Return a representation of the input file."""
return (
"%g\n"
"%g\n"
"%g\n"
"%g\n"
"%g\n"
"%g\n"
"%g\n"
"%g\n"
"%g\n"
"%g\n"
"%g\n"
"%g\n"
"%g\n"
"%g\n"
"%s"
% (
self.dx,
self.dy,
self.dz,
self.dt,
self.mx,
self.my,
self.mz,
self.pmlx,
self.pmly,
self.pmlz,
self.nflux,
self.ntime,
self.step1,
self.step2,
"\n".join(
[
"%d\n%d\n%d\n%d\n%d\n%d"
% (
dm["direction"],
dm["nfreq"],
dm["flxlim"][0],
dm["flxlim"][1],
dm["flxlim"][2],
dm["flxlim"][3],
)
for dm in self.dftmonitors
]
),
)
)
class Sensor(object):
"""Data structure to handle the FFT sensor's data."""
def plot(self, n):
"""Plot the sensor's fields."""
pylab.clf()
pylab.hot()
pylab.subplot(2, 2, 1)
pylab.contour(numpy.abs(self.E1[:, :, n].T), 16)
pylab.axis("image")
pylab.title("E1")
pylab.subplot(2, 2, 2)
pylab.contour(numpy.abs(self.H1[:, :, n].T), 16)
pylab.axis("image")
pylab.title("H1")
pylab.subplot(2, 2, 3)
pylab.contour(numpy.abs(self.E2[:, :, n].T), 16)
pylab.axis("image")
pylab.title("E2")
pylab.subplot(2, 2, 4)
pylab.contour(numpy.abs(self.H2[:, :, n].T), 16)
pylab.axis("image")
pylab.title("H2")
pylab.show()
def __str__(self):
"""Return a representation of the sensor."""
return "E1\n%s\nH1\n%s\nE2\n%s\nH2\n%s\n" % (self.E1, self.H1, self.E2, self.H2)
class TimeSensor(object):
"""Data structure to handle the time sensor's data."""
def plot_Ex(self, logplot=False):
self.__plot_field(self.Ex, logplot)
def plot_Ey(self, logplot=False):
self.__plot_field(self.Ey, logplot)
def plot_Ez(self, logplot=False):
self.__plot_field(self.Ez, logplot)
def plot_Hx(self, logplot=False):
self.__plot_field(self.Hx, logplot)
def plot_Hy(self, logplot=False):
self.__plot_field(self.Hy, logplot)
def plot_Hz(self, logplot=False):
self.__plot_field(self.Hz, logplot)
def __plot_field(self, field, logplot=False):
if logplot:
data = 20 * numpy.log10(1e-20 + numpy.abs(field))
pylab.plot(self.t, data)
else:
data = field
pylab.plot(self.t, data)
pylab.show()
class FDTD(object):
"""FDTD.
Data structure to handle an FDTD simulation. It manages an input file, a param file and the sensors' output.
It can run a simulation via a system call.
"""
def __init__(self):
self.input = None
self.param = None
self.sensors = None
def fetch_data(
self,
remote_dir_="./",
input_file="inp.txt",
param_file="param",
directory_="./",
):
remote_dir = fixdir(remote_dir_)
directory = fixdir(directory_)
# input file
os.system(
"scp -C bollalo001@pico:" + remote_dir + "/" + input_file + " " + directory
)
# param file
os.system(
"scp -C bollalo001@pico:" + remote_dir + "/" + param_file + " " + directory
)
# fieldslices, flux and time sensors
os.system(
"scp -C bollalo001@pico:" + remote_dir + "/[EHeh]*_*" + " " + directory
)
# dielslices
os.system("scp -C bollalo001@pico:" + remote_dir + "/diel*" + " " + directory)
def put_data(self, remote_dir_="./", input_file="inp.txt", directory_="./"):
remote_dir = fixdir(remote_dir_)
directory = fixdir(directory_)
# input file
os.system("scp -C" + directory + input_file + " bollalo001@pico:" + remote_dir)
# .dat modesolver's files
os.system("scp -C" + directory + "*.dat bollalo001@pico:" + remote_dir)
def load(
self, directory_="./", input_file="inp.txt", param_file="param", remote_dir_=""
):
"""Load input, param and sensors."""
remote_dir = fixdir(remote_dir_)
directory = fixdir(directory_)
if remote_dir != "":
self.fetch_data(remote_dir, input_file, param_file, directory)
self.load_input_file(directory, input_file)
self.load_param(directory, param_file)
self.load_sensors(directory)
self.load_time_sensors(directory)
def load_input_file(self, directory_="./", filename="inp.txt"):
"""Load input file."""
directory = fixdir(directory_)
try:
f = open(directory + filename)
except Exception:
print("ERROR: input file")
return
inp = Input(filename)
(inp.dx, inp.dy, inp.dz, inp.cfl) = numpy.fromstring(
strip_comment(f.readline()), sep=" "
)
tmp = strip_comment(f.readline())
tmp_idx = tmp.find("P")
if tmp_idx > 0:
inp.pmltype = "P"
else:
tmp_idx = tmp.find("G")
if tmp_idx > 0:
inp.pmltype = "G"
else:
raise ValueError("wrong pmltype")
(inp.xmax, inp.ymax, inp.zmax, inp.pmlx, inp.pmly, inp.pmlz) = numpy.fromstring(
tmp[:tmp_idx], sep=" "
)
(inp.pmlsmooth, inp.pmlref) = numpy.fromstring(tmp[tmp_idx + 1 :], sep=" ")
(inp.start, inp.end, inp.slides, inp.snapshot) = numpy.fromstring(
strip_comment(f.readline()), sep=" "
)
inp.output3deps = numpy.fromstring(strip_comment(f.readline()), sep=" ")
# dielslices
ndielslices = numpy.fromstring(strip_comment(f.readline()), sep=" ")
inp.dielslices = []
        for i in range(int(ndielslices)):
inp.dielslices.append(
numpy.fromstring(strip_comment(f.readline()), sep=" ")
)
# fieldslices
nfieldslices = numpy.fromstring(strip_comment(f.readline()), sep=" ")
inp.fieldslices = []
        for i in range(int(nfieldslices)):
inp.fieldslices.append(
numpy.fromstring(strip_comment(f.readline()), sep=" ")
)
# dielobjs
(ndielobjs, inp.bgrix, inp.bgsigma) = numpy.fromstring(
strip_comment(f.readline()), sep=" "
)
inp.dielobjs = []
for i in range(int(ndielobjs)):
inp.dielobjs.append(
(strip_comment(f.readline()), strip_comment(f.readline()))
)
inp.smoothing_method = numpy.fromstring(strip_comment(f.readline()), sep=" ")
# sources
nsources = numpy.fromstring(strip_comment(f.readline()), dtype=int, sep=" ")
inp.sources = []
# (inp.time_dependence, inp.wls, inp.pwidth, inp.shift) = numpy.fromstring(strip_comment(f.readline()), sep = ' ')
for i in range(nsources):
inp.sources.append(
(
strip_comment(f.readline()),
strip_comment(f.readline()),
strip_comment(f.readline()),
strip_comment(f.readline()),
)
)
# dft monitors
(inp.lambdamin, inp.lambdamax, inp.dlambda) = numpy.fromstring(
strip_comment(f.readline()), sep=" "
)
ndftmonitors = numpy.fromstring(strip_comment(f.readline()), dtype=int, sep=" ")
inp.dftmonitors = []
for i in range(ndftmonitors):
inp.dftmonitors.append(
(
numpy.fromstring(strip_comment(f.readline()), sep=" "),
numpy.fromstring(strip_comment(f.readline()), sep=" "),
)
)
# time monitors
ntimemonitors = numpy.fromstring(strip_comment(f.readline()), sep=" ")
inp.timemonitors_time_interval = numpy.fromstring(
strip_comment(f.readline()), sep=" "
)
inp.timemonitors = []
        for i in range(int(ntimemonitors)):
inp.timemonitors.append(
numpy.fromstring(strip_comment(f.readline()), sep=" ")
)
f.close()
self.input = inp
def load_param(self, directory_="./", filename="param"):
"""Load param file."""
directory = fixdir(directory_)
param = Param()
try:
data = numpy.fromfile(directory + filename, sep=" ")
except Exception:
print("ERROR: param file")
return
param.dx, param.dy, param.dz, param.dt = data[0:4]
(
param.mx,
param.my,
param.mz,
param.pmlx,
param.pmly,
param.pmlz,
param.nflux,
param.ntime,
param.step1,
param.step2,
) = data[4:14].astype(numpy.int32)
param.dftmonitors = []
for iflux in range(int(param.nflux)):
direction, nfreq = data[14 + iflux * 6 : 16 + iflux * 6]
flxlim = data[16 + iflux * 6 : 20 + iflux * 6]
param.dftmonitors.append(
{"direction": int(direction), "nfreq": int(nfreq), "flxlim": flxlim}
)
self.param = param
def load_time_sensors(self, directory_="./"):
"""Load time sensors."""
directory = fixdir(directory_)
time_sensors = []
if self.param is None:
self.load_param(directory)
for itime in range(self.param.ntime):
tmp = TimeSensor()
tmp.Ex = load_fortran_unformatted(directory + "Ex_time_%02d" % (itime + 1))
tmp.Ey = load_fortran_unformatted(directory + "Ey_time_%02d" % (itime + 1))
tmp.Ez = load_fortran_unformatted(directory + "Ez_time_%02d" % (itime + 1))
tmp.Hx = load_fortran_unformatted(directory + "Hx_time_%02d" % (itime + 1))
tmp.Hy = load_fortran_unformatted(directory + "Hy_time_%02d" % (itime + 1))
tmp.Hz = load_fortran_unformatted(directory + "Hz_time_%02d" % (itime + 1))
tmp.t = self.param.dt * numpy.arange(len(tmp.Ex))
time_sensors.append(tmp)
self.time_sensors = time_sensors
def load_sensors(self, directory_="./"):
"""Load sensors."""
directory = fixdir(directory_)
sensors = []
if self.param is None:
self.load_param(directory)
for iflux in range(self.param.nflux):
tmp = Sensor()
dm = self.param.dftmonitors[iflux]
tmp.E1 = load_fortran_unformatted(directory + "E1_%02d" % (iflux + 1))
tmp.H1 = load_fortran_unformatted(directory + "H1_%02d" % (iflux + 1))
tmp.E2 = load_fortran_unformatted(directory + "E2_%02d" % (iflux + 1))
tmp.H2 = load_fortran_unformatted(directory + "H2_%02d" % (iflux + 1))
# [tmp.E1, tmp.H1, tmp.E2, tmp.H2] = map(lambda x: x[0::2] + 1j * x[1::2], [tmp.E1, tmp.H1, tmp.E2, tmp.H2])
# more memory efficient!
tmp.E1 = tmp.E1[0::2] + 1j * tmp.E1[1::2]
tmp.H1 = tmp.H1[0::2] + 1j * tmp.H1[1::2]
tmp.E2 = tmp.E2[0::2] + 1j * tmp.E2[1::2]
tmp.H2 = tmp.H2[0::2] + 1j * tmp.H2[1::2]
n1 = dm["flxlim"][1] - dm["flxlim"][0] + 1
n2 = dm["flxlim"][3] - dm["flxlim"][2] + 1
tmp.E1 = tmp.E1.reshape((n1, n2 + 1, dm["nfreq"]), order="F")
tmp.H1 = tmp.H1.reshape((n1, n2 + 1, dm["nfreq"]), order="F")
tmp.E2 = tmp.E2.reshape((n1 + 1, n2, dm["nfreq"]), order="F")
tmp.H2 = tmp.H2.reshape((n1 + 1, n2, dm["nfreq"]), order="F")
if dm["direction"] == 1:
# sensors in the x-direction
tmp.dx1 = self.param.dy
tmp.dx2 = self.param.dz
elif dm["direction"] == 2:
# sensors in the y-direction
tmp.dx1 = self.param.dx
tmp.dx2 = self.param.dz
elif dm["direction"] == 3:
# sensors in the z-direction
tmp.dx1 = self.param.dx
tmp.dx2 = self.param.dy
else:
raise ValueError("wrong direction")
sensors.append(tmp)
self.sensors = sensors
def viz2D(self, filename, directory_="./", const_dir="z", logplot=False):
"""Visualize a slice."""
directory = fixdir(directory_)
data = load_fortran_unformatted(directory + filename)
if self.param is None:
self.load_param(directory)
x = numpy.linspace(
self.param.dx / 2.0,
self.param.dx * self.param.mx - self.param.dx / 2.0,
self.param.mx,
)
y = numpy.linspace(
self.param.dy / 2.0,
self.param.dy * self.param.my - self.param.dy / 2.0,
self.param.my,
)
z = numpy.linspace(
self.param.dz / 2.0,
self.param.dz * self.param.mz - self.param.dz / 2.0,
self.param.mz,
)
if const_dir == "x":
n1 = self.param.my
n2 = self.param.mz
x1 = y
x2 = z
x1label = "y"
x2label = "z"
elif const_dir == "y":
n1 = self.param.mx
n2 = self.param.mz
x1 = x
x2 = z
x1label = "x"
x2label = "z"
else:
n1 = self.param.mx
n2 = self.param.my
x1 = x
x2 = y
x1label = "x"
x2label = "y"
data = data.reshape((n2, n1))
pylab.clf()
if logplot:
data = 20 * numpy.log10(numpy.abs(data).clip(1e-30, 1e30))
pylab.jet()
else:
pylab.hot()
pylab.contour(x1, x2, data, 64)
pylab.colorbar()
pylab.axis("image")
pylab.xlabel(x1label + " /um")
pylab.ylabel(x2label + " /um")
pylab.show()
def memory(self):
"""Estimate the memory occupation."""
# size_of_char = 1
# size_of_int = 4
size_of_real = 4
# size_of_complex = 2 * size_of_real
# size_of_dielobj = size_of_int + 31 * size_of_real + 2 * 16 * size_of_char
# size_of_source = 9 * size_of_int + 5 * size_of_real + 6 * 16 * size_of_char
# size_of_monitor = (6 + 2) * 6 * size_of_int
Gb = 1024 ** 3
max_available_RAM = 32 * Gb
dynamic_alloc_memory = 0
# epsx, epsy, epsz
dynamic_alloc_memory = (
dynamic_alloc_memory
+ 3
* (self.param.mx + 2 * self.input.pmlx)
* (self.param.my + 2 * self.input.pmly)
* (self.param.mz + 2 * self.input.pmlz)
* size_of_real
)
# sigma
dynamic_alloc_memory = (
dynamic_alloc_memory
+ 2
* (self.param.mx + 2 * self.input.pmlx)
* (self.param.my + 2 * self.input.pmly)
* (self.param.mz + 2 * self.input.pmlz)
* size_of_real
)
# cex, cmx
dynamic_alloc_memory = (
dynamic_alloc_memory
+ 2 * 2 * (self.param.mx + 2 * self.input.pmlx) * size_of_real
)
# cey, cmy
dynamic_alloc_memory = (
dynamic_alloc_memory
+ 2 * 2 * (self.param.my + 2 * self.input.pmly) * size_of_real
)
# cez, cmz
dynamic_alloc_memory = (
dynamic_alloc_memory
+ 2 * 2 * (self.param.mz + 2 * self.input.pmlz) * size_of_real
)
# exy, exz, eyx, eyz, ...
dynamic_alloc_memory = (
dynamic_alloc_memory
+ 12
* (self.param.mx + 2 * self.input.pmlx)
* (self.param.my + 2 * self.input.pmly)
* (self.param.mz + 2 * self.input.pmlz)
* size_of_real
)
print(
"Alloc mem = %g Gb, [%d%%]"
% (
1.0 * dynamic_alloc_memory / Gb,
int(1.0 * dynamic_alloc_memory / max_available_RAM * 100),
)
)
def run(
self,
directory_="./",
exe_file="/xlv1/labsoi_devices/devices/f3d",
output_file="output",
ncpu=12,
bg=False,
remote=True,
):
"""Run the simulation, possibly in remote."""
directory = fixdir(directory_)
# os.environ['OMP_NUM_THREAD'] = str(ncpu)
# cmd = 'dplace -x6 ' + exe_file + ' > ' + output_file
        cmd = (
            "cd "
            + directory
            + "; setenv OMP_NUM_THREAD "
            + str(ncpu)
            + "; dplace -x6 "
            + exe_file
            + " > "
            + output_file
        )
if bg:
cmd += "&"
if remote:
cmd = 'ssh pico "' + cmd + '"'
os.system(cmd)
def __str__(self):
"""Return a representation of the FDTD data structure."""
return "INPUT\n%s\nPARAM\n%s\nSENSORS\n%s\n" % (
self.input,
self.param,
self.sensors,
)
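# Illustrative usage sketch (directory names and sensor/frequency indices are
# placeholders, not a prescribed workflow):
#
#     fdtd = FDTD()
#     fdtd.load(directory_='./run01/')   # reads inp.txt, param and the sensor files
#     fdtd.sensors[0].plot(0)            # first DFT sensor, first frequency
#     fdtd.viz2D('diel_01', directory_='./run01/', const_dir='z')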
def load_fortran_unformatted(filename):
"""Load data from an unformatted fortran binary file."""
try:
f = open(filename, "rb")
except Exception:
print("ERROR")
return
nbytes = numpy.fromfile(f, dtype=numpy.int32, count=1)
    n = int(nbytes[0]) // numpy.float32().nbytes
data = numpy.fromfile(f, dtype=numpy.float32, count=n)
f.close()
return data
def strip_comment(line):
"""Get rid of fortran comments."""
idx = line.find("!")
if idx != -1:
return line[:idx].strip()
return line
def fixdir(str, sep="/"):
tmp = str
if len(tmp) > 0:
if tmp[-1] != sep:
tmp += sep
return tmp
# def overlap_f(simul, solver, nwl):
# vu = numpy.zeros((len(simul.sensors), len(solver.modes)), dtype=complex)
# ju = numpy.zeros((len(simul.sensors), len(solver.modes)), dtype=complex)
# for isens, sens in enumerate(simul.sensors):
# for imode, mode in enumerate(solver.modes):
# Ex, Ey, Ez, Hx, Hy, Hz = mode.get_fields_for_FDTD()
# vu[isens, imode] = 0.5 * (
# numpy.trapz(numpy.trapz(sens.E1[:,1:-1,nwl] * Hy, dx=sens.dx2*1e-6), dx=sens.dx1*1e-6) -
# numpy.trapz(numpy.trapz(sens.E2[1:-1,:,nwl] * Hx, dx=sens.dx2*1e-6), dx=sens.dx1*1e-6))
# ju[isens, imode] = 0.5 * (
# numpy.trapz(numpy.trapz(sens.H2[1:-1,:,nwl] * Ey, dx=sens.dx2*1e-6), dx=sens.dx1*1e-6) -
# numpy.trapz(numpy.trapz(sens.H1[:,1:-1,nwl] * Ex, dx=sens.dx2*1e-6), dx=sens.dx1*1e-6))
# A = (vu + ju) / 2.
# B = (vu - ju) / 2.
# Pm = numpy.abs(A)**2 - numpy.abs(B)**2
# P = Pm.sum(axis=1)
# return (vu, ju, A, B, Pm, P)
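# Note on overlap_f below (hedged interpretation, following the usual mode-overlap
# bookkeeping): vu and ju are the electric- and magnetic-field overlap integrals
# between the sensor's DFT fields and each modesolver mode; A and B are then the
# forward- and backward-propagating modal amplitudes, Pm the net power carried by
# each mode, and P the total net power through each sensor.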
def overlap_f(sensors, solver, nwl):
vu = numpy.zeros((len(sensors), len(solver.modes)), dtype=complex)
ju = numpy.zeros((len(sensors), len(solver.modes)), dtype=complex)
for isens, sens in enumerate(sensors):
x = sens.dx1 * numpy.arange(sens.E2.shape[0])
y = sens.dx2 * numpy.arange(sens.E1.shape[1])
for imode, mode in enumerate(solver.modes):
# resample the mode to the sensor's grid
Ex, Ey, Ez, Hx, Hy, Hz = mode.get_fields_for_FDTD(x, y)
# vu[isens, imode] = 0.5 * (
# numpy.trapz(numpy.trapz(sens.E1[:,1:-1,nwl] * Hy, dx=sens.dx2*1e-6), dx=sens.dx1*1e-6) -
# numpy.trapz(numpy.trapz(sens.E2[1:-1,:,nwl] * Hx, dx=sens.dx2*1e-6), dx=sens.dx1*1e-6))
# ju[isens, imode] = 0.5 * (
# numpy.trapz(numpy.trapz(sens.H2[1:-1,:,nwl] * Ey, dx=sens.dx2*1e-6), dx=sens.dx1*1e-6) -
# numpy.trapz(numpy.trapz(sens.H1[:,1:-1,nwl] * Ex, dx=sens.dx2*1e-6), dx=sens.dx1*1e-6))
vu[isens, imode] = 0.5 * (
EMpy.utils.trapz2(
sens.E1[:, 1:-1, nwl] * Hy, dx=sens.dx1 * 1e-6, dy=sens.dx2 * 1e-6
)
- EMpy.utils.trapz2(
sens.E2[1:-1, :, nwl] * Hx, dx=sens.dx1 * 1e-6, dy=sens.dx2 * 1e-6
)
)
ju[isens, imode] = 0.5 * (
EMpy.utils.trapz2(
                    sens.H2[1:-1, :, nwl] * Ey, dx=sens.dx1 * 1e-6, dy=sens.dx2 * 1e-6
)
- EMpy.utils.trapz2(
                    sens.H1[:, 1:-1, nwl] * Ex, dx=sens.dx1 * 1e-6, dy=sens.dx2 * 1e-6
)
)
A = (vu + ju) / 2.0
B = (vu - ju) / 2.0
Pm = numpy.abs(A) ** 2 - numpy.abs(B) ** 2
P = Pm.sum(axis=1)
return (vu, ju, A, B, Pm, P)
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import codecs
import json
import unittest
from magnet.topology import Topology
from magnet.test.utils import create_cmdexecutor
def load_json_file(json_filepath):
obj = {}
with open(json_filepath, 'r') as f:
obj = json.loads(f.read())
return obj
def load_lines(filepath):
lines = []
with codecs.open(filepath, 'r', 'utf-8') as f:
lines = f.readlines()
return lines
class TestTopology(unittest.TestCase):
def test___init__(self):
obj = Topology()
self.assertIsNotNone(obj._channel_dict)
self.assertEquals(0, len(obj._channel_dict))
self.assertIsNotNone(obj._node_dict)
self.assertEquals(0, len(obj._node_dict))
self.assertIsNotNone(obj._app_factory)
def test_append_channel(self):
obj = Topology()
name = 'myChannel'
channel_obj = {
'name': name,
}
obj.append_channel(channel_obj)
self.assertEquals(1, len(obj._channel_dict))
self.assertEquals(name, obj._channel_dict[name].get_name())
def test_append_channel_with_opts(self):
obj = Topology()
name = 'myChannel'
channel_obj = {
'name': name,
'opts': {
'key1': 'value1',
'key2': 'value2',
},
}
obj.append_channel(channel_obj)
self.assertEquals(2, len(obj._channel_dict[name]._opts))
def test_append_node(self):
obj = Topology()
name = 'myNode'
node_obj = {
'name': name,
}
obj.append_node(node_obj)
self.assertEquals(1, len(obj._node_dict))
self.assertEquals(name, obj._node_dict[name].get_name())
def test_append_node_with_opts(self):
obj = Topology()
name = 'myNode'
node_obj = {
'name': name,
'opts': {
'key1': 'value1',
'key2': 'value2',
},
}
obj.append_node(node_obj)
self.assertEquals(2, len(obj._node_dict[name]._opts))
def test_append_node_with_net_device_without_channel(self):
obj = Topology()
name = 'myNode'
node_obj = {
'name': name,
'net_devices': [
{
'name': 'myNetDevice1',
},
{
'name': 'myNetDevice2',
},
],
}
obj.append_node(node_obj)
node = obj._node_dict[name]
self.assertEquals(2, len(node._net_devices))
net_device = node._net_devices['myNetDevice1']
self.assertEquals('myNetDevice1', net_device.get_name())
net_device = node._net_devices['myNetDevice2']
self.assertEquals('myNetDevice2', net_device.get_name())
def test_append_node_with_net_device_and_channel(self):
obj = Topology()
channel_name = 'myChannel'
channel_obj = {
'name': channel_name,
}
node_name = 'myNode'
node_obj = {
'name': node_name,
'net_devices': [
{
'name': 'myNetDevice1',
'channel_name': channel_name,
},
],
}
obj.append_channel(channel_obj)
obj.append_node(node_obj)
node = obj._node_dict[node_name]
channel = obj._channel_dict[channel_name]
net_device = node._net_devices['myNetDevice1']
self.assertIsNotNone(net_device)
self.assertEquals(channel, net_device._channel)
def test_append_node_with_application(self):
obj = Topology()
node_name = 'myNode'
node_obj = {
'name': node_name,
'applications': [
{
'app_name': 'dummy',
},
]
}
obj.append_node(node_obj)
node = obj._node_dict[node_name]
self.assertEquals(1, len(node._applications))
app = node._applications[0]
self.assertEquals(node, app._node)
def test_setup_topology_obj(self):
obj = Topology()
topo_obj = load_json_file('./examples/1host-1gw/topo.json')
obj.setup_topology_obj(topo_obj)
self.assertEquals(1, len(obj._channel_dict))
self.assertEquals(2, len(obj._node_dict))
channel = obj._channel_dict['vbr-pext']
node_qgw = obj._node_dict['qgw']
node_qhost1 = obj._node_dict['qhost1']
self.assertEquals(1, len(node_qgw._net_devices))
self.assertEquals('veth0', node_qgw._net_devices['veth0'].get_name())
self.assertEquals(channel, node_qgw._net_devices['veth0']._channel)
self.assertEquals(1, len(node_qgw._applications))
self.assertEquals('ip_helper', node_qgw._applications[0].get_name())
self.assertEquals(1, len(node_qhost1._net_devices))
self.assertEquals('veth0', node_qhost1._net_devices['veth0'].get_name())
self.assertEquals(channel, node_qhost1._net_devices['veth0']._channel)
self.assertEquals(1, len(node_qhost1._applications))
self.assertEquals('ip_helper', node_qhost1._applications[0].get_name())
def test_create(self):
obj = Topology()
cmdlist = []
cmdexec = create_cmdexecutor(cmdlist)
obj.set_cmdexecutor(cmdexec)
topo_obj = load_json_file('./examples/1host-1gw/topo.json')
obj.setup_topology_obj(topo_obj)
obj.create()
self.assertEquals(True, obj._is_created)
expected_cmdlist = [
line.strip()
for line in load_lines('./magnet/test/create.1host-1gw.sh')
]
self.assertEquals(len(expected_cmdlist), len(cmdlist), cmdlist)
for idx in range(len(expected_cmdlist)):
self.assertEquals(expected_cmdlist[idx], cmdlist[idx])
def test_delete(self):
obj = Topology()
cmdlist = []
cmdexec = create_cmdexecutor(cmdlist)
obj.set_cmdexecutor(cmdexec)
topo_obj = load_json_file('./examples/1host-1gw/topo.json')
obj.setup_topology_obj(topo_obj)
obj.delete()
self.assertEquals(False, obj._is_created)
expected_cmdlist = [
line.strip()
for line in load_lines('./magnet/test/delete.1host-1gw.sh')
]
self.assertEquals(len(expected_cmdlist), len(cmdlist))
for idx in range(len(expected_cmdlist)):
self.assertEquals(expected_cmdlist[idx], cmdlist[idx])
# EOF
|
|
# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from functools import partialmethod
from pathlib import Path, PurePath
from textwrap import dedent
from typing import List, Optional
from pants.backend.python.rules import (
download_pex_bin,
pex,
pex_from_target_closure,
prepare_chrooted_python_sources,
pytest_coverage,
pytest_runner,
)
from pants.backend.python.rules.pytest_runner import PytestRunner
from pants.backend.python.subsystems import python_native_code, subprocess_environment
from pants.backend.python.targets.python_library import PythonLibrary
from pants.backend.python.targets.python_requirement_library import PythonRequirementLibrary
from pants.backend.python.targets.python_tests import PythonTests
from pants.base.specs import FilesystemLiteralSpec, OriginSpec, SingleAddress
from pants.build_graph.build_file_aliases import BuildFileAliases
from pants.engine.fs import FileContent
from pants.engine.interactive_runner import InteractiveRunner
from pants.engine.legacy.structs import PythonTestsAdaptor, PythonTestsAdaptorWithOrigin
from pants.engine.rules import RootRule, subsystem_rule
from pants.engine.selectors import Params
from pants.python.python_requirement import PythonRequirement
from pants.rules.core import determine_source_files, strip_source_roots
from pants.rules.core.test import Status, TestDebugRequest, TestOptions, TestResult
from pants.testutil.interpreter_selection_utils import skip_unless_python27_and_python3_present
from pants.testutil.option.util import create_options_bootstrapper
from pants.testutil.test_base import TestBase
class PythonTestRunnerIntegrationTest(TestBase):
source_root = "tests/python/pants_test"
good_source = FileContent(path="test_good.py", content=b"def test():\n pass\n")
bad_source = FileContent(path="test_bad.py", content=b"def test():\n assert False\n")
py3_only_source = FileContent(path="test_py3.py", content=b"def test() -> None:\n pass\n")
library_source = FileContent(path="library.py", content=b"def add_two(x):\n return x + 2\n")
create_python_library = partialmethod(
TestBase.create_library, path=source_root, target_type="python_library",
)
def write_file(self, file_content: FileContent) -> None:
self.create_file(
relpath=PurePath(self.source_root, file_content.path).as_posix(),
contents=file_content.content.decode(),
)
def create_basic_library(self) -> None:
self.create_python_library(name="library", sources=[self.library_source.path])
self.write_file(self.library_source)
def create_python_test_target(
self,
source_files: List[FileContent],
*,
dependencies: Optional[List[str]] = None,
interpreter_constraints: Optional[str] = None,
) -> None:
self.add_to_build_file(
relpath=self.source_root,
target=dedent(
f"""\
python_tests(
name='target',
dependencies={dependencies or []},
compatibility={[interpreter_constraints] if interpreter_constraints else []},
)
"""
),
)
for source_file in source_files:
self.write_file(source_file)
def setup_thirdparty_dep(self) -> None:
self.add_to_build_file(
relpath="3rdparty/python",
target=dedent(
"""\
python_requirement_library(
name='ordered-set',
requirements=[python_requirement('ordered-set==3.1.1')],
)
"""
),
)
@classmethod
def alias_groups(cls) -> BuildFileAliases:
return BuildFileAliases(
targets={
"python_library": PythonLibrary,
"python_tests": PythonTests,
"python_requirement_library": PythonRequirementLibrary,
},
objects={"python_requirement": PythonRequirement,},
)
@classmethod
def rules(cls):
return (
*super().rules(),
*pytest_coverage.rules(),
*pytest_runner.rules(),
*download_pex_bin.rules(),
*determine_source_files.rules(),
*pex.rules(),
*pex_from_target_closure.rules(),
*prepare_chrooted_python_sources.rules(),
*python_native_code.rules(),
*strip_source_roots.rules(),
*subprocess_environment.rules(),
subsystem_rule(TestOptions),
RootRule(PytestRunner),
)
def run_pytest(
self, *, passthrough_args: Optional[str] = None, origin: Optional[OriginSpec] = None,
) -> TestResult:
args = [
"--backend-packages2=pants.backend.python",
# pin to lower versions so that we can run Python 2 tests
"--pytest-version=pytest>=4.6.6,<4.7",
"--pytest-pytest-plugins=['zipp==1.0.0']",
]
if passthrough_args:
args.append(f"--pytest-args='{passthrough_args}'")
options_bootstrapper = create_options_bootstrapper(args=args)
if origin is None:
origin = SingleAddress(directory=self.source_root, name="target")
# TODO: We must use the V1 target's `_sources_field.sources` field to set the TargetAdaptor's
# sources attribute. The adaptor will not auto-populate this field. However, it will
# auto-populate things like `dependencies` and this was not necessary before using
# PythonTestsAdaptorWithOrigin. Why is this necessary in test code?
v1_target = self.target(f"{self.source_root}:target")
adaptor = PythonTestsAdaptor(
address=v1_target.address.to_address(), sources=v1_target._sources_field.sources,
)
params = Params(
PytestRunner(PythonTestsAdaptorWithOrigin(adaptor, origin)), options_bootstrapper
)
test_result = self.request_single_product(TestResult, params)
debug_request = self.request_single_product(TestDebugRequest, params)
debug_result = InteractiveRunner(self.scheduler).run_local_interactive_process(
debug_request.ipr
)
if test_result.status == Status.SUCCESS:
assert debug_result.process_exit_code == 0
else:
assert debug_result.process_exit_code != 0
return test_result
def test_single_passing_test(self) -> None:
self.create_python_test_target([self.good_source])
result = self.run_pytest()
assert result.status == Status.SUCCESS
assert "test_good.py ." in result.stdout
def test_single_failing_test(self) -> None:
self.create_python_test_target([self.bad_source])
result = self.run_pytest()
assert result.status == Status.FAILURE
assert "test_bad.py F" in result.stdout
def test_mixed_sources(self) -> None:
self.create_python_test_target([self.good_source, self.bad_source])
result = self.run_pytest()
assert result.status == Status.FAILURE
assert "test_good.py ." in result.stdout
assert "test_bad.py F" in result.stdout
def test_precise_file_args(self) -> None:
self.create_python_test_target([self.good_source, self.bad_source])
file_arg = FilesystemLiteralSpec(
PurePath(self.source_root, self.good_source.path).as_posix()
)
result = self.run_pytest(origin=file_arg)
assert result.status == Status.SUCCESS
assert "test_good.py ." in result.stdout
assert "test_bad.py F" not in result.stdout
def test_absolute_import(self) -> None:
self.create_basic_library()
source = FileContent(
path="test_absolute_import.py",
content=dedent(
"""\
from pants_test.library import add_two
def test():
assert add_two(2) == 4
"""
).encode(),
)
self.create_python_test_target([source], dependencies=[":library"])
result = self.run_pytest()
assert result.status == Status.SUCCESS
assert "test_absolute_import.py ." in result.stdout
def test_relative_import(self) -> None:
self.create_basic_library()
source = FileContent(
path="test_relative_import.py",
content=dedent(
"""\
from .library import add_two
def test():
assert add_two(2) == 4
"""
).encode(),
)
self.create_python_test_target([source], dependencies=[":library"])
result = self.run_pytest()
assert result.status == Status.SUCCESS
assert "test_relative_import.py ." in result.stdout
def test_transitive_dep(self) -> None:
self.create_basic_library()
self.create_python_library(
name="transitive_dep", sources=["transitive_dep.py"], dependencies=[":library"],
)
self.write_file(
FileContent(
path="transitive_dep.py",
content=dedent(
"""\
from pants_test.library import add_two
def add_four(x):
return add_two(x) + 2
"""
).encode(),
)
)
source = FileContent(
path="test_transitive_dep.py",
content=dedent(
"""\
from pants_test.transitive_dep import add_four
def test():
assert add_four(2) == 6
"""
).encode(),
)
self.create_python_test_target([source], dependencies=[":transitive_dep"])
result = self.run_pytest()
assert result.status == Status.SUCCESS
assert "test_transitive_dep.py ." in result.stdout
def test_thirdparty_dep(self) -> None:
self.setup_thirdparty_dep()
source = FileContent(
path="test_3rdparty_dep.py",
content=dedent(
"""\
from ordered_set import OrderedSet
def test():
assert OrderedSet((1, 2)) == OrderedSet([1, 2])
"""
).encode(),
)
self.create_python_test_target([source], dependencies=["3rdparty/python:ordered-set"])
result = self.run_pytest()
assert result.status == Status.SUCCESS
assert "test_3rdparty_dep.py ." in result.stdout
def test_thirdparty_transitive_dep(self) -> None:
self.setup_thirdparty_dep()
self.create_python_library(
name="library", sources=["library.py"], dependencies=["3rdparty/python:ordered-set"],
)
self.write_file(
FileContent(
path="library.py",
content=dedent(
"""\
import string
from ordered_set import OrderedSet
alphabet = OrderedSet(string.ascii_lowercase)
"""
).encode(),
)
)
source = FileContent(
path="test_3rdparty_transitive_dep.py",
content=dedent(
"""\
from pants_test.library import alphabet
def test():
assert 'a' in alphabet and 'z' in alphabet
"""
).encode(),
)
self.create_python_test_target([source], dependencies=[":library"])
result = self.run_pytest()
assert result.status == Status.SUCCESS
assert "test_3rdparty_transitive_dep.py ." in result.stdout
@skip_unless_python27_and_python3_present
def test_uses_correct_python_version(self) -> None:
self.create_python_test_target(
[self.py3_only_source], interpreter_constraints="CPython==2.7.*"
)
py2_result = self.run_pytest()
assert py2_result.status == Status.FAILURE
assert "SyntaxError: invalid syntax" in py2_result.stdout
Path(
self.build_root, self.source_root, "BUILD"
).unlink() # Cleanup in order to recreate the target
self.create_python_test_target(
[self.py3_only_source], interpreter_constraints="CPython>=3.6"
)
py3_result = self.run_pytest()
assert py3_result.status == Status.SUCCESS
assert "test_py3.py ." in py3_result.stdout
def test_respects_passthrough_args(self) -> None:
source = FileContent(
path="test_config.py",
content=dedent(
"""\
def test_run_me():
pass
def test_ignore_me():
pass
"""
).encode(),
)
self.create_python_test_target([source])
result = self.run_pytest(passthrough_args="-k test_run_me")
assert result.status == Status.SUCCESS
assert "test_config.py ." in result.stdout
assert "collected 2 items / 1 deselected / 1 selected" in result.stdout
|
|
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import uuid
import mock
from oslo.config import cfg
from neutron.agent.common import config as agent_config
from neutron.agent import l3_agent
from neutron.agent import l3_ha_agent
from neutron.agent.linux import ip_lib
from neutron.common import config as base_config
from neutron import context
from neutron.plugins.common import constants
from neutron.services.firewall.agents import firewall_agent_api
from neutron.services.firewall.agents.l3reference import firewall_l3_agent
from neutron.tests import base
from neutron.tests.unit.services.firewall.agents import test_firewall_agent_api
class FWaasHelper(object):
def __init__(self, host):
pass
class FWaasAgent(firewall_l3_agent.FWaaSL3AgentRpcCallback, FWaasHelper):
neutron_service_plugins = []
def _setup_test_agent_class(service_plugins):
class FWaasTestAgent(firewall_l3_agent.FWaaSL3AgentRpcCallback,
FWaasHelper):
neutron_service_plugins = service_plugins
return FWaasTestAgent
class TestFwaasL3AgentRpcCallback(base.BaseTestCase):
def setUp(self):
super(TestFwaasL3AgentRpcCallback, self).setUp()
self.conf = cfg.ConfigOpts()
self.conf.register_opts(base_config.core_opts)
self.conf.register_opts(l3_agent.L3NATAgent.OPTS)
self.conf.register_opts(l3_ha_agent.OPTS)
agent_config.register_use_namespaces_opts_helper(self.conf)
agent_config.register_root_helper(self.conf)
self.conf.root_helper = 'sudo'
self.conf.register_opts(firewall_agent_api.FWaaSOpts, 'fwaas')
self.api = FWaasAgent(self.conf)
self.api.fwaas_driver = test_firewall_agent_api.NoopFwaasDriver()
def test_fw_config_match(self):
test_agent_class = _setup_test_agent_class([constants.FIREWALL])
cfg.CONF.set_override('enabled', True, 'fwaas')
with mock.patch('neutron.openstack.common.importutils.import_object'):
test_agent_class(cfg.CONF)
def test_fw_config_mismatch_plugin_enabled_agent_disabled(self):
test_agent_class = _setup_test_agent_class([constants.FIREWALL])
cfg.CONF.set_override('enabled', False, 'fwaas')
self.assertRaises(SystemExit, test_agent_class, cfg.CONF)
def test_fw_plugin_list_unavailable(self):
test_agent_class = _setup_test_agent_class(None)
cfg.CONF.set_override('enabled', False, 'fwaas')
with mock.patch('neutron.openstack.common.importutils.import_object'):
test_agent_class(cfg.CONF)
def test_create_firewall(self):
fake_firewall = {'id': 0}
with mock.patch.object(
self.api,
'_invoke_driver_for_plugin_api'
) as mock_driver:
self.assertEqual(
self.api.create_firewall(
mock.sentinel.context,
fake_firewall,
'host'),
mock_driver.return_value)
def test_update_firewall(self):
fake_firewall = {'id': 0}
with mock.patch.object(
self.api,
'_invoke_driver_for_plugin_api'
) as mock_driver:
self.assertEqual(
self.api.update_firewall(
mock.sentinel.context,
fake_firewall,
'host'),
mock_driver.return_value)
def test_delete_firewall(self):
fake_firewall = {'id': 0}
with mock.patch.object(
self.api,
'_invoke_driver_for_plugin_api'
) as mock_driver:
self.assertEqual(
self.api.delete_firewall(
mock.sentinel.context,
fake_firewall,
'host'),
mock_driver.return_value)
def test_invoke_driver_for_plugin_api(self):
fake_firewall = {'id': 0, 'tenant_id': 1,
'admin_state_up': True}
self.api.plugin_rpc = mock.Mock()
with contextlib.nested(
mock.patch.object(self.api.plugin_rpc, 'get_routers'),
mock.patch.object(self.api, '_get_router_info_list_for_tenant'),
mock.patch.object(self.api.fwaas_driver, 'create_firewall'),
mock.patch.object(self.api.fwplugin_rpc, 'set_firewall_status')
) as (
mock_get_routers,
mock_get_router_info_list_for_tenant,
mock_driver_create_firewall,
mock_set_firewall_status):
mock_driver_create_firewall.return_value = True
self.api.create_firewall(
context=mock.sentinel.context,
firewall=fake_firewall, host='host')
mock_get_routers.assert_called_once_with(
mock.sentinel.context)
mock_get_router_info_list_for_tenant.assert_called_once_with(
mock_get_routers.return_value, fake_firewall['tenant_id'])
mock_set_firewall_status.assert_called_once_with(
mock.sentinel.context,
fake_firewall['id'],
'ACTIVE')
def test_invoke_driver_for_plugin_api_admin_state_down(self):
fake_firewall = {'id': 0, 'tenant_id': 1,
'admin_state_up': False}
self.api.plugin_rpc = mock.Mock()
with contextlib.nested(
mock.patch.object(self.api.plugin_rpc, 'get_routers'),
mock.patch.object(self.api, '_get_router_info_list_for_tenant'),
mock.patch.object(self.api.fwaas_driver, 'update_firewall'),
mock.patch.object(self.api.fwplugin_rpc,
'get_firewalls_for_tenant'),
mock.patch.object(self.api.fwplugin_rpc, 'set_firewall_status')
) as (
mock_get_routers,
mock_get_router_info_list_for_tenant,
mock_driver_update_firewall,
mock_get_firewalls_for_tenant,
mock_set_firewall_status):
mock_driver_update_firewall.return_value = True
self.api.update_firewall(
context=mock.sentinel.context,
firewall=fake_firewall, host='host')
mock_get_routers.assert_called_once_with(
mock.sentinel.context)
mock_get_router_info_list_for_tenant.assert_called_once_with(
mock_get_routers.return_value, fake_firewall['tenant_id'])
mock_set_firewall_status.assert_called_once_with(
mock.sentinel.context,
fake_firewall['id'],
'DOWN')
def test_invoke_driver_for_plugin_api_delete(self):
fake_firewall = {'id': 0, 'tenant_id': 1,
'admin_state_up': True}
self.api.plugin_rpc = mock.Mock()
with contextlib.nested(
mock.patch.object(self.api.plugin_rpc, 'get_routers'),
mock.patch.object(self.api, '_get_router_info_list_for_tenant'),
mock.patch.object(self.api.fwaas_driver, 'delete_firewall'),
mock.patch.object(self.api.fwplugin_rpc, 'firewall_deleted')
) as (
mock_get_routers,
mock_get_router_info_list_for_tenant,
mock_driver_delete_firewall,
mock_firewall_deleted):
mock_driver_delete_firewall.return_value = True
self.api.delete_firewall(
context=mock.sentinel.context,
firewall=fake_firewall, host='host')
mock_get_routers.assert_called_once_with(
mock.sentinel.context)
mock_get_router_info_list_for_tenant.assert_called_once_with(
mock_get_routers.return_value, fake_firewall['tenant_id'])
mock_firewall_deleted.assert_called_once_with(
mock.sentinel.context,
fake_firewall['id'])
def test_delete_firewall_no_router(self):
fake_firewall = {'id': 0, 'tenant_id': 1}
self.api.plugin_rpc = mock.Mock()
with contextlib.nested(
mock.patch.object(self.api.plugin_rpc, 'get_routers'),
mock.patch.object(self.api, '_get_router_info_list_for_tenant'),
mock.patch.object(self.api.fwplugin_rpc, 'firewall_deleted')
) as (
mock_get_routers,
mock_get_router_info_list_for_tenant,
mock_firewall_deleted):
mock_get_router_info_list_for_tenant.return_value = []
self.api.delete_firewall(
context=mock.sentinel.context,
firewall=fake_firewall, host='host')
mock_get_routers.assert_called_once_with(
mock.sentinel.context)
mock_get_router_info_list_for_tenant.assert_called_once_with(
mock_get_routers.return_value, fake_firewall['tenant_id'])
mock_firewall_deleted.assert_called_once_with(
mock.sentinel.context,
fake_firewall['id'])
def test_process_router_add_fw_update(self):
fake_firewall_list = [{'id': 0, 'tenant_id': 1,
'status': constants.PENDING_UPDATE,
'admin_state_up': True}]
fake_router = {'id': 1111, 'tenant_id': 2}
self.api.plugin_rpc = mock.Mock()
agent_mode = 'legacy'
ri = mock.Mock()
ri.router = fake_router
routers = [ri.router]
with contextlib.nested(
mock.patch.object(self.api.plugin_rpc, 'get_routers'),
mock.patch.object(self.api, '_get_router_info_list_for_tenant'),
mock.patch.object(self.api.fwaas_driver, 'update_firewall'),
mock.patch.object(self.api.fwplugin_rpc, 'set_firewall_status'),
mock.patch.object(self.api.fwplugin_rpc,
'get_firewalls_for_tenant'),
mock.patch.object(context, 'Context')
) as (
mock_get_routers,
mock_get_router_info_list_for_tenant,
mock_driver_update_firewall,
mock_set_firewall_status,
mock_get_firewalls_for_tenant,
mock_Context):
mock_driver_update_firewall.return_value = True
ctx = mock.sentinel.context
mock_Context.return_value = ctx
mock_get_router_info_list_for_tenant.return_value = routers
mock_get_firewalls_for_tenant.return_value = fake_firewall_list
self.api._process_router_add(ri)
mock_get_router_info_list_for_tenant.assert_called_with(
routers,
ri.router['tenant_id'])
mock_get_firewalls_for_tenant.assert_called_once_with(ctx)
mock_driver_update_firewall.assert_called_once_with(
agent_mode,
routers,
fake_firewall_list[0])
mock_set_firewall_status.assert_called_once_with(
ctx,
fake_firewall_list[0]['id'],
constants.ACTIVE)
def test_process_router_add_fw_delete(self):
fake_firewall_list = [{'id': 0, 'tenant_id': 1,
'status': constants.PENDING_DELETE}]
fake_router = {'id': 1111, 'tenant_id': 2}
agent_mode = 'legacy'
self.api.plugin_rpc = mock.Mock()
ri = mock.Mock()
ri.router = fake_router
routers = [ri.router]
with contextlib.nested(
mock.patch.object(self.api.plugin_rpc, 'get_routers'),
mock.patch.object(self.api, '_get_router_info_list_for_tenant'),
mock.patch.object(self.api.fwaas_driver, 'delete_firewall'),
mock.patch.object(self.api.fwplugin_rpc, 'firewall_deleted'),
mock.patch.object(self.api.fwplugin_rpc,
'get_firewalls_for_tenant'),
mock.patch.object(context, 'Context')
) as (
mock_get_routers,
mock_get_router_info_list_for_tenant,
mock_driver_delete_firewall,
mock_firewall_deleted,
mock_get_firewalls_for_tenant,
mock_Context):
mock_driver_delete_firewall.return_value = True
ctx = mock.sentinel.context
mock_Context.return_value = ctx
mock_get_router_info_list_for_tenant.return_value = routers
mock_get_firewalls_for_tenant.return_value = fake_firewall_list
self.api._process_router_add(ri)
mock_get_router_info_list_for_tenant.assert_called_with(
routers,
ri.router['tenant_id'])
mock_get_firewalls_for_tenant.assert_called_once_with(ctx)
mock_driver_delete_firewall.assert_called_once_with(
agent_mode,
routers,
fake_firewall_list[0])
mock_firewall_deleted.assert_called_once_with(
ctx,
fake_firewall_list[0]['id'])
def _prepare_router_data(self, use_namespaces):
router = {'id': str(uuid.uuid4()), 'tenant_id': str(uuid.uuid4())}
ns = "ns-" + router['id']
return l3_agent.RouterInfo(router['id'], self.conf.root_helper,
use_namespaces, router=router, ns_name=ns)
def _get_router_info_list_with_namespace_helper(self,
router_use_namespaces):
self.conf.set_override('use_namespaces', True)
ri = self._prepare_router_data(
use_namespaces=router_use_namespaces)
routers = [ri.router]
self.api.router_info = {ri.router_id: ri}
with mock.patch.object(ip_lib.IPWrapper,
'get_namespaces') as mock_get_namespaces:
mock_get_namespaces.return_value = ri.ns_name
router_info_list = self.api._get_router_info_list_for_tenant(
routers,
ri.router['tenant_id'])
self.assertEqual([ri], router_info_list)
mock_get_namespaces.assert_called_once_with(
self.conf.root_helper)
def _get_router_info_list_without_namespace_helper(self,
router_use_namespaces):
self.conf.set_override('use_namespaces', False)
ri = self._prepare_router_data(
use_namespaces=router_use_namespaces)
routers = [ri.router]
self.api.router_info = {ri.router_id: ri}
router_info_list = self.api._get_router_info_list_for_tenant(
routers,
ri.router['tenant_id'])
if router_use_namespaces:
self.assertFalse(router_info_list)
else:
self.assertEqual([ri], router_info_list)
def test_get_router_info_list_for_tenant_for_namespaces_enabled(self):
self._get_router_info_list_with_namespace_helper(
router_use_namespaces=True)
def test_get_router_info_list_for_tenant_for_namespaces_disabled(self):
self._get_router_info_list_without_namespace_helper(
router_use_namespaces=False)
def test_get_router_info_list_tenant_with_namespace_router_without(self):
self._get_router_info_list_with_namespace_helper(
router_use_namespaces=False)
def test_get_router_info_list_tenant_without_namespace_router_with(self):
self._get_router_info_list_without_namespace_helper(
router_use_namespaces=True)
def _get_router_info_list_router_without_router_info_helper(self,
rtr_with_ri):
self.conf.set_override('use_namespaces', True)
# ri.router with associated router_info (ri)
# rtr2 has no router_info
ri = self._prepare_router_data(use_namespaces=True)
rtr2 = {'id': str(uuid.uuid4()), 'tenant_id': ri.router['tenant_id']}
routers = [rtr2]
self.api.router_info = {}
ri_expected = []
if rtr_with_ri:
self.api.router_info[ri.router_id] = ri
routers.append(ri.router)
ri_expected.append(ri)
with mock.patch.object(ip_lib.IPWrapper,
'get_namespaces') as mock_get_namespaces:
mock_get_namespaces.return_value = ri.ns_name
router_info_list = self.api._get_router_info_list_for_tenant(
routers,
ri.router['tenant_id'])
self.assertEqual(ri_expected, router_info_list)
def test_get_router_info_list_router_without_router_info(self):
self._get_router_info_list_router_without_router_info_helper(
rtr_with_ri=False)
def test_get_router_info_list_two_routers_one_without_router_info(self):
self._get_router_info_list_router_without_router_info_helper(
rtr_with_ri=True)
|
|
# Lint as: python2, python3
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Python TF-Lite interpreter."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import ctypes
import platform
import sys
import os
import numpy as np
# pylint: disable=g-import-not-at-top
if not os.path.splitext(__file__)[0].endswith('tflite_runtime/interpreter'):
# This file is part of tensorflow package.
from tensorflow.lite.python.interpreter_wrapper import _pywrap_tensorflow_interpreter_wrapper as _interpreter_wrapper
from tensorflow.python.util.tf_export import tf_export as _tf_export
else:
# This file is part of tflite_runtime package.
from tflite_runtime import _pywrap_tensorflow_interpreter_wrapper as _interpreter_wrapper
def _tf_export(*x, **kwargs):
del x, kwargs
return lambda x: x
class Delegate(object):
"""Python wrapper class to manage TfLiteDelegate objects.
The shared library is expected to have two functions:
TfLiteDelegate* tflite_plugin_create_delegate(
char**, char**, size_t, void (*report_error)(const char *))
void tflite_plugin_destroy_delegate(TfLiteDelegate*)
The first one creates a delegate object. It may return NULL to indicate an
error (with a suitable error message reported by calling report_error()).
The second one destroys delegate object and must be called for every
created delegate object. Passing NULL as argument value is allowed, i.e.
tflite_plugin_destroy_delegate(tflite_plugin_create_delegate(...))
always works.
"""
def __init__(self, library, options=None):
"""Loads delegate from the shared library.
Args:
library: Shared library name.
options: Dictionary of options that are required to load the delegate. All
keys and values in the dictionary should be serializable. Consult the
documentation of the specific delegate for required and legal options.
(default None)
Raises:
RuntimeError: This is raised if the Python implementation is not CPython.
"""
# TODO(b/136468453): Remove need for __del__ ordering needs of CPython
# by using explicit closes(). See implementation of Interpreter __del__.
    if platform.python_implementation() != 'CPython':
      raise RuntimeError('Delegates are currently only supported on CPython '
                         'due to missing immediate reference counting.')
self._library = ctypes.pydll.LoadLibrary(library)
self._library.tflite_plugin_create_delegate.argtypes = [
ctypes.POINTER(ctypes.c_char_p),
ctypes.POINTER(ctypes.c_char_p), ctypes.c_int,
ctypes.CFUNCTYPE(None, ctypes.c_char_p)
]
self._library.tflite_plugin_create_delegate.restype = ctypes.c_void_p
# Convert the options from a dictionary to lists of char pointers.
options = options or {}
options_keys = (ctypes.c_char_p * len(options))()
options_values = (ctypes.c_char_p * len(options))()
for idx, (key, value) in enumerate(options.items()):
options_keys[idx] = str(key).encode('utf-8')
options_values[idx] = str(value).encode('utf-8')
class ErrorMessageCapture(object):
def __init__(self):
self.message = ''
def report(self, x):
self.message += x if isinstance(x, str) else x.decode('utf-8')
capture = ErrorMessageCapture()
error_capturer_cb = ctypes.CFUNCTYPE(None, ctypes.c_char_p)(capture.report)
# Do not make a copy of _delegate_ptr. It is freed by Delegate's finalizer.
self._delegate_ptr = self._library.tflite_plugin_create_delegate(
options_keys, options_values, len(options), error_capturer_cb)
if self._delegate_ptr is None:
raise ValueError(capture.message)
def __del__(self):
    # __del__ can be called multiple times, so if the delegate has already been
    # destroyed, don't try to destroy it twice.
if self._library is not None:
self._library.tflite_plugin_destroy_delegate.argtypes = [ctypes.c_void_p]
self._library.tflite_plugin_destroy_delegate(self._delegate_ptr)
self._library = None
def _get_native_delegate_pointer(self):
"""Returns the native TfLiteDelegate pointer.
It is not safe to copy this pointer because it needs to be freed.
Returns:
TfLiteDelegate *
"""
return self._delegate_ptr
@_tf_export('lite.experimental.load_delegate')
def load_delegate(library, options=None):
"""Returns loaded Delegate object.
Args:
library: Name of shared library containing the
[TfLiteDelegate](https://www.tensorflow.org/lite/performance/delegates).
options: Dictionary of options that are required to load the delegate. All
keys and values in the dictionary should be convertible to str. Consult
the documentation of the specific delegate for required and legal options.
(default None)
Returns:
Delegate object.
Raises:
ValueError: Delegate failed to load.
RuntimeError: If delegate loading is used on unsupported platform.
"""
try:
delegate = Delegate(library, options)
except ValueError as e:
raise ValueError('Failed to load delegate from {}\n{}'.format(
library, str(e)))
return delegate
@_tf_export('lite.Interpreter')
class Interpreter(object):
"""Interpreter interface for TensorFlow Lite Models.
This makes the TensorFlow Lite interpreter accessible in Python.
It is possible to use this interpreter in a multithreaded Python environment,
but you must be sure to call functions of a particular instance from only
one thread at a time. So if you want to have 4 threads running different
inferences simultaneously, create an interpreter for each one as thread-local
data. Similarly, if you are calling invoke() in one thread on a single
interpreter but you want to use tensor() on another thread once it is done,
you must use a synchronization primitive between the threads to ensure invoke
has returned before calling tensor().
"""
def __init__(self,
model_path=None,
model_content=None,
experimental_delegates=None,
num_threads=None):
"""Constructor.
Args:
model_path: Path to TF-Lite Flatbuffer file.
model_content: Content of model.
experimental_delegates: Experimental. Subject to change. List of
[TfLiteDelegate](https://www.tensorflow.org/lite/performance/delegates)
objects returned by lite.load_delegate().
num_threads: Sets the number of threads used by the interpreter and
available to CPU kernels. If not set, the interpreter will use an
implementation-dependent default number of threads. Currently, only a
subset of kernels, such as conv, support multi-threading.
Raises:
      ValueError: If the interpreter could not be created.
"""
if not hasattr(self, '_custom_op_registerers'):
self._custom_op_registerers = []
if model_path and not model_content:
custom_op_registerers_by_name = [
x for x in self._custom_op_registerers if isinstance(x, str)
]
custom_op_registerers_by_func = [
x for x in self._custom_op_registerers if not isinstance(x, str)
]
self._interpreter = (
_interpreter_wrapper.CreateWrapperFromFile(
model_path, custom_op_registerers_by_name,
custom_op_registerers_by_func))
if not self._interpreter:
raise ValueError('Failed to open {}'.format(model_path))
elif model_content and not model_path:
custom_op_registerers_by_name = [
x for x in self._custom_op_registerers if isinstance(x, str)
]
custom_op_registerers_by_func = [
x for x in self._custom_op_registerers if not isinstance(x, str)
]
# Take a reference, so the pointer remains valid.
      # Since Python strings are immutable, the PyString_XX functions
      # will always return the same pointer.
self._model_content = model_content
self._interpreter = (
_interpreter_wrapper.CreateWrapperFromBuffer(
model_content, custom_op_registerers_by_name,
custom_op_registerers_by_func))
elif not model_content and not model_path:
raise ValueError('`model_path` or `model_content` must be specified.')
else:
raise ValueError('Can\'t both provide `model_path` and `model_content`')
if num_threads is not None:
if not isinstance(num_threads, int):
raise ValueError('type of num_threads should be int')
if num_threads < 1:
        raise ValueError('num_threads should be >= 1')
self._interpreter.SetNumThreads(num_threads)
# Each delegate is a wrapper that owns the delegates that have been loaded
# as plugins. The interpreter wrapper will be using them, but we need to
# hold them in a list so that the lifetime is preserved at least as long as
# the interpreter wrapper.
self._delegates = []
if experimental_delegates:
self._delegates = experimental_delegates
for delegate in self._delegates:
self._interpreter.ModifyGraphWithDelegate(
delegate._get_native_delegate_pointer()) # pylint: disable=protected-access
def __del__(self):
# Must make sure the interpreter is destroyed before things that
# are used by it like the delegates. NOTE this only works on CPython
# probably.
# TODO(b/136468453): Remove need for __del__ ordering needs of CPython
# by using explicit closes(). See implementation of Interpreter __del__.
self._interpreter = None
self._delegates = None
def allocate_tensors(self):
self._ensure_safe()
return self._interpreter.AllocateTensors()
def _safe_to_run(self):
"""Returns true if there exist no numpy array buffers.
This means it is safe to run tflite calls that may destroy internally
allocated memory. This works, because in the wrapper.cc we have made
the numpy base be the self._interpreter.
"""
# NOTE, our tensor() call in cpp will use _interpreter as a base pointer.
# If this environment is the only _interpreter, then the ref count should be
# 2 (1 in self and 1 in temporary of sys.getrefcount).
return sys.getrefcount(self._interpreter) == 2
def _ensure_safe(self):
"""Makes sure no numpy arrays pointing to internal buffers are active.
This should be called from any function that will call a function on
_interpreter that may reallocate memory e.g. invoke(), ...
Raises:
RuntimeError: If there exist numpy objects pointing to internal memory
then we throw.
"""
if not self._safe_to_run():
raise RuntimeError("""There is at least 1 reference to internal data
in the interpreter in the form of a numpy array or slice. Be sure to
only hold the function returned from tensor() if you are using raw
data access.""")
# Experimental and subject to change
def _get_op_details(self, op_index):
"""Gets a dictionary with arrays of ids for tensors involved with an op.
Args:
op_index: Operation/node index of node to query.
Returns:
a dictionary containing the index, op name, and arrays with lists of the
indices for the inputs and outputs of the op/node.
"""
op_index = int(op_index)
op_name = self._interpreter.NodeName(op_index)
op_inputs = self._interpreter.NodeInputs(op_index)
op_outputs = self._interpreter.NodeOutputs(op_index)
details = {
'index': op_index,
'op_name': op_name,
'inputs': op_inputs,
'outputs': op_outputs,
}
return details
def _get_tensor_details(self, tensor_index):
"""Gets tensor details.
Args:
tensor_index: Tensor index of tensor to query.
Returns:
A dictionary containing the following fields of the tensor:
'name': The tensor name.
'index': The tensor index in the interpreter.
'shape': The shape of the tensor.
'quantization': Deprecated, use 'quantization_parameters'. This field
only works for per-tensor quantization, whereas
'quantization_parameters' works in all cases.
'quantization_parameters': The parameters used to quantize the tensor:
'scales': List of scales (one if per-tensor quantization)
'zero_points': List of zero_points (one if per-tensor quantization)
'quantized_dimension': Specifies the dimension of per-axis
quantization, in the case of multiple scales/zero_points.
Raises:
ValueError: If tensor_index is invalid.
"""
tensor_index = int(tensor_index)
tensor_name = self._interpreter.TensorName(tensor_index)
tensor_size = self._interpreter.TensorSize(tensor_index)
tensor_size_signature = self._interpreter.TensorSizeSignature(tensor_index)
tensor_type = self._interpreter.TensorType(tensor_index)
tensor_quantization = self._interpreter.TensorQuantization(tensor_index)
tensor_quantization_params = self._interpreter.TensorQuantizationParameters(
tensor_index)
tensor_sparsity_params = self._interpreter.TensorSparsityParameters(
tensor_index)
if not tensor_name or not tensor_type:
raise ValueError('Could not get tensor details')
details = {
'name': tensor_name,
'index': tensor_index,
'shape': tensor_size,
'shape_signature': tensor_size_signature,
'dtype': tensor_type,
'quantization': tensor_quantization,
'quantization_parameters': {
'scales': tensor_quantization_params[0],
'zero_points': tensor_quantization_params[1],
'quantized_dimension': tensor_quantization_params[2],
},
'sparsity_parameters': tensor_sparsity_params
}
return details
# Experimental and subject to change
def _get_ops_details(self):
"""Gets op details for every node.
Returns:
A list of dictionaries containing arrays with lists of tensor ids for
tensors involved in the op.
"""
return [
self._get_op_details(idx) for idx in range(self._interpreter.NumNodes())
]
def get_tensor_details(self):
"""Gets tensor details for every tensor with valid tensor details.
Tensors where required information about the tensor is not found are not
added to the list. This includes temporary tensors without a name.
Returns:
A list of dictionaries containing tensor information.
"""
tensor_details = []
for idx in range(self._interpreter.NumTensors()):
try:
tensor_details.append(self._get_tensor_details(idx))
except ValueError:
pass
return tensor_details
def get_input_details(self):
"""Gets model input details.
Returns:
A list of input details.
"""
return [
self._get_tensor_details(i) for i in self._interpreter.InputIndices()
]
def set_tensor(self, tensor_index, value):
"""Sets the value of the input tensor.
Note this copies data in `value`.
If you want to avoid copying, you can use the `tensor()` function to get a
numpy buffer pointing to the input buffer in the tflite interpreter.
Args:
tensor_index: Tensor index of tensor to set. This value can be gotten from
the 'index' field in get_input_details.
value: Value of tensor to set.
Raises:
ValueError: If the interpreter could not set the tensor.
"""
self._interpreter.SetTensor(tensor_index, value)
def resize_tensor_input(self, input_index, tensor_size, strict=False):
"""Resizes an input tensor.
```
interpreter = Interpreter(model_content=tflite_model)
interpreter.resize_tensor_input(0, [1, 224, 224, 3], strict=True)
interpreter.allocate_tensors()
interpreter.invoke()
```
Args:
input_index: Tensor index of input to set. This value can be gotten from
the 'index' field in get_input_details.
tensor_size: The tensor_shape to resize the input to.
strict: Only unknown dimensions can be resized when `strict` is True.
Unknown dimensions are indicated as `-1` in the `shape_signature`
attribute of a given tensor. (default False)
Raises:
ValueError: If the interpreter could not resize the input tensor.
"""
self._ensure_safe()
    # `ResizeInputTensor` now only accepts an int32 numpy array as the
    # `tensor_size` parameter.
tensor_size = np.array(tensor_size, dtype=np.int32)
self._interpreter.ResizeInputTensor(input_index, tensor_size, strict)
def get_output_details(self):
"""Gets model output details.
Returns:
A list of output details.
"""
return [
self._get_tensor_details(i) for i in self._interpreter.OutputIndices()
]
def get_tensor(self, tensor_index):
"""Gets the value of the input tensor (get a copy).
If you wish to avoid the copy, use `tensor()`. This function cannot be used
to read intermediate results.
Args:
tensor_index: Tensor index of tensor to get. This value can be gotten from
the 'index' field in get_output_details.
Returns:
a numpy array.
"""
return self._interpreter.GetTensor(tensor_index)
def tensor(self, tensor_index):
"""Returns function that gives a numpy view of the current tensor buffer.
    This allows reading and writing to this tensor without copies. This more
closely mirrors the C++ Interpreter class interface's tensor() member, hence
the name. Be careful to not hold these output references through calls
to `allocate_tensors()` and `invoke()`. This function cannot be used to read
intermediate results.
Usage:
```
interpreter.allocate_tensors()
input = interpreter.tensor(interpreter.get_input_details()[0]["index"])
output = interpreter.tensor(interpreter.get_output_details()[0]["index"])
for i in range(10):
input().fill(3.)
interpreter.invoke()
print("inference %s" % output())
```
Notice how this function avoids making a numpy array directly. This is
because it is important to not hold actual numpy views to the data longer
than necessary. If you do, then the interpreter can no longer be invoked,
because it is possible the interpreter would resize and invalidate the
    referenced tensors. The NumPy API doesn't allow any mutability of the
    underlying buffers.
WRONG:
```
input = interpreter.tensor(interpreter.get_input_details()[0]["index"])()
output = interpreter.tensor(interpreter.get_output_details()[0]["index"])()
interpreter.allocate_tensors() # This will throw RuntimeError
for i in range(10):
input.fill(3.)
      interpreter.invoke()  # this will throw a RuntimeError since input and
                            # output still reference internal buffers
```
Args:
tensor_index: Tensor index of tensor to get. This value can be gotten from
the 'index' field in get_output_details.
Returns:
A function that can return a new numpy array pointing to the internal
TFLite tensor state at any point. It is safe to hold the function forever,
but it is not safe to hold the numpy array forever.
"""
return lambda: self._interpreter.tensor(self._interpreter, tensor_index)
def invoke(self):
"""Invoke the interpreter.
Be sure to set the input sizes, allocate tensors and fill values before
calling this. Also, note that this function releases the GIL so heavy
computation can be done in the background while the Python interpreter
continues. No other function on this object should be called while the
invoke() call has not finished.
Raises:
      ValueError: If the underlying interpreter fails.
"""
self._ensure_safe()
self._interpreter.Invoke()
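  # A typical single-inference flow using the methods above (a sketch only;
  # the model path is an assumption and a single float32 input is assumed):
  #
  #   interpreter = Interpreter(model_path='model.tflite')
  #   interpreter.allocate_tensors()
  #   input_details = interpreter.get_input_details()
  #   output_details = interpreter.get_output_details()
  #   input_data = np.zeros(input_details[0]['shape'], dtype=np.float32)
  #   interpreter.set_tensor(input_details[0]['index'], input_data)
  #   interpreter.invoke()
  #   result = interpreter.get_tensor(output_details[0]['index'])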
def reset_all_variables(self):
return self._interpreter.ResetVariableTensors()
# Experimental and subject to change.
def _native_handle(self):
"""Returns a pointer to the underlying tflite::Interpreter instance.
This allows extending tflite.Interpreter's functionality in a custom C++
function. Consider how that may work in a custom pybind wrapper:
m.def("SomeNewFeature", ([](py::object handle) {
auto* interpreter =
reinterpret_cast<tflite::Interpreter*>(handle.cast<intptr_t>());
...
}))
and corresponding Python call:
SomeNewFeature(interpreter.native_handle())
Note: This approach is fragile. Users must guarantee the C++ extension build
is consistent with the tflite.Interpreter's underlying C++ build.
"""
return self._interpreter.interpreter()
class InterpreterWithCustomOps(Interpreter):
"""Interpreter interface for TensorFlow Lite Models that accepts custom ops.
The interface provided by this class is experimental and therefore not exposed
as part of the public API.
Wraps the tf.lite.Interpreter class and adds the ability to load custom ops
by providing the names of functions that take a pointer to a BuiltinOpResolver
and add a custom op.
"""
def __init__(self,
model_path=None,
model_content=None,
experimental_delegates=None,
custom_op_registerers=None):
"""Constructor.
Args:
model_path: Path to TF-Lite Flatbuffer file.
model_content: Content of model.
experimental_delegates: Experimental. Subject to change. List of
[TfLiteDelegate](https://www.tensorflow.org/lite/performance/delegates)
objects returned by lite.load_delegate().
custom_op_registerers: List of str (symbol names) or functions that take a
pointer to a MutableOpResolver and register a custom op. When passing
functions, use a pybind function that takes a uintptr_t that can be
recast as a pointer to a MutableOpResolver.
Raises:
      ValueError: If the interpreter could not be created.
"""
self._custom_op_registerers = custom_op_registerers
super(InterpreterWithCustomOps, self).__init__(
model_path=model_path,
model_content=model_content,
experimental_delegates=experimental_delegates)
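# A minimal usage sketch for InterpreterWithCustomOps (the registerer symbol
# name below is an illustrative assumption; it must match a symbol exported by
# your custom-op library):
#
#   interpreter = InterpreterWithCustomOps(
#       model_path='model.tflite',
#       custom_op_registerers=['TF_RegisterMyCustomOp'])
#   interpreter.allocate_tensors()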
|
|
import numpy as np
import scipy as sp
import logging as logger
import time
import pylab as pl
from collections import defaultdict
from sklearn.metrics import confusion_matrix
class PassiveAggressiveII(object):
"""
Passive Aggressive-II algorithm: squared hinge loss PA.
References:
- http://jmlr.org/papers/volume7/crammer06a/crammer06a.pdf
This model is only applied to binary classification.
"""
def __init__(self, fname, delimiter = " ", C = 1, n_scan = 10):
"""
model initialization.
"""
logger.basicConfig(level=logger.DEBUG)
logger.info("init starts")
        self.n_scan = n_scan
self.data = defaultdict()
self.model = defaultdict()
self.cache = defaultdict()
self._load(fname, delimiter)
self._init_model(C)
logger.info("init finished")
def _load(self, fname, delimiter = " "):
"""
Load data set specified with filename.
data format must be as follows (space-separated file as default),
l_1 x_11 x_12 x_13 ... x_1m
l_2 x_21 x_22 ... x_2m
...
l_n x_n1 x_n2 ... x_nm
        l_i must be in {1, -1} because this is a binary classifier.
Arguments:
- `fname`: file name.
- `delimiter`: delimiter of a file.
"""
logger.info("load data starts")
# load data
self.data["data"] = np.loadtxt(fname, delimiter = delimiter)
self.data["n_sample"] = self.data["data"].shape[0]
self.data["f_dim"] = self.data["data"].shape[1] - 1
        # binarize labels
self._binalize(self.data["data"])
# normalize
self.normalize(self.data["data"][:, 1:])
logger.info("load data finished")
def _binalize(self, data):
"""
        Binarize the labels of the data.
Arguments:
- `data`: dataset.
"""
logger.info("init starts")
# binary check
labels = data[:, 0]
classes = np.unique(labels)
if classes.size != 2:
print "label must be a binary value."
exit(1)
        # convert binary labels to {1, -1}
for i in xrange(labels.size):
if labels[i] == classes[0]:
labels[i] = 1
else:
labels[i] = -1
# set classes
self.data["classes"] = classes
logger.info("init finished")
def normalize(self, samples):
"""
        Normalize each sample such that ||x||_2 = 1.
Arguments:
- `samples`: dataset without labels.
"""
logger.info("normalize starts")
for i in xrange(0, self.data["n_sample"]):
samples[i, :] = self._normalize(samples[i, :])
logger.info("normalize finished")
def _normalize(self, sample):
norm = np.sqrt(sample.dot(sample))
sample = sample/norm
return sample
def _init_model(self, C):
"""
Initialize model.
"""
logger.info("init model starts")
self.model["w"] = np.ndarray(self.data["f_dim"] + 1) # model paremter
self.model["C"] = C # aggressive parameter
logger.info("init model finished")
def _learn(self, ):
"""
Learn internally.
"""
def _update(self, label, sample, margin):
"""
Update model parameter internally.
update rule is as follows,
w = w + y (1 - m)/(||x||_2^2 + C) * x
Arguments:
- `label`: label = {1, -1}
- `sample`: sample, or feature vector
"""
# add bias
sample = self._add_bias(sample)
norm = sample.dot(sample)
w = self.model["w"] + label * (1 - margin)/(norm + self.model["C"]) * sample
self.model["w"] = w
def _predict_value(self, sample):
"""
        Predict the value of w^T * x.
Arguments:
- `sample`:
"""
return self.model["w"].dot(self._add_bias(sample))
def _add_bias(self, sample):
return np.hstack((sample, 1))
def learn(self, ):
"""
Learn.
"""
logger.info("learn starts")
data = self.data["data"]
# learn
        for _ in xrange(0, self.n_scan):
            for i in xrange(0, self.data["n_sample"]):
sample = data[i, 1:]
label = data[i, 0]
pred_val = self._predict_value(sample)
margin = label * pred_val
if margin < 1:
self._update(label, sample, margin)
logger.info("learn finished")
def predict(self, sample):
"""
        Predict {1, -1} based on w^T * x.
Arguments:
- `sample`:
"""
pred_val = self._predict_value(sample)
self.cache["pred_val"] = pred_val
if pred_val >=0:
return 1
else:
return -1
def update(self, label, sample):
"""
update model.
        Arguments:
        - `label`: label = {1, -1}
        - `sample`: sample, or feature vector
        """
        margin = label * self.cache["pred_val"]
        if margin < 1:
            self._update(label, sample, margin)
@classmethod
def examplify(cls, fname, delimiter = " ", C = 1 , n_scan = 3):
"""
Example of how to use
"""
# learn
st = time.time()
model = PassiveAggressiveII(fname, delimiter, C , n_scan)
model.learn()
et = time.time()
print "learning time: %f[s]" % (et - st)
# predict (after learning)
data = np.loadtxt(fname, delimiter = " ")
model._binalize(data)
n_sample = data.shape[0]
y_label = data[:, 0]
y_pred = np.ndarray(n_sample)
for i in xrange(0, n_sample):
sample = data[i, 1:]
y_pred[i] = model.predict(sample)
# show result
cm = confusion_matrix(y_label, y_pred)
print cm
print "accurary: %d [%%]" % (np.sum(cm.diagonal()) * 100.0/np.sum(cm))
if __name__ == '__main__':
fname = "/home/kzk/datasets/uci_csv/liver.csv"
#fname = "/home/kzk/datasets/uci_csv/ad.csv"
print "dataset is", fname
PassiveAggressiveII.examplify(fname, delimiter = " ", C = 1, n_scan = 100)
|
|
import subprocess
import shlex
from time import sleep
import datetime
import shutil
import re
from django.conf import settings
from django.core.files import File
from django.utils import timezone
import thread
import traceback
import tempfile
import pytz
import requests
import os
import zipfile
from cidonkey.models import BuildInfo
from . import cidocker, github, common
from settings import MAX_CONCURRENT_BUILDS
def build(bi):
thread.start_new_thread(BuildProcess.start_build, (bi,))
def check(bi):
return BuildProcess.check_docker(bi)
class BuildProcess(object):
def __init__(self, build_info):
assert isinstance(build_info, BuildInfo), 'build_info must be an instance of BuildInfo, not %s' % \
build_info.__class__.__name__
self.build_info = build_info
self.project = build_info.project
self.token = self.project.github_token
self.valid_token = isinstance(self.token, basestring) and len(self.token) > 0
self.badge_updates = self.build_info.on_master
@classmethod
def start_build(cls, build_info):
"""
run the build script.
"""
self = BuildProcess(build_info)
try:
self.build_info.start = datetime.datetime.now().replace(tzinfo=pytz.UTC)
self.build_info.process_log = ''
self._delete_old_containers()
self.build_info.temp_dir = tempfile.mkdtemp(prefix='cid_src_tmp')
self._set_url()
self._log('doing badge updates: %r' % self.badge_updates)
self.build_info.save()
self._update_status('pending', 'CI build underway')
self._set_svg('in_progress')
self.build_info.save()
self._download()
self.build_info.save()
self._zip_save_repo()
self.build_info.save()
self._log('STARTING DOCKER:')
self.build_info.container = cidocker.start_ci(self.project.docker_image, self.build_info.temp_dir)
self.build_info.container_exists = True
self.build_info.save()
while True:
sleep(settings.THREAD_CHECK_RATE)
bi = self._check_docker()
if bi.complete:
break
except (common.KnownError, common.CommandError), e:
self._log('%s: %s' % (e.__class__.__name__, str(e)), '')
self._process_error()
except Exception:
self._log(traceback.format_exc())
self._process_error()
finally:
self.build_info.save()
return self.build_info
@classmethod
def check_docker(cls, build_info):
"""
check status of a build to see if it's finished.
"""
self = BuildProcess(build_info)
bi = self._check_docker()
self._check_queue()
return bi
def _check_docker(self):
if self.build_info.complete:
return self.build_info
try:
if not self.build_info.container_exists:
return self.build_info
status = cidocker.check_progress(self.build_info.container)
if not status:
return self.build_info
exit_code, finished, logs, con_inspection = status
self.build_info.test_success = self.build_info.project.script_split in logs
if self.build_info.test_success:
self.build_info.test_passed = exit_code == 0
process_log, ci_log = logs.split(self.build_info.project.script_split, 1)
self.build_info.process_log += '\n' + process_log
self._log('DOCKER FINISHED, EXIT CODE: %r' % exit_code)
self.build_info.ci_log = ci_log
self.build_info.container_inspection = con_inspection
if self.project.coverage_regex:
m = re.search(self.project.coverage_regex, self.build_info.ci_log)
if m:
try:
self.build_info.coverage = float(m.groups()[0])
except (ValueError, IndexError):
pass
else:
self.build_info.process_log += '\n' + logs
self._log('DOCKER FINISHED')
shutil.rmtree(self.build_info.temp_dir, ignore_errors=True)
self.build_info.complete = True
self.build_info.finished = finished
if self.build_info.test_passed:
msg = 'CI Success'
if isinstance(self.build_info.coverage, float):
msg += ', %0.2f%% coverage' % self.build_info.coverage
self._update_status('success', msg)
else:
self._update_status('failure', 'Tests failed')
self._set_svg(self.build_info.test_passed)
except common.KnownError, e:
raise e
except Exception:
self._log(traceback.format_exc())
self._process_error()
finally:
self.build_info.save()
return self.build_info
def _delete_old_containers(self):
delay = settings.CONTAINER_DELETE_MINUTES
if delay < 0:
self._log('Not deleting old containers.')
return
n = datetime.datetime.now().replace(tzinfo=pytz.UTC) - datetime.timedelta(minutes=delay)
del_con_ids = BuildInfo.objects.filter(finished__lt=n).values_list('container', flat=True)
deleted_cons = cidocker.delete_old_containers(del_con_ids)
BuildInfo.objects.filter(container__in=deleted_cons).update(container_exists=False)
self._log('%d old containers deleted.' % len(deleted_cons))
def _process_error(self):
self._update_status('error', 'Error running tests')
self._set_svg(False)
if self.build_info.temp_dir:
shutil.rmtree(self.build_info.temp_dir, ignore_errors=True)
self.build_info.test_success = False
self.build_info.complete = True
self.build_info.finished = timezone.now()
@staticmethod
def _check_queue():
"""
        Check whether a new build can begin; if so, start it.
"""
if BuildInfo.objects.filter(complete=False, queued=False).count() < MAX_CONCURRENT_BUILDS:
queue_first = BuildInfo.objects.filter(queued=True).order_by('id').first()
if queue_first:
queue_first.queued = False
queue_first.save()
build(queue_first)
def _set_url(self):
"""
generate the url which will be used to clone the repo.
"""
token = ''
if self.project.private and self.valid_token:
token = self.token + '@'
self.url = 'https://%sgithub.com/%s/%s.git' % (token, self.project.github_user, self.project.github_repo)
self._log('clone url: %s' % self.url)
def _update_status(self, status, message):
assert status in ['pending', 'success', 'error', 'failure']
if not self.build_info.status_url or not settings.SET_STATUS:
return
if not self.valid_token:
self._log('WARNING: no valid token found, cannot update status of pull request')
return
payload = {
'state': status,
'description': message,
'context': common.UPDATE_CONTEXT,
'target_url': self.build_info.project.update_url + str(self.build_info.id)
}
_, r = github.github_api(
url=self.build_info.status_url,
token=self.token,
method=requests.post,
data=payload,
extra_headers={'Content-type': 'application/json'})
self._log('updated pull request, status "%s", response: %d' % (status, r.status_code))
if r.status_code != 201:
            self._log('received unexpected status code, logging response details:')
self._log('response headers: %r' % r.headers)
self._log('url posted to: %s' % self.build_info.status_url)
self._log('payload: %r' % payload)
self._log('text: %r' % r.text[:1000])
def _download(self):
self._log('cloning...')
commands = 'git clone %s %s' % (self.url, self.build_info.temp_dir)
self._execute(commands)
self._log('cloned code successfully')
if self.build_info.fetch_cmd:
self._log('fetching branch ' + self.build_info.fetch_cmd)
commands = ['git fetch origin ' + self.build_info.fetch_cmd]
if self.build_info.fetch_branch:
commands.append('git checkout ' + self.build_info.fetch_branch)
self._execute(commands)
if self.build_info.sha:
command = 'git checkout ' + self.build_info.sha
branch = self.build_info.label.split('/')[-1].replace(':', '-')
if branch != 'master':
command += ' -b ' + branch
self._execute(command)
def _zip_save_repo(self):
self._log('zipping repo...')
count = 0
with tempfile.TemporaryFile(suffix='.zip') as temp_file:
with zipfile.ZipFile(temp_file, 'w') as ztemp_file:
for root, dirs, files in os.walk(self.build_info.temp_dir):
for f in files:
full_path = os.path.join(root, f)
local_path = full_path.replace(self.build_info.temp_dir, '').lstrip('/')
ztemp_file.write(full_path, local_path)
count += 1
self._log('zipped %d files to archive, saving zip file...' % count)
self.build_info.archive.save(temp_file.name, File(temp_file))
def _execute(self, commands):
if isinstance(commands, basestring):
commands = [commands]
for command in commands:
if command.strip().startswith('#'):
self._log(command, 'SKIP> ')
continue
self._log(command, 'EXEC> ')
cargs = shlex.split(command)
try:
                cienv = {}  # os.environ.copy()
cienv['CIDONKEY'] = '1'
p = subprocess.Popen(cargs,
cwd=self.build_info.temp_dir,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=cienv)
stdout, stderr = p.communicate()
if len(stdout) > 0:
self._log(stdout, '')
if p.returncode != 0:
raise common.CommandError(stderr)
elif len(stderr) > 0:
self._log(stderr)
except common.CommandError, e:
raise e
except Exception, e:
raise common.KnownError('%s: %s' % (e.__class__.__name__, str(e)))
def _set_svg(self, status):
if not self.badge_updates:
return
if status == 'in_progress':
status_svg = 'in_progress.svg'
else:
status_svg = 'passing.svg' if status else 'failing.svg'
self._log('setting status svg to %s' % status_svg)
self.project.status_svg = status_svg
self.project.save()
def _message(self, message):
if not message.endswith('\n'):
message += '\n'
self.build_info.process_log += message
def _log(self, line, prefix='#> '):
self._message(prefix + line.strip('\n\r \t'))
|
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
orm['avocado.DataField'].objects.filter(translator='SNPS Only').update(translator='Allow Nulls')
def backwards(self, orm):
"Write your backwards methods here."
orm['avocado.DataField'].objects.filter(translator='Allow Nulls').update(translator='SNPS Only')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 12, 5, 15, 36, 15, 336036)'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 12, 5, 15, 36, 15, 335815)'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'avocado.datacategory': {
'Meta': {'ordering': "('-parent__id', 'order', 'name')", 'object_name': 'DataCategory'},
'archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keywords': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_column': "'_order'", 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['avocado.DataCategory']"}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'avocado.dataconcept': {
'Meta': {'ordering': "('order',)", 'object_name': 'DataConcept'},
'archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['avocado.DataCategory']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'fields': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'concepts'", 'symmetrical': 'False', 'through': "orm['avocado.DataConceptField']", 'to': "orm['avocado.DataField']"}),
'formatter_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'concepts+'", 'null': 'True', 'to': "orm['auth.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'internal': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'keywords': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'name_plural': ('django.db.models.fields.CharField', [], {'max_length': '60', 'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_column': "'_order'", 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'queryview': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'concepts+'", 'blank': 'True', 'to': "orm['sites.Site']"})
},
'avocado.dataconceptfield': {
'Meta': {'ordering': "('order',)", 'object_name': 'DataConceptField'},
'concept': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'concept_fields'", 'to': "orm['avocado.DataConcept']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'field': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'concept_fields'", 'to': "orm['avocado.DataField']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name_plural': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_column': "'_order'", 'blank': 'True'})
},
'avocado.datacontext': {
'Meta': {'object_name': 'DataContext'},
'archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'composite': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'count': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'_count'"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'json': ('jsonfield.fields.JSONField', [], {'default': '{}', 'null': 'True', 'blank': 'True'}),
'keywords': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'session': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'datacontext+'", 'null': 'True', 'to': "orm['auth.User']"})
},
'avocado.datafield': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_name', 'model_name', 'field_name'),)", 'object_name': 'DataField'},
'app_name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['avocado.DataCategory']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'data_source': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'enumerable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field_name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'fields+'", 'null': 'True', 'to': "orm['auth.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'internal': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'keywords': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'model_name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'name_plural': ('django.db.models.fields.CharField', [], {'max_length': '60', 'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_column': "'_order'", 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'searchable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'fields+'", 'blank': 'True', 'to': "orm['sites.Site']"}),
'translator': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'unit': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'unit_plural': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'})
},
'avocado.dataview': {
'Meta': {'object_name': 'DataView'},
'archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'count': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'_count'"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'json': ('jsonfield.fields.JSONField', [], {'default': '{}', 'null': 'True', 'blank': 'True'}),
'keywords': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'session': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'dataview+'", 'null': 'True', 'to': "orm['auth.User']"})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'genes.exon': {
'Meta': {'object_name': 'Exon', 'db_table': "'exon'"},
'end': ('django.db.models.fields.IntegerField', [], {}),
'gene': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genes.Gene']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'index': ('django.db.models.fields.IntegerField', [], {}),
'start': ('django.db.models.fields.IntegerField', [], {})
},
'genes.gene': {
'Meta': {'object_name': 'Gene', 'db_table': "'gene'"},
'articles': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['literature.PubMed']", 'db_table': "'gene_pubmed'", 'symmetrical': 'False'}),
'chr': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genome.Chromosome']"}),
'families': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['genes.GeneFamily']", 'symmetrical': 'False', 'blank': 'True'}),
'hgnc_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'phenotypes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['phenotypes.Phenotype']", 'through': "orm['genes.GenePhenotype']", 'symmetrical': 'False'}),
'symbol': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'synonyms': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['genes.Synonym']", 'db_table': "'gene_synonym'", 'symmetrical': 'False'})
},
'genes.genefamily': {
'Meta': {'object_name': 'GeneFamily', 'db_table': "'gene_family'"},
'description': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'tag': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True'})
},
'genes.genephenotype': {
'Meta': {'object_name': 'GenePhenotype', 'db_table': "'gene_phenotype'"},
'gene': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genes.Gene']"}),
'hgmd_id': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'phenotype': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['phenotypes.Phenotype']"})
},
'genes.synonym': {
'Meta': {'object_name': 'Synonym', 'db_table': "'synonym'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
},
'genes.transcript': {
'Meta': {'object_name': 'Transcript', 'db_table': "'transcript'"},
'coding_end': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'coding_end_status': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'coding_start': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'coding_start_status': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'end': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'exon_count': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'exons': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['genes.Exon']", 'db_table': "'transcript_exon'", 'symmetrical': 'False'}),
'gene': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genes.Gene']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'refseq_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'start': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'strand': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'})
},
'genome.chromosome': {
'Meta': {'ordering': "['order']", 'object_name': 'Chromosome', 'db_table': "'chromosome'"},
'code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '2', 'db_index': 'True'})
},
'literature.pubmed': {
'Meta': {'object_name': 'PubMed', 'db_table': "'pubmed'"},
'pmid': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'})
},
'phenotypes.phenotype': {
'Meta': {'object_name': 'Phenotype', 'db_table': "'phenotype'"},
'articles': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['literature.PubMed']", 'symmetrical': 'False'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'hpo_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'term': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '1000'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'variants.effect': {
'Meta': {'ordering': "['order']", 'object_name': 'Effect', 'db_table': "'effect'"},
'code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'impact': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['variants.EffectImpact']", 'null': 'True', 'blank': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['variants.EffectRegion']", 'null': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'variants.effectimpact': {
'Meta': {'ordering': "['order']", 'object_name': 'EffectImpact', 'db_table': "'effect_impact'"},
'code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'variants.effectregion': {
'Meta': {'ordering': "['order']", 'object_name': 'EffectRegion', 'db_table': "'effect_region'"},
'code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'variants.evs': {
'Meta': {'object_name': 'EVS', 'db_table': "'evs'"},
'aa_ac_alt': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'aa_ac_ref': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'aa_gtc': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'aa_maf': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}),
'all_ac_alt': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'all_ac_ref': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'all_gtc': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'all_maf': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}),
'clinical_association': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'ea_ac_alt': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'ea_ac_ref': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'ea_gtc': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'ea_maf': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}),
'gts': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'read_depth': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'variant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'evs'", 'to': "orm['variants.Variant']"})
},
'variants.functionalclass': {
'Meta': {'ordering': "['order']", 'object_name': 'FunctionalClass', 'db_table': "'variant_functional_class'"},
'code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'variants.polyphen2': {
'Meta': {'object_name': 'PolyPhen2', 'db_table': "'polyphen2'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'refaa': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True'}),
'score': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}),
'variant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polyphen2'", 'to': "orm['variants.Variant']"})
},
'variants.sift': {
'Meta': {'object_name': 'Sift', 'db_table': "'sift'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'refaa': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True'}),
'score': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}),
'varaa': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True'}),
'variant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sift'", 'to': "orm['variants.Variant']"})
},
'variants.thousandg': {
'Meta': {'object_name': 'ThousandG', 'db_table': "'1000g'"},
'aa': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'ac': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'af': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}),
'afr_af': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}),
'amr_af': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}),
'an': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'asn_af': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}),
'eur_af': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'variant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'thousandg'", 'to': "orm['variants.Variant']"})
},
'variants.variant': {
'Meta': {'unique_together': "(('chr', 'pos', 'ref', 'alt'),)", 'object_name': 'Variant', 'db_table': "'variant'"},
'alt': ('django.db.models.fields.TextField', [], {'db_index': 'True'}),
'articles': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['literature.PubMed']", 'db_table': "'variant_pubmed'", 'symmetrical': 'False'}),
'chr': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genome.Chromosome']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'liftover': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'md5': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'phenotypes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['phenotypes.Phenotype']", 'through': "orm['variants.VariantPhenotype']", 'symmetrical': 'False'}),
'pos': ('django.db.models.fields.IntegerField', [], {}),
'ref': ('django.db.models.fields.TextField', [], {'db_index': 'True'}),
'rsid': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['variants.VariantType']", 'null': 'True'})
},
'variants.varianteffect': {
'Meta': {'object_name': 'VariantEffect', 'db_table': "'variant_effect'"},
'amino_acid_change': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'codon_change': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'effect': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['variants.Effect']", 'null': 'True', 'blank': 'True'}),
'exon': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genes.Exon']", 'null': 'True', 'blank': 'True'}),
'functional_class': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['variants.FunctionalClass']", 'null': 'True', 'blank': 'True'}),
'gene': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genes.Gene']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'transcript': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genes.Transcript']", 'null': 'True', 'blank': 'True'}),
'variant': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'effects'", 'null': 'True', 'to': "orm['variants.Variant']"})
},
'variants.variantphenotype': {
'Meta': {'object_name': 'VariantPhenotype', 'db_table': "'variant_phenotype'"},
'hgmd_id': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'phenotype': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['phenotypes.Phenotype']"}),
'variant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'variant_phenotypes'", 'to': "orm['variants.Variant']"})
},
'variants.varianttype': {
'Meta': {'ordering': "['order']", 'object_name': 'VariantType', 'db_table': "'variant_type'"},
'code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '20'})
}
}
complete_apps = ['avocado', 'variants']
|
|
#*******************************************************************
# Copyright 2002-2008 LSI Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#*******************************************************************
#
#--------------------------------------------------------------------
# Convert the DoxData (generated by pyXml2DoxData) into
# wrapper description data.
#--------------------------------------------------------------------
#
from pyCoderClass import *
from pyCoderFunc import *
from pyCoderMain import *
import pprint
import string
import os
import types
import traceback
import copy
import sys
import pyDDDFindTemplateTypes
import pyDDDFindClasses
import pyDDDFunction
import pyDDDParameters
import pyDDDPatchData
# ===================================================================
class cDoxData2Desc:
# ==========================================
def __init__(self):
#----------------------------------------
# List of things to keep
#----------------------------------------
self.mData=[] # Input Data
self.mClasses={} # All the classes
self.mTypedefs={} # All the typedefs
self.mFunctions=[] # All the global functions
self.mEnums={} # All the global enums
self.mDefines={} # All the global defines
self.mVars={} # All the global vars
#----------------------------------------
# List of things to generate
#----------------------------------------
self.mClsData=[] # Classes
self.mFunData=[] # Functions
self.mDefData=[] # Defines
self.mTmpData=[] # Template information
#----------------------------------------
# Define output order of keys in arguments
#----------------------------------------
self.mOutputKeyOrder=['name','type','desc','sig','call','value','params',
'constructors','methods','gets','sets','igets','isets','ops',
]
# ===============================================================
# ===============================================================
# Begin of user definable callbacks
# ==========================================
# ==========================================
# User callback to patch the full dataset before
# any other processing is done
def vPatchDataSet(self):
pass
# ==========================================
# ==========================================
    # User callback to skip a template class name
def vShouldSkipTemplateClass(self,classname):
return 0
# ==========================================
# ==========================================
    # User callback to add additional template specializations
# data is a dict:
# key - class name
# value - dict: class_specialization_name / list of template param values
def vAddTemplateSpecializations(self,specialized):
pass
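    # A hedged illustration of the 'specialized' dict expected by the callback
    # above (class and type names are hypothetical):
    #   specialized['cArray'] = {'cArrayInt': ['int'], 'cArrayStr': ['std::string']}
    # AddTemplateSpecialization() below is the support routine for adding such
    # entries.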
# ==========================================
    # Support routine for adding a new specialization
def AddTemplateSpecialization(self,specialized,clsname,spllist):
pyDDDFindTemplateTypes._AddTemplateSpecialization(self,specialized,clsname,spllist)
# ==========================================
    # User callback for propagating specializations
    # of derived classes to their base classes.
# Return a list of class names to propagate
def vGetTemplatePropagations(self):
return []
# ==========================================
# ==========================================
# User callback to patch the templat map data
def vPathTmpData(self):
pass
# ==========================================
# ==========================================
# User callback to patch the template data
def vCustomizeTemplateData(self):
pass
# ==========================================
# ==========================================
# User callback to skip a class name
def vShouldSkipClass(self,classname):
return 0
# ==========================================
# ==========================================
    # User callback to skip a base class name
    # called from a derived class
def vShouldSkipDerivedClass(self,classname):
return 0
# ==========================================
# ==========================================
# User callback get repr description
def vGetClassRepr(self,classname):
return None
# ==========================================
# ==========================================
# User callback get repr description based on
# a base class
def vGetClassReprByBaseClass(self,classname):
return None
# ==========================================
# ==========================================
# User callback to determine Db classes
def vIsDbClass(self,classname):
return 0
# ==========================================
# ==========================================
# User callback to determine if Typedef should be skipped
def vShouldSkipTypedef(self,name):
return 0
# ==========================================
# ==========================================
# User callback to determine if Function should be skipped
def vShouldSkipFunction(self,name):
return 0
# ==========================================
# ==========================================
# User callback to determine if Define should be skipped
def vShouldSkipDefine(self,name):
return 0
# ==========================================
# ==========================================
# User callback to determine if Enum should be skipped
def vShouldSkipEnum(self,name):
return 0
# ==========================================
# ==========================================
# User callback to determine if Var should be skipped
def vShouldSkipVar(self,name):
return 0
# ==========================================
# ==========================================
# User callback to determine if a global function is manual
def vIsManualGlobalFunction(self,sig):
return 0
# ==========================================
# ==========================================
# User callback to determine if a class function is manual
def vIsManualClassFunction(self,clsname,sig):
return 0
# ==========================================
# ==========================================
# User callback to determine if a global function is skipped
def vIsSkipGlobalFunction(self,sig):
return 0
# ==========================================
# ==========================================
# User callback to determine if a class function is skipped
def vIsSkipClassFunction(self,clsname,sig):
return 0
# ==========================================
# ==========================================
    # User callback to determine if class constructor messages should be skipped
def vIsIgnoreClassConstructor(self,clsname):
return 0
# ==========================================
# ==========================================
    # User callback to determine if a class should disallow assignment
def vIsNoAssignClass(self,clsname):
return 0
# ==========================================
# ==========================================
# User callback to determine if a class should allow virtual
def vAllowVirtualClass(self,clsname):
return 0
# ==========================================
# ==========================================
# User callback to determine if a class function should allow virtual
def vAllowVirtualFunction(self,clsname,sig):
return 0
# ==========================================
# ==========================================
    # User callback to customize data before writing
# Use to add iterators
def vCustomizeData(self,clsname,sig):
return 0
# ==========================================
# ==========================================
# User callback to get range check data for operator[] functions
def vGetRangeCheck(self,clsname):
return None
# ==========================================
# ==========================================
# User callback to customize class before final write
# Use to add Iter functions from Collections
def vCustomizeClassFunctions(self,cls):
return None
# ==========================================
# ==========================================
# User callback to add parameters to a constructor call
# Needed for virtual class without a default constructor
def vGetConstructorParameters(self,cls):
return ''
# ==========================================
# ==========================================
# User callback to get class cast data
def vGetClassCastData(self,clsname):
return None
# ==========================================
# ==========================================
# User callback to get record data
def vGetRecordData(self,clsname):
return None
# ==========================================
# ==========================================
    # User callback to get lock data
def vGetLockData(self,clsname):
return None
# ==========================================
# ==========================================
    # User callback to get math data
def vGetMathData(self,clsname):
return None
# ==========================================
# ==========================================
# User callback to get the iterator function name if a class
# is an iterator
def vGetIterFunction(self,clsname):
return None
# ==========================================
# ==========================================
    # User callback to see if a class supports compare
def vGetCompareData(self,clsname):
return None
# ==========================================
# ==========================================
# User callback to report data structures
def vReportData(self,name,data):
pass
# ==========================================
# ==========================================
# User callback to get a report file
def vGetReportFile(self,name):
return None
# ==========================================
# ==========================================
# User callback to get parameter hint data
    def vGetParameterHintData(self,clsname,funsig):
return None
# ===============================================================
# ===============================================================
# ==========================================
# Record Typedefs
def FindTypedefs(self,fdata):
for item in fdata['typedefs'].values():
name=item['name']
if (self.mClasses.has_key(name)): continue
if (self.vShouldSkipTypedef(name)): continue
self.mTypedefs[name]=item
# ==========================================
# Record Functions
def FindFunctions(self,fdata):
for item in fdata['functions']:
name=item['name']
if (self.vShouldSkipFunction(name)): continue
self.mFunctions.append(item)
# ==========================================
# Record Defines
def FindDefines(self,fdata):
for item in fdata['defines'].values():
name=item['name']
if (self.vShouldSkipDefine(name)): continue
self.mDefines[name]=item
# ==========================================
# Record Enums
def FindEnums(self,fdata):
for item in fdata['enums'].values():
name=item['name']
if (self.vShouldSkipEnum(name)): continue
self.mEnums[name]=item
# ==========================================
# Record Vars
def FindVars(self,fdata):
for item in fdata['vars'].values():
name=item['name']
if (self.vShouldSkipVar(name)): continue
self.mVars[name]=item
# ==========================================
    # Process File Data to find typedefs, functions, enums, defines
def ProcessFileData(self):
sys.stderr.write("Processing Global Data:\n")
for item in self.mData:
if (item['category']=='file'):
self.FindTypedefs(item)
self.FindFunctions(item)
self.FindEnums(item)
self.FindDefines(item)
self.FindVars(item)
# ===============================================================
# ==========================================
# Build Enums - Generate output data for enums
def BuildEnums(self):
for enum in self.mEnums.values():
name=enum['name']
values=""
texts=""
slist=""
for item in enum['values']:
values+=(" "+item['name'])
texts+=(" "+item['name'])
if (slist): slist+="\n"
slist+=item['name']
desc=enum['detaileddescription']
if (desc==''):
desc=enum['briefdescription']
if (desc==''):
desc='Enum type '+name
desc+='\nValues:\n'+slist
self.mClsData.append(['cEnumClassDesc',
{'name':name,
'values':values,
'texts':texts,
'desc':desc}])
slist=string.replace(slist,"\n"," ")
# ==========================================
# Build Defines - Generate output data for defines
def BuildDefines(self):
values=[]
for define in self.mDefines.values():
name=define['name']
if (len(define['param'])==0):
value=define['body']
if (value[0]=="'" and value[-1]=="'"):
value=value[1:-1]
desc=define['detaileddescription']
if (desc==''):
desc=define['briefdescription']
if (desc==''):
desc='Define value '+name
desc+=' Value: '+value
values.append({'name':name,
'value':value,
'desc':desc})
# Convert Numeric strings to int values
results={}
for item in values:
value=item['value']
name=item['name']
# allow L and LL
if (value[-1]=='l' or value[-1]=='L'):
value=value[:-1]
if (value[-1]=='l' or value[-1]=='L'):
value=value[:-1]
# allow U - unsigned
if (value[-1]=='u' or value[-1]=='U'):
value=value[:-1]
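            # e.g. a hypothetical define body '0x10UL' is stripped to '0x10'
            # above and eval()'d to 16 below.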
try:
result=eval(value)
except:
continue
if (type(result)==types.IntType):
item['result']=('int',value)
results[name]=result
elif (type(result)==types.LongType):
item['result']=('long',value)
results[name]=result
elif (type(result)==types.FloatType):
item['result']=('float',value)
results[name]=result
elif (type(result)==types.StringType):
item['result']=('string',str(result))
results[name]=result
# Convert Numeric expressions to int values
for i in range(2):
print '--------'
for item in values:
value=item['value']
name=item['name']
if (item.has_key('result')): continue
value=string.replace(value,"\\","")
try:
result=eval(value,results)
except:
continue
if (type(result)==types.IntType):
item['result']=('int',hex(result))
results[name]=result
elif (type(result)==types.LongType):
result=hex(result)
if (result[-1]=='l' or result[-1]=='L'): result=result[:-1]
if (result[-1]=='l' or result[-1]=='L'): result=result[:-1]
item['result']=('long',result)
results[name]=result
elif (type(result)==types.FloatType):
item['result']=('float',str(result))
results[name]=result
# Generate the output data
for item in values:
name=item['name']
desc=item['desc']
if (item.has_key('result')):
value=item['result']
else:
value=('string',item['value'])
self.mDefData.append(['cDefDesc',
{'name':name,
'value':value,
'desc':desc}])
# ==========================================
# Build Typedef - Generate output data for typedefs
def BuildTypedef(self,item):
name=item['name']
desc=item['detaileddescription']
if (desc==''):
desc=item['briefdescription']
if (desc==''):
desc='Typedef type '+name
if (item['category']=='manual'):
dct={'name':name,
'desc':desc,
'choice':item['choice'],
}
if (item.has_key('template_name')):
dct['typedef']="typedef %s %s"%(
item['template_name'],item['template_typedef'])
if (item.has_key('array')):
dct['array']=item['array']
if (item.has_key('builtin')):
dct['builtin']=1
if (item.has_key('aof')):
dct['aof']=1
self.mClsData.append(['cManualClassDesc',dct])
            # All typedefs must currently have a manual mapping
#elif (item['category']=='typedef'):
# dct={'name':name,
# 'desc':desc,
# }
# if (item.has_key('array')):
# dct['array']=item['array']
# self.mClsData.append(['cNativeClassDesc',dct])
elif (item['category']=='typedef'):
print "** Skipping Typedef: %s"%name
else:
raise "Unknown/Unsupported thing in BuildTypedef: %s/%s"%(item['category'],str(name))
# ==========================================
# Build Typedefs
def BuildTypedefs(self):
for typedef in self.mTypedefs.values():
self.BuildTypedef(typedef)
# ==========================================
    # Prepare Global Functions
def PrepareGlobalFunctions(self):
newfuns=[]
for fun in self.mFunctions:
funname=fun['name']
if (funname[0:8]=='operator'):
print "Skipping Global Operator Function: %s"%funname
continue
desc=fun['detaileddescription']
if (desc==''):
desc=fun['briefdescription']
if (desc==''):
desc='Function '+funname
fun['desc']=desc
params,opt=pyDDDParameters.BuildParams(self,fun,'global')
# Expand optional params
for i in opt:
newfun=copy.copy(fun)
prms=params[:i]
newfun['params']=prms
if (not pyDDDParameters.PrepareReturnValue(self,newfun,'global')):
continue
newfuns.append(newfun)
self.mFunctions=newfuns
# ==========================================
# Build Functions Dct
def BuildFunctionDct(self,fun,manual=0):
dct={'name':fun['name'],
'sig':fun['sig'],
'desc':fun['desc'],
}
if (manual): dct['ismanual']=1
if (fun['params']):
dct['params']=fun['params']
for key in ['value','value_tag','value_utag','value_init','value_ref',
'value_alloc','value_dref','value_borrow','value_lock',
'value_copy', 'value_rmconst',
'range_check','call','ismanual','isiter','virt','const']:
if (fun.has_key(key)):
dct[key]=fun[key]
return dct
# ==========================================
# Build Functions
def BuildGlobalFunctions(self):
groups={}
for fun in self.mFunctions:
funname=fun['name']
group=groups.get(funname,[])
group.append(fun)
groups[funname]=group
for funname,group in groups.items():
manual=0
for fun in group:
if (fun.has_key('manual')): manual=1
fung=[]
for fun in group:
dct=self.BuildFunctionDct(fun,manual)
fung.append(['cGlobalFunDesc',dct])
dct={'name':funname,
'funs':fung,
}
self.mFunData.append(['cFunGroupDesc',dct])
# ==========================================
# Prepare Class Functions (members)
def PrepareClassFunctions(self,cls):
clsname=cls['name']
desname='~'+clsname
cons=[]
funs=[]
igets=[]
isets=[]
ops=[]
#----------------------------------------------
# Walk all the functions
for fun in cls['functions']:
funname=fun['name']
funnamesplit=string.split(funname)
if (funname==desname):
continue # destructor
desc=fun['detaileddescription']
if (desc==''):
desc=fun['briefdescription']
if (desc==''):
if (funname==clsname):
desc='Constructor '+funname
else:
desc='Function '+funname
fun['desc']=desc
#----------------------------------------------
# Expand optional params
params,opt=pyDDDParameters.BuildParams(self,fun,clsname)
for i in opt:
prms=params[:i]
newfun=copy.copy(fun)
newfun['params']=prms
funsig=newfun['sig']
funvirt=newfun['virt']
funconst=newfun['const']
if (funname==clsname):
cons.append(newfun)
continue
pvalue=newfun['type_link']
rtag=pvalue['utag']
rcls=pvalue['cls']
#----------------------------------------------
# Check for output value
if (not pyDDDParameters.PrepareReturnValue(self,newfun,clsname)):
continue
#----------------------------------------------
# Check for index function
if (funname=='operator[]'):
if (funvirt=='virtual'):
print "** Index Function is virtual: %s, in Class: %s Sig: %s"%(funname,clsname,funsig)
continue
newfun['call']=funname
range_check=self.vGetRangeCheck(clsname)
if (not range_check):
print "** Range Check not specified for Class: %s Sig: %s"%(clsname,funsig)
continue
newfun['range_check']=range_check
if (rtag=='ref'):
newfun['name']='index_set'
isets.append(newfun)
newfun=copy.copy(newfun)
newfun['name']='index_get'
igets.append(newfun)
elif (rtag=='cref' or rtag=='simple'):
newfun['name']='index_get'
igets.append(newfun)
else:
print "** Bad return type on operator[] for Class: %s Sig: %s"%(clsname,funsig)
continue
#----------------------------------------------
# Check for operator typecast function
elif (funname[0:9]=='operator '):
if (funvirt=='virtual'):
print "** Operator Function is virtual: %s, in Class: %s Sig: %s"%(funname,clsname,funsig)
continue
newfun['call']=funname
newname=funname[9:]
newname=string.replace(newname,'<',' <')
newname=string.replace(newname,'*',' *')
index=string.find(newname,' ')
if (index>=0): newname=newname[:index]
newfun['name']=newname
funs.append(newfun)
#----------------------------------------------
# Check for operator function
elif (funname[0:8]=='operator'):
if (funvirt=='virtual'):
print "** Operator Function is virtual: %s, in Class: %s Sig: %s"%(funname,clsname,funsig)
continue
newfun['call']=funname
newfun['name']=string.replace(funname,' ','_')
ops.append(newfun)
#----------------------------------------------
                # Otherwise use normal function
else:
funs.append(newfun)
#----------------------------------------------
# Record all data
if (len(igets)>1):
raise str("Multiple Get operator[] found in Class: %s"%clsname)
if (len(isets)>1):
raise str("Multiple Set operator[] found in Class: %s"%clsname)
cls['constructors']=cons
cls['methods']=funs
cls['igets']=igets
cls['isets']=isets
for fun in ops:
if (fun['name']=='operator==' and self.vGetCompareData(clsname)): continue
if (fun['name']=='operator!=' and self.vGetCompareData(clsname)): continue
print "Skipping Operator Function: %s, in Class: %s"%(fun['name'],clsname)
# ==========================================
# Build Class Functions (members)
def BuildClassFunctions(self,cls):
clsname=cls['name']
clsdct=cls['data']
cons=cls['constructors']
funs=cls['methods']
igets=cls['igets']
isets=cls['isets']
#----------------------------------------------
groups={}
for fun in funs:
funname=fun['name']
group=groups.get(funname,[])
group.append(fun)
groups[funname]=group
genfuns=[]
for funname,group in groups.items():
fung=[]
manual=0
for fun in group:
if (fun.has_key('manual')): manual=1
for fun in group:
dct=self.BuildFunctionDct(fun,manual)
fung.append(['cFunDesc',dct])
dct={'name':funname,
'funs':fung,
}
genfuns.append(['cFunGroupDesc',dct])
if (genfuns): clsdct['methods']=genfuns
#----------------------------------------------
if (igets):
dct=self.BuildFunctionDct(igets[0])
genigets=[ ['cIndexGetFunDesc',dct] ]
clsdct['igets']=genigets
#----------------------------------------------
if (isets):
dct=self.BuildFunctionDct(isets[0])
genisets=[ ['cIndexSetFunDesc',dct] ]
clsdct['isets']=genisets
#----------------------------------------------
if (cons):
gencons=[]
manual=0
for fun in cons:
if (fun.has_key('manual')): manual=1
for fun in cons:
dct=self.BuildFunctionDct(fun,manual)
gencons.append(['cConDesc',dct])
clsdct['constructors']=gencons
# ==========================================
# Check Class Functions
# Find duplicate name and parameter functions
def CheckClassFunctions(self,cls):
#---------------------------------
# Rename Ref Functions
newlist=[]
for fun in cls['methods']:
name=fun['name']
if (name[0:8]=='operator'):
newlist.append(fun)
continue
if (fun.get('value_utag','')=='cref' or fun.get('value_utag','')=='simple'):
drop=0
for fun2 in cls['methods']:
if (fun2['name']!=name): continue
if (fun2.get('value_utag','')=='ref'):
drop=1
break
if (not drop):
newlist.append(fun)
else:
newlist.append(fun)
cls['methods']=newlist
#---------------------------------
return
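        # NOTE: the early return above disables the duplicate-signature check
        # that follows; the code below is currently unreachable.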
groups={}
for fun in cls['methods']:
funname=fun['name']
group=groups.get(funname,[])
group.append(fun)
groups[funname]=group
for funname,group in groups.items():
cases={}
for fun in group:
sig=fun['sig']
parts=string.split(sig,'|')
params=parts[2]
data=cases.get(params,[])
data.append(parts)
cases[params]=data
for param,data in cases.items():
if (len(data)==1): continue
print "** Found Duplicate Functions in class:",cls['name'],'Function:',funname,'Cases:',str(data)
# ==========================================
    # Prepare Class SFunctions (static functions)
def PrepareClassStaticFunctions(self,cls):
clsname=cls['name']
newfuns=[]
for fun in cls['sfunctions']:
funname=fun['name']
desc=fun['detaileddescription']
if (desc==''):
desc=fun['briefdescription']
if (desc==''):
desc='Function '+funname
fun['desc']=desc
params,opt=pyDDDParameters.BuildParams(self,fun,clsname)
# Expand optional params
for i in opt:
newfun=copy.copy(fun)
prms=params[:i]
newfun['params']=prms
if (not pyDDDParameters.PrepareReturnValue(self,newfun,clsname)):
continue
newfuns.append(newfun)
cls['static_methods']=newfuns
# ==========================================
# Build Class SFunctions (static functions)
def BuildClassStaticFunctions(self,cls):
clsname=cls['name']
clsdct=cls['data']
groups={}
for fun in cls['static_methods']:
funname=fun['name']
group=groups.get(funname,[])
group.append(fun)
groups[funname]=group
fung=[]
for funname,group in groups.items():
manual=0
funs=[]
for fun in group:
if (fun.has_key('manual')): manual=1
for fun in group:
dct=self.BuildFunctionDct(fun,manual)
funs.append(['cStaticFunDesc',dct])
dct={'name':funname,
'funs':funs,
}
fung.append(['cFunGroupDesc',dct])
if (fung): clsdct['static_methods']=fung
# ==========================================
# Check Class Constructor
def CheckClassConstructor(self,cls):
clsname=cls['name']
clsdct=cls['data']
base=cls
#--------------------------
# Find Constructor
found=0
for fun in cls['functions']:
funname=fun['name']
if (funname!=clsname): continue
found=1
if (not found):
clsdct['class_init']=self.vGetConstructorParameters(clsname)
#--------------------------
dt=self.vGetClassCastData(clsname)
if (dt):
clsdct['class_casts']=dt
#--------------------------
if (cls.has_key('template_name')):
clsdct['typedef']="typedef %s %s"%(
cls['template_name'],cls['template_typedef'])
#--------------------------
if (cls['db']):
clsdct['typedefdb']="typedef %s* %sp"%(clsname,clsname)
clsdct['typename']='%sp'%(clsname)
else:
clsdct['typename']=clsname
#--------------------------
record=self.vGetRecordData(clsname)
if (record):
clsdct['record']=record
#--------------------------
lock=self.vGetLockData(clsname)
if (lock):
clsdct['lock']=lock
#--------------------------
math=self.vGetMathData(clsname)
if (math):
clsdct['manmath']=1
# ==========================================
# Prepare Classes
def PrepareClasses(self):
clsnames=self.mClasses.keys()
clsnames.sort()
for key in clsnames:
cls=self.mClasses[key]
name=cls['name']
realname=cls.get('template_typedef',name)
if (cls['category']=='manual'):
continue
desc=cls['detaileddescription']
if (desc==''):
desc=cls['briefdescription']
if (desc==''):
desc='Class type '+realname
dct={'name':realname,
'desc':desc,
}
cls['data']=dct
self.PrepareClassFunctions(cls)
self.PrepareClassStaticFunctions(cls)
self.vCustomizeClassFunctions(cls)
self.CheckClassFunctions(cls)
# ==========================================
# Build Classes
def BuildClasses(self):
clsnames=self.mClasses.keys()
clsnames.sort()
for key in clsnames:
cls=self.mClasses[key]
clsname=cls['name']
realname=cls.get('template_typedef',clsname)
if (cls['category']=='manual'):
self.BuildTypedef(cls)
continue
self.BuildClassFunctions(cls)
self.BuildClassStaticFunctions(cls)
self.CheckClassConstructor(cls)
dct=cls['data']
if (cls.has_key('array')):
dct['array']=cls['array']
if (cls.has_key('aof')):
dct['aof']=1
if (cls.has_key('virt')):
dct['virt']=1
if (cls.has_key('pure-virt')):
dct['pure_virt']=1
if (cls['base']!=None):
dct['base']=cls['base']['name']
if (cls.has_key('repr')):
dct['repr']=cls['repr']
if (cls.has_key('varlock')):
dct['lock']=1
if (cls.has_key('varnovalidate')):
dct['novalidate']=1
if (cls.has_key('no_copy')):
dct['no_copy']=1
if (cls.has_key('has_copy')):
dct['has_copy']=1
if (cls.has_key('no_assign')):
dct['no_assign']=1
if (cls['db']):
self.mClsData.append(['cDbClassDesc',dct])
else:
funname=self.vGetIterFunction(clsname)
isitr=None
if (funname!=None):
for fun in cls['functions']:
if (fun['name']==funname):
isitr=fun
break
if (isitr!=None):
fun=isitr
rcls=fun['type_link']['cls']
#sys.stderr.write('%s %s\n'%(str(fun['type_link']),str(rcls)))
dct['type']=rcls['name']
dct['fun']=funname
self.mClsData.append(['cIterClassDesc',dct])
else:
compare=self.vGetCompareData(clsname)
if (compare):
dct['compare']='object'
self.mClsData.append(['cAllocClassDesc',dct])
# ===============================================================
# ==========================================
def WriteDict(self,key,dct,indent):
first=1
self.mFp.write("%*s%s(\n"%(indent,"",key))
keys=dct.keys()
orderkeys=[]
for key in self.mOutputKeyOrder:
if key in keys:
orderkeys.append(key)
keys.remove(key)
for key in keys:
orderkeys.append(key)
for key in orderkeys:
value=dct[key]
if (first): first=0
else: self.mFp.write(",\n")
self.mFp.write("%*s%s="%(indent+4,"",key))
self.WriteValue(value,indent)
self.mFp.write(")")
# ==========================================
def WriteList(self,lst,indent):
first=1
self.mFp.write("[\n")
for key,dct in lst:
if (first): first=0
else: self.mFp.write(",\n")
self.WriteDict(key,dct,indent+4)
self.mFp.write("]")
# ==========================================
def WriteTuple(self,lst,indent):
first=1
self.mFp.write("(\n")
for value in lst:
if (first): first=0
else: self.mFp.write("\n")
self.mFp.write("%*s"%(indent+4,''))
self.WriteValue(value,indent)
self.mFp.write(",")
self.mFp.write(")")
# ==========================================
def WriteValue(self,value,indent):
if (type(value)==types.ListType):
self.WriteList(value,indent+4)
elif (type(value)==types.TupleType):
self.WriteTuple(value,indent+4)
elif (type(value)==types.DictType):
self.WriteList(value,indent+4)
else:
if (type(value)==types.IntType):
self.mFp.write("%d"%value)
else:
value=repr(value)
self.mFp.write("%s"%value)
# ==========================================
# Write the output data to its file
def WriteOutput(self,file):
print "Writting Description File:",file
sys.stderr.write("Writting Description File: %s\n"%file)
self.mFp=open(file,"w")
self.mFp.write("""\
#*******************************************************************
# Copyright 2002-2008 LSI Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#*******************************************************************
#
#--------------------------------------------------------------------
# The following code description was generated by pyDoxData2Desc.
# This used the data descriptions generated by pyXml2DoxData.
#--------------------------------------------------------------------
from pyCoderClass import *
from pyCoderFunc import *
from pyCoderMain import *
Data={
'clsdata':""")
# pprint.pprint(self.mClsData,self.mFp)
self.WriteList(self.mClsData,0)
self.mFp.write(""",
'fundata':""")
self.WriteList(self.mFunData,0)
self.mFp.write(""",
'defdata':""")
self.WriteList(self.mDefData,0)
self.mFp.write(""",
'tmpdata':""")
self.WriteTuple(self.mTmpData,0)
self.mFp.write(""",
}
""")
self.mFp.close()
# ===============================================================
# ==========================================
def ProcessData(self,file):
print "Start Processing"
sys.stderr.write("Start Processing\n")
# Classify the input data
pyDDDPatchData.PatchData(self)
pyDDDFindTemplateTypes.RemoveEmptyData(self)
pyDDDFindTemplateTypes.ExpandTemplateDefaults(self)
pyDDDFindTemplateTypes.FindTemplateTypes(self)
pyDDDFindTemplateTypes.ExpandTemplateTypes(self)
self.vCustomizeTemplateData()
pyDDDFindClasses.FindClasses(self)
self.ProcessFileData()
# Link Inheritance
pyDDDFindClasses.FindClassBases(self)
pyDDDFunction.LinkAllFunctions(self)
self.vCustomizeData()
# TBD self.LinkVars()
# Build Data
sys.stderr.write("Building Description Data:\n")
self.BuildEnums()
self.BuildDefines()
self.BuildTypedefs()
self.PrepareGlobalFunctions()
self.BuildGlobalFunctions()
self.PrepareClasses()
self.BuildClasses()
# TBD self.BuildVars()
self.WriteOutput(file)
# ==========================================
# Read a set of .py files from the given directory
def ReadInputFiles(self,indir,files):
print "Reading Files from:",indir
sys.stderr.write("Reading Files from: %s\n"%indir)
cwd=os.getcwd()
sys.path.append(cwd)
for file in files:
cmd="from %s.%s import data"%(indir,file)
NS={}
exec cmd in NS
for item in NS['data']:
self.mData.append(item)
# ===================================================================
# ===================================================================
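# A hedged usage sketch (module and file names are hypothetical): subclass
# cDoxData2Desc, override the v* callbacks you need, then drive it with:
#   conv = MyDoxData2Desc()
#   conv.ReadInputFiles('doxdata', ['module_a', 'module_b'])
#   conv.ProcessData('wrapper_desc.py')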
|
|
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""ADB protocol implementation.
Implements the ADB protocol as seen in android's adb/adbd binaries, but only the
host side.
"""
import collections
import inspect
import logging
import Queue
import struct
import threading
import time
from adb import usb_exceptions
_LOG = logging.getLogger('adb.low')
_LOG.setLevel(logging.ERROR)
class InvalidResponseError(IOError):
"""Got an invalid command over USB."""
def __init__(self, message, header):
super(InvalidResponseError, self).__init__('%s: %s' % (message, header))
self.header = header
def ID2Wire(name):
assert len(name) == 4 and isinstance(name, str), name
assert all('A' <= c <= 'Z' for c in name), name
return sum(ord(c) << (i * 8) for i, c in enumerate(name))
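# A hedged illustration (not part of the original source): the four command
# characters are packed little-endian, so ID2Wire('CNXN') == 0x4E584E43 and
# Wire2ID(0x4E584E43) == 'CNXN'; values that do not decode to A-Z map to 'XXXX'.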
def Wire2ID(encoded):
assert isinstance(encoded, int), encoded
name = (
chr(encoded & 0xff) +
chr((encoded >> 8) & 0xff) +
chr((encoded >> 16) & 0xff) +
chr(encoded >> 24))
if not all('A' <= c <= 'Z' for c in name):
return 'XXXX'
return name
def _CalculateChecksum(data):
"""The checksum is just a sum of all the bytes. I swear."""
return sum(ord(d) for d in data) & 0xFFFFFFFF
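# A hedged illustration: _CalculateChecksum('AB') == ord('A') + ord('B') == 131;
# the 0xFFFFFFFF mask only matters for payloads whose byte sum exceeds 32 bits.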
class AuthSigner(object):
"""Signer for use with authenticated ADB, introduced in 4.4.x/KitKat."""
def Sign(self, data):
"""Signs given data using a private key."""
raise NotImplementedError()
def GetPublicKey(self):
"""Returns the public key in PEM format without headers or newlines."""
raise NotImplementedError()
class _AdbMessageHeader(collections.namedtuple(
'_AdbMessageHeader',
['command', 'arg0', 'arg1', 'data_length', 'data_checksum'])):
"""The raw wire format for the header only.
Protocol Notes
local_id/remote_id:
Turns out the documentation is host/device ambidextrous, so local_id is the
id for 'the sender' and remote_id is for 'the recipient'. So since we're
only on the host, we'll re-document with host_id and device_id:
OPEN(host_id, 0, 'shell:XXX')
READY/OKAY(device_id, host_id, '')
WRITE(0, host_id, 'data')
CLOSE(device_id, host_id, '')
"""
_VALID_IDS = ('AUTH', 'CLSE', 'CNXN', 'FAIL', 'OKAY', 'OPEN', 'SYNC', 'WRTE')
# CNXN constants for arg0.
VERSION = 0x01000000
# AUTH constants for arg0.
AUTH_TOKEN = 1
AUTH_SIGNATURE = 2
AUTH_RSAPUBLICKEY = 3
@classmethod
def Make(cls, command_name, arg0, arg1, data):
assert command_name in cls._VALID_IDS
assert isinstance(arg0, int), arg0
assert isinstance(arg1, int), arg1
assert isinstance(data, str), repr(data)
return cls(
ID2Wire(command_name), arg0, arg1, len(data), _CalculateChecksum(data))
@classmethod
def Unpack(cls, message):
try:
command, arg0, arg1, data_length, data_checksum, magic = struct.unpack(
'<6I', message)
except struct.error:
raise InvalidResponseError('Unable to unpack ADB message', message)
hdr = cls(command, arg0, arg1, data_length, data_checksum)
expected_magic = command ^ 0xFFFFFFFF
if magic != expected_magic:
raise InvalidResponseError(
'Invalid magic %r != %r' % (magic, expected_magic), hdr)
if hdr.command_name == 'XXXX':
raise InvalidResponseError('Unknown command', hdr)
if hdr.data_length < 0:
raise InvalidResponseError('Invalid data length', hdr)
return hdr
@property
def Packed(self):
"""Returns this message in an over-the-wire format."""
magic = self.command ^ 0xFFFFFFFF
# An ADB message is 6 words in little-endian.
return struct.pack(
'<6I', self.command, self.arg0, self.arg1, self.data_length,
self.data_checksum, magic)
@property
def command_name(self):
return Wire2ID(self.command)
def str_partial(self):
command_name = self.command_name
arg0 = self.arg0
arg1 = self.arg1
if command_name == 'AUTH':
if arg0 == self.AUTH_TOKEN:
arg0 = 'TOKEN'
elif arg0 == self.AUTH_SIGNATURE:
arg0 = 'SIGNATURE'
elif arg0 == self.AUTH_RSAPUBLICKEY:
arg0 = 'RSAPUBLICKEY'
if arg1 != 0:
raise InvalidResponseError(
'Unexpected arg1 value (0x%x) on AUTH packet' % arg1, self)
return '%s, %s' % (command_name, arg0)
elif command_name == 'CNXN':
if arg0 == self.VERSION:
arg0 = 'v1'
arg1 = 'pktsize:%d' % arg1
return '%s, %s, %s' % (command_name, arg0, arg1)
def __str__(self):
return '%s, %d' % (self.str_partial(), self.data_length)
class _AdbMessage(object):
"""ADB message class including the data."""
def __init__(self, header, data=''):
self.header = header
self.data = data
def Write(self, usb, timeout_ms=None):
"""Send this message over USB."""
# We can't merge these 2 writes, the device wouldn't be able to read the
# packet.
try:
usb.BulkWrite(self.header.Packed, timeout_ms)
usb.BulkWrite(self.data, timeout_ms)
finally:
self._log_msg(usb)
@classmethod
def Read(cls, usb, timeout_ms=None):
"""Reads one _AdbMessage.
    Propagates ReadFailedError if the header cannot be read from the device.
"""
packet = usb.BulkRead(24, timeout_ms)
hdr = _AdbMessageHeader.Unpack(packet)
if hdr.data_length:
data = usb.BulkRead(hdr.data_length, timeout_ms)
assert len(data) == hdr.data_length, (len(data), hdr.data_length)
actual_checksum = _CalculateChecksum(data)
if actual_checksum != hdr.data_checksum:
raise InvalidResponseError(
'Received checksum %s != %s' % (actual_checksum, hdr.data_checksum),
hdr)
else:
data = ''
msg = cls(hdr, data)
msg._log_msg(usb)
return msg
@classmethod
def Make(cls, command_name, arg0, arg1, data):
return cls(_AdbMessageHeader.Make(command_name, arg0, arg1, data), data)
def _log_msg(self, usb):
_LOG.debug(
'%s.%s(%s)',
'/'.join(str(i) for i in usb.port_path), inspect.stack()[1][3], self)
def __str__(self):
if self.data:
data = repr(self.data)
if len(data) > 128:
data = data[:128] + u'\u2026\''
return '%s, %s' % (self.header.str_partial(), data)
return self.header.str_partial()
class _AdbConnection(object):
"""One logical ADB connection to a service."""
class _MessageQueue(object):
def __init__(self, manager, timeout_ms=None):
self._queue = Queue.Queue()
self._manager = manager
self._timeout_ms = timeout_ms
def __iter__(self):
return self
def next(self):
while True:
try:
i = self._queue.get_nowait()
except Queue.Empty:
# Will reentrantly call self._Add() via parent._OnRead()
if not self._manager.ReadAndDispatch(timeout_ms=self._timeout_ms):
# Failed to read from the device, the connection likely dropped.
raise StopIteration()
continue
if isinstance(i, StopIteration):
raise i
return i
def _Add(self, message):
self._queue.put(message.data)
def _Close(self):
self._queue.put(StopIteration())
def __init__(self, manager, local_id, service_name, timeout_ms=None):
# ID as given by the remote device.
self.remote_id = 0
# Service requested on the remote device.
self.service_name = service_name
# Self assigned local ID.
self._local_id = local_id
self._yielder = self._MessageQueue(manager, timeout_ms=timeout_ms)
self._manager = manager
@property
def local_id(self):
"""Local connection ID as sent to adbd."""
return self._local_id
def __iter__(self):
# If self._yielder is None, it means it has already closed. Return a fake
# iterator with nothing in it.
    return self._yielder or iter([])
def Make(self, command_name, data):
return _AdbMessage.Make(command_name, self._local_id, self.remote_id, data)
def _Write(self, command_name, data):
assert len(data) <= self.max_packet_size, '%d > %d' % (
len(data), self.max_packet_size)
self.Make(command_name, data).Write(self._manager._usb)
def Close(self):
"""User initiated stream close.
It's rare that the user needs to do this.
"""
try:
self._Write('CLSE', '')
for _ in self:
pass
except (usb_exceptions.ReadFailedError, usb_exceptions.WriteFailedError):
# May get a LIBUSB_ERROR_TIMEOUT
pass
@property
def max_packet_size(self):
return self._manager.max_packet_size
@property
def port_path(self):
return self._manager.port_path
def _HasClosed(self):
"""Must be called with the manager lock held."""
if self._yielder:
self._yielder._Close()
self._yielder = None
self._manager._UnregisterLocked(self._local_id)
def _OnRead(self, message):
"""Calls from within ReadAndDispatch(), so the manager lock is held."""
    # Can be CLSE, OKAY or WRTE. It is essentially an ACK.
cmd_name = message.header.command_name
if message.header.arg0 != self.remote_id and cmd_name != 'CLSE':
# We can't assert that for now. TODO(maruel): Investigate the one-off
# cases.
logging.warning(
'Unexpected remote ID: expected %d: %s', self.remote_id, message)
if message.header.arg1 != self._local_id:
raise InvalidResponseError(
'Unexpected local ID: expected %d' % self._local_id, message)
if cmd_name == 'CLSE':
self._HasClosed()
return
if cmd_name == 'OKAY':
return
if cmd_name == 'WRTE':
try:
self._Write('OKAY', '')
except usb_exceptions.WriteFailedError as e:
_LOG.info('%s._OnRead(): Failed to reply OKAY: %s', self.port_path, e)
self._yielder._Add(message)
return
if cmd_name == 'AUTH':
self._manager._HandleAUTH(message)
return
if cmd_name == 'CNXN':
self._manager.HandleCNXN(message)
return
# Unexpected message.
assert False, message
# Adaptors.
def Write(self, data):
self._Write('WRTE', data)
def ReadUntil(self, _):
return 'WRTE', self._yielder.next()
class AdbConnectionManager(object):
"""Multiplexes the multiple connections."""
# Maximum amount of data in an ADB packet. Value of MAX_PAYLOAD_V2 in adb.h.
MAX_ADB_DATA = 256*1024
def __init__(self, usb, banner, rsa_keys, auth_timeout_ms):
# Constants.
self._usb = usb
self._host_banner = banner
self._rsa_keys = rsa_keys
self._auth_timeout_ms = auth_timeout_ms
self._lock = threading.Lock()
# As defined by the device.
self.max_packet_size = 0
# Banner replied in CNXN packet.
self.state = None
# Multiplexed stream handling.
self._connections = {}
self._next_local_id = 16
@classmethod
def Connect(cls, usb, banner, rsa_keys, auth_timeout_ms):
"""Establish a new connection to the device.
Args:
usb: A USBHandle with BulkRead and BulkWrite methods. Takes ownership of
the handle, it will be closed by this instance.
rsa_keys: List of AuthSigner subclass instances to be used for
authentication. The device can either accept one of these via the Sign
method, or we will send the result of GetPublicKey from the first one
if the device doesn't accept any of them.
banner: A string to send as a host identifier.
auth_timeout_ms: Timeout to wait for when sending a new public key. This
is only relevant when we send a new public key. The device shows a
dialog and this timeout is how long to wait for that dialog. If used
in automation, this should be low to catch such a case as a failure
quickly; while in interactive settings it should be high to allow
users to accept the dialog. We default to automation here, so it's low
by default.
Returns:
      A connected AdbConnectionManager instance.
"""
assert isinstance(rsa_keys, (list, tuple)), rsa_keys
assert len(rsa_keys) <= 10, 'adb will sleep 1s after each key above 10'
# pylint: disable=protected-access
self = cls(usb, banner, rsa_keys, auth_timeout_ms)
self._Connect()
return self
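  # A hedged usage sketch (not part of the original source; 'usb_handle' and
  # 'my_signer' are assumptions standing in for a real USBHandle and AuthSigner):
  #   manager = AdbConnectionManager.Connect(usb_handle, 'host', [my_signer], 10000)
  #   output = manager.Command('shell', 'echo hello')
  #   manager.Close()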
@property
def port_path(self):
return self._usb.port_path
def Open(self, destination, timeout_ms=None):
"""Opens a new connection to the device via an OPEN message.
Args:
destination: The service:command string.
Returns:
The local connection object to use.
Yields:
The responses from the service if used as such.
"""
with self._lock:
next_id = self._next_local_id
self._next_local_id += 1
conn = _AdbConnection(self, next_id, destination, timeout_ms=timeout_ms)
conn._Write('OPEN', destination + '\0')
with self._lock:
self._connections[conn.local_id] = conn
# TODO(maruel): Timeout.
    # Read until we get the proper remote id.
while True:
msg = _AdbMessage.Read(self._usb, timeout_ms)
if msg.header.arg1 == conn.local_id:
conn.remote_id = msg.header.arg0
conn._OnRead(msg)
if msg.header.arg1 == conn.local_id:
return conn
def Close(self):
"""Also closes the usb handle."""
with self._lock:
conns = self._connections.values()
for conn in conns:
conn._HasClosed()
with self._lock:
assert not self._connections, self._connections
self._usb.Close()
def StreamingCommand(self, service, command='', timeout_ms=None):
"""One complete set of USB packets for a single connection for a single
command.
Sends service:command in a new connection, reading the data for the
    response. All the data is held in memory; large responses will be slow and
can fill up memory.
Args:
service: The service on the device to talk to.
command: The command to send to the service.
timeout_ms: Timeout for USB packets, in milliseconds.
"""
return self.Open('%s:%s' % (service, command), timeout_ms).__iter__()
def Command(self, service, command='', timeout_ms=None):
return ''.join(self.StreamingCommand(service, command, timeout_ms))
def ReadAndDispatch(self, timeout_ms=None):
"""Receive a response from the device."""
with self._lock:
try:
msg = _AdbMessage.Read(self._usb, timeout_ms)
except usb_exceptions.ReadFailedError as e:
        # adbd could be rebooting, etc. Return False to signal that this kind of
# failure is expected.
_LOG.info(
'%s.ReadAndDispatch(): Masking read error %s', self.port_path, e)
return False
conn = self._connections.get(msg.header.arg1)
if not conn:
        # It's likely a torn-down connection from a previous ADB instance,
# e.g. pkill adb.
# TODO(maruel): It could be a spurious CNXN. In that case we're better
# to cancel all the known _AdbConnection and start over.
_LOG.error(
'%s.ReadAndDispatch(): Got unexpected connection, dropping: %s',
self.port_path, msg)
return False
conn._OnRead(msg)
return True
def _Connect(self):
"""Connect to the device."""
with self._lock:
reply = None
start = time.time()
nb = 0
_LOG.debug('Emptying the connection first')
while True:
try:
msg = _AdbMessage.Read(self._usb, 20)
except usb_exceptions.ReadFailedError:
break
nb += 1
if msg.header.command_name in ('AUTH', 'CNXN'):
# Assert the message has the expected host.
reply = msg
else:
conn = self._connections.get(msg.header.arg1)
if conn:
conn._OnRead(msg)
_LOG.info(
'%s._Connect(): Flushed %d messages in %.1fs',
self.port_path, nb, time.time() - start)
if not reply:
msg = _AdbMessage.Make(
'CNXN', _AdbMessageHeader.VERSION, self.MAX_ADB_DATA,
'host::%s\0' % self._host_banner)
msg.Write(self._usb)
reply = _AdbMessage.Read(self._usb)
if reply.header.command_name == 'AUTH':
self._HandleAUTH(reply)
else:
self._HandleCNXN(reply)
def _HandleAUTH(self, reply):
# self._lock must be held.
if not self._rsa_keys:
raise usb_exceptions.DeviceAuthError(
'Device authentication required, no keys available.')
# Loop through our keys, signing the last data which is the challenge.
for rsa_key in self._rsa_keys:
reply = self._HandleReplyChallenge(rsa_key, reply, self._auth_timeout_ms)
if reply.header.command_name == 'CNXN':
break
if reply.header.command_name == 'AUTH':
      # None of the keys worked, so send a public key. This will prompt the
      # user.
msg = _AdbMessage.Make(
'AUTH', _AdbMessageHeader.AUTH_RSAPUBLICKEY, 0,
self._rsa_keys[0].GetPublicKey() + '\0')
msg.Write(self._usb)
try:
reply = _AdbMessage.Read(self._usb, self._auth_timeout_ms)
except usb_exceptions.ReadFailedError as e:
if e.usb_error.value == -7: # Timeout.
raise usb_exceptions.DeviceAuthError(
'Accept auth key on device, then retry.')
raise
self._HandleCNXN(reply)
def _HandleCNXN(self, reply):
# self._lock must be held.
if reply.header.command_name != 'CNXN':
raise usb_exceptions.DeviceAuthError(
'Accept auth key on device, then retry.')
if reply.header.arg0 != _AdbMessageHeader.VERSION:
raise InvalidResponseError('Unknown CNXN response', reply)
self.state = reply.data
self.max_packet_size = reply.header.arg1
_LOG.debug(
'%s._HandleCNXN(): max packet size: %d',
self.port_path, self.max_packet_size)
for conn in self._connections.itervalues():
conn._HasClosed()
self._connections = {}
def _HandleReplyChallenge(self, rsa_key, reply, auth_timeout_ms):
# self._lock must be held.
if (reply.header.arg0 != _AdbMessageHeader.AUTH_TOKEN or
reply.header.arg1 != 0 or
reply.header.data_length != 20 or
len(reply.data) != 20):
raise InvalidResponseError('Unknown AUTH response', reply)
msg = _AdbMessage.Make(
'AUTH', _AdbMessageHeader.AUTH_SIGNATURE, 0, rsa_key.Sign(reply.data))
msg.Write(self._usb)
return _AdbMessage.Read(self._usb, auth_timeout_ms)
def _Unregister(self, conn_id):
with self._lock:
self._UnregisterLocked(conn_id)
def _UnregisterLocked(self, conn_id):
# self._lock must be held.
self._connections.pop(conn_id, None)
|
|
#!/usr/bin/env python
"""
Copyright (c) Steinwurf ApS 2016.
All Rights Reserved
Distributed under the "BSD License". See the accompanying LICENSE.rst file.
run ratemon.py with <wlan device> as interface
"""
from __future__ import print_function
import sys
import os
import time
import datetime
import argparse
import socket
import re
import curses
import subprocess
import dpkt
import pcapy
def mac_string(mac):
"""Convert mac to string."""
return ':'.join('{0:02X}'.format(ord(b)) for b in mac)
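# A hedged illustration (not part of the original source): for a 6-byte raw MAC
# such as '\x00\x11\x22\x33\x44\x55', mac_string() returns '00:11:22:33:44:55'.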
class ratemon():
"""Monitor object."""
def __init__(self, interface, timeout_ms=250):
"""Initialize monitor object."""
self.captured = 0
self.stations = {}
self.alias = {}
self.ips = {}
self.stale_time = 0
self.dead_time = 0
self.interface = interface
self.only_alias = False
self.prog = sys.argv[0].replace('./', '')
# Setup capture
self.pc = pcapy.open_live(interface, 65536, 1, timeout_ms)
def set_screen(self, screen):
"""Set the screen."""
self.screen = screen
def set_stale_time(self, stale_time):
"""Set stale time."""
self.stale_time = stale_time
def set_dead_time(self, dead_time):
"""Set dead time."""
self.dead_time = dead_time
def set_only_alias(self, only_alias):
"""Set set only alias."""
self.only_alias = only_alias
def update_ip_list(self):
"""Update the ip list."""
output = subprocess.check_output(['ip', 'neighbor', 'show'])
ip_neigh = str(output).split('\n')
for entry in ip_neigh:
try:
m = re.split('[\s]+', entry)
ip = m[0].strip()
lladdr = m[4].strip().lower()
self.ips[lladdr] = ip
except:
pass
def next(self):
"""Get and parse the next packet."""
header, packet = self.pc.next()
if header and packet:
self.parse_packet(header, packet)
def update_timeout(self):
"""Update timeout."""
now = time.time()
for station in self.stations.values():
age = now - station['last']
if self.stale_time > 0 and age > self.stale_time:
station['stale'] = True
if self.dead_time > 0 and age > self.dead_time:
self.stations.pop(station['mac'])
def update_screen(self):
"""Update screen."""
self.screen.clear()
# Update stale nodes
self.update_timeout()
# Update MAC to IP table
self.update_ip_list()
nodes = len(self.stations)
total_kbs = 0.0
for mac,station in self.stations.iteritems():
if 'kbs' in station:
total_kbs += station['kbs']
now = datetime.datetime.now().strftime('%Y-%m-%d %H:%M')
top = '[{0}][frames: {1}][nodes: {2}] [total kB/s: {3}] [date: {4}]\n\n'
self.screen.addstr(top.format(self.prog, self.captured, nodes, \
total_kbs, now))
header = ' {mac:18s} {frames:7s}' \
'{kbs:>7s} {alias}\n\n'
self.screen.addstr(header.format(**
{'mac': 'mac',
'frames': 'frames',
'kbs': 'kB/s',
'alias': 'alias/ip'}))
# Sort stations according to creation time
sorted_stations = sorted(
self.stations.values(),
key=lambda s: int(s['created'] * 1000))
# Get window dimensions
maxy, maxx = self.screen.getmaxyx()
shown = 0
for station in sorted_stations:
            # Break if we can't fit more clients on the screen
y, x = self.screen.getyx()
if y >= maxy - 3:
overflow = nodes - shown
self.screen.addstr(" {0} nodes not shown...".format(overflow))
break
shown += 1
# Continue if only showing aliased nodes
if self.only_alias and not station['alias']:
continue
fmt = ' {mac:18s} {frames:<7d}'\
'{kbs:>5.3f} {alias} {ip}\n'
text = fmt.format(**station)
if station['stale']:
color = curses.color_pair(3) | curses.A_BOLD
else:
color = curses.color_pair(2)
self.screen.addstr(text, color)
# Show help text
footer = "q: quit | r: reset counters | R: reset nodes"
self.screen.addstr(maxy - 1, 1, footer)
self.screen.refresh()
def add_alias(self, host, name):
"""Add alias."""
self.alias[host.lower()] = name
def reset_counters(self):
"""Reset counters."""
self.captured = 0
for station in self.stations.values():
station['frames'] = 0
station['kbs'] = 0.0
station['received'] = 0.0
def reset_nodes(self):
"""Reset nodes."""
self.stations = {}
self.reset_counters()
def parse_packet(self, header, packet):
"""Parse packet."""
self.captured += 1
# todo let's output the errors somewhere.
tap = dpkt.radiotap.Radiotap(packet)
tap_len = socket.ntohs(tap.length)
# Parse IEEE80211 header
wlan = dpkt.ieee80211.IEEE80211(packet[tap_len:])
# Currently we only care about data frames
        if wlan.type != dpkt.ieee80211.DATA_TYPE:
return
mac = mac_string(wlan.data_frame.src).lower()
# Lookup station
station = self.stations.get(mac)
# Get current time
now = time.time()
# New station
if not station:
self.stations[mac] = {}
station = self.stations[mac]
station['mac'] = mac
station['alias'] = self.alias.get(mac, '')
station['ip'] = ''
station['created'] = now
station['frames'] = 0
station['received'] = 0.0
station['kbs'] = 0.0
station['fps'] = 0
station['start'] = now
# Log last updated time
station['last'] = now
# Increment packet frame count
station['frames'] += 1
station['fps'] += 1
        # Register the amount of data received
station['received'] += header.getlen()
if (now - station['start'] >= 1):
received = station['received']
fps = station['fps']
            ## Calculate kB/s
station['kbs'] = received / 1000.0
## Reset data counters
station['start'] = now
station['received'] = 0.0
station['fps'] = 0
# Try to set IP if empty
if station['ip'] == '':
station['ip'] = self.ips.get(mac, '')
if station['ip'] != '' and station['alias'] != '':
station['ip'] = ' (' + station['ip'] + ')'
# Station is not stale
station['stale'] = False
def parse_alias_pair(alias):
"""Parse alias mac, name pair."""
match = re.match('(..:..:..:..:..:..)=(.*)', alias, flags=re.IGNORECASE)
if not match:
raise RuntimeError('Failed to parse alias: ' + alias)
return match.group(1), match.group(2)
def alias_type(alias):
"""parse alias argument."""
try:
host, name = parse_alias_pair(alias)
except Exception as e:
raise argparse.ArgumentTypeError(e)
return (host, name)
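# A hedged illustration of the "mac=name" alias format handled above; the MAC and
# name are made-up values, not part of the original tool:
#
#   parse_alias_pair("aa:bb:cc:dd:ee:ff=laptop")  ->  ("aa:bb:cc:dd:ee:ff", "laptop")
#
# The same form is expected for every -a/--alias argument and on each line of the
# alias file read in main().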
def main():
"""Main function."""
formatter = argparse.RawTextHelpFormatter
parser = argparse.ArgumentParser(description=__doc__,
formatter_class=formatter)
parser.add_argument('interface', help='interface to sniff')
parser.add_argument('-a', '--alias', metavar='<mac=name>',
action='append', type=alias_type,
help='alias mac with name')
parser.add_argument('-f', '--alias-file', metavar='<file>',
help='read aliases from file',
default='steinwurf_alias.txt')
parser.add_argument('-A', '--only-alias', action='store_true',
help='only show aliased nodes')
parser.add_argument('-s', '--stale-time',
type=int, default=30, metavar='<sec>',
help='consider node stale after SEC seconds')
parser.add_argument('-d', '--dead-time',
type=int, default=60, metavar='<sec>',
help='consider node dead after SEC seconds')
args = parser.parse_args()
# Create monitor object
try:
mon = ratemon(args.interface)
except Exception as e:
print("Failed to open capture: " + str(e))
sys.exit(os.EX_NOPERM)
# Setup timeouts
mon.set_stale_time(args.stale_time)
mon.set_dead_time(args.dead_time)
# Map aliases from command line
if args.alias is not None:
for a in args.alias:
host, name = a
mon.add_alias(host, name)
# Map aliases from file
if args.alias_file is not None:
with open(args.alias_file) as f:
for line in f:
# Skip comments and empty lines
if re.match('^\s*(#.*)?$', line):
continue
host, name = parse_alias_pair(line)
mon.add_alias(host, name)
mon.set_only_alias(args.only_alias)
# Setup curses
stdscr = curses.initscr()
curses.noecho()
curses.cbreak()
curses.curs_set(0)
stdscr.nodelay(1)
# Setup colors
curses.start_color()
curses.use_default_colors()
curses.init_pair(1, curses.COLOR_RED, -1)
curses.init_pair(2, curses.COLOR_GREEN, -1)
curses.init_pair(3, curses.COLOR_BLACK, -1)
# Setup screen
mon.set_screen(stdscr)
last_update = 0
while True:
now = time.time()
if now > last_update + 0.1:
try:
mon.update_screen()
except:
pass
last_update = now
try:
mon.next()
except KeyboardInterrupt:
break
except:
pass
ch = stdscr.getch()
if ch == ord('q'):
break
if ch == ord('r'):
mon.reset_counters()
if ch == ord('R'):
mon.reset_counters()
mon.reset_nodes()
# Cleanup curses
curses.nocbreak()
curses.echo()
curses.curs_set(1)
curses.endwin()
if __name__ == '__main__':
main()
|
|
"""
<Program Name>
missing_seattle_install.py
<Started>
June 2009
<Author>
n2k8000@u.washington.edu
Konstantin Pik
<Purpose>
This file will read in a list file passed into it, and from that list it
will install seattle on all of those nodes. The list file is to be in the
file format specified for .LIST files (!user:[username], followed by list of
IPs).
<Usage>
python missing_seattle_install.py missing.list
Note: missing.list is the default file name.
"""
import thread
import time
import sys
# for remote_shellexec
import deploy_network
import deploy_threading
# the running thread counter
thread_counter = 0
# the lock on the thread_counter, just to make sure add/sub is atomic
thread_lock = thread.allocate_lock()
def get_remote_hosts_from_file(fname = 'missing.list'):
"""
<Purpose>
Returns a list of the IP as read from file specified.
File format is:
!user:[username]
[IPs]
[username] is the username that will be used until a new $username is
specified in the same format. NOTE: Username is case sensitive.
[IPs] are a list of IPs/hostname (one per line) associated with that
username
<Arguments>
fname:
Optional. The filename containing the IPs of the remote machines. File
must be in the same directory as this script.
<Exceptions>
Catches a thrown exception if the IP file is not found.
<Side Effects>
None.
<Returns>
Returns a list of tuples with (username, ip) on success, False on failure
"""
# IP file must be in the same dir as this script
try:
file_of_ips = open(fname, 'r')
  except Exception, e:
    print 'Error: Are you missing your list of remote hosts? ('+str(e)+')'
    # open() failed, so there is no file handle to close here
    return False
else:
    # flag on whether we have any remote hosts (there are users and comments
    # in the file as well)
have_one_ip = False
# initialize dict
users_ip_tuple_list = []
current_username = ''
    # Python docs suggest iterating like this instead of reading the whole file into memory:
for line in file_of_ips:
# if first chars match what we want ('!user:' is 6 chars long)
if line[0:6].lower() == '!user:':
# grab everything after the '!user:' string
# -1 so we drop the \n and leading/trailing spaces
current_username = line[6:-1].strip()
else:
# ignore blank lines and spaces
if line.strip('\n '):
# and ignore comments (lines starting with #)
if line.strip('\n ')[0] != '#':
# if we get here, then we have an IP so we need to check that
# user is not empty.. log err if it is and complain.
if not current_username:
print 'Critical Error: No username specified for remote host group!'
file_of_ips.close()
return False
# add (username, remote_host) pair
users_ip_tuple_list.append((current_username, line.rstrip('\n ')))
# set flag that we have at least one ip
have_one_ip = True
# return true only if we have at least ONE ip that we added to the list
# and not just a bunch of users
if have_one_ip:
# lets make the list a set, which is a cheap way of getting rid of
# duplicates, then cast back to list.
finalized_list = list(set(users_ip_tuple_list))
print "Found "+str(len(finalized_list))+" unique hosts to connect to."
file_of_ips.close()
return finalized_list
file_of_ips.close()
return False
def format_print(out, err):
"""
<Purpose>
Will print out the non-empty out/err strings once they're properly
formatted. Intended to format stdout and stderr. Also will print to
missing.log
<Arguments>
out:
stdout
err:
std error
<Exceptions>
None.
<Side Effects>
None.
<Returns>
None.
"""
try:
out = out.strip('\n\r ')
err = err.strip('\n\r ')
logfilehandle = open('missing.log', 'a')
if out:
print out
logfilehandle.write(out+'\n')
if err:
print err
logfilehandle.write(err+'\n')
logfilehandle.close()
except Exception, e:
print 'Error while writing file and/or formatting data'
print e
return
def worker(cmd, username, host):
"""
<Purpose>
Worker thread that makes calls to remote_shellexec and increments
the running thread counter until the thread has finished.
<Arguments>
cmd:
the command string to execute on the machine
username:
the username to log in as
host:
the remote hostname/ip to install on.
<Exceptions>
None.
<Side Effects>
None.
<Returns>
None.
"""
global thread_counter, thread_lock
# do an atomic add on the number of threads running
thread_lock.acquire()
thread_counter += 1
thread_lock.release()
out, err, retcode = deploy_network.remote_shellexec(cmd, username, host)
format_print('|\n\n Log from node: '+host+'\n'+out, err)
# do an atomic subtract on the number of threads running
thread_lock.acquire()
thread_counter -= 1
thread_lock.release()
def main():
"""
<Purpose>
Entry point into the program. Reads the hosts that need installing
from file and then starts the threads that will take care of downloading
and installing seattle. Then waits for all threads to finish. This takes
a while as an RSA key needs to be generated during each install.
<Arguments>
None
<Exceptions>
Possible exception when launching new threads.
<Side Effects>
None.
<Returns>
None.
"""
# start the timeout monitor thread
deploy_threading.init()
# the fn of the file that contains the list of nodes we'll be using
nodelist_fn = ''
# did we get a parameter passed in? if so that's our fn
if len(sys.argv) > 1:
nodelist_fn = sys.argv[1]
print 'Using '+nodelist_fn+' filename to read in hostnames'
else:
print 'Using default missing.list filename to read in hostnames'
# get hosts from file
if nodelist_fn:
hosts = get_remote_hosts_from_file(nodelist_fn)
else: # use default fn
hosts = get_remote_hosts_from_file()
# if we have hostnames
if hosts:
# build up a command string that'll download and install seattle
cmd_list = []
# try to uninstall seattle_repy, then remove dir
cmd_list.append('cd seattle_repy; ./stop_seattle.sh; ./uninstall.sh')
# 1. Remove old file, and download the file
cmd_list.append('cd ~; rm -rf seattle_linux.tgz; rm -rf seattle_repy')
cmd_list.append('wget https://seattlegeni.cs.washington.edu/geni/download/flibble/seattle_linux.tgz')
# 2. Untar
cmd_list.append('tar -xf seattle_linux.tgz')
# 3. Change into seattle_repy directory and execute python install.py and start seattle
cmd_list.append('cd seattle_repy; python install.py; ./start_seattle.sh ')
# merge into a command string
cmd_str = '; '.join(cmd_list)
while hosts:
# 8 is the number of threads we'll launch
      while thread_counter < 8 and hosts:
# grab a tuple from hosts array
host = hosts.pop()
# separate it out for clarity
user = host[0]
machine = host[1]
print 'Starting on '+str(machine)
try:
# start thread and then give it some time to boot up
thread.start_new_thread(worker, (cmd_str, user, machine,))
time.sleep(.2)
except Exception, e:
print "Exception while trying to start worker thread"
print e
return
# wait until we're done...
while thread_counter > 0:
time.sleep(1)
if __name__ == "__main__":
main()
|
|
from __future__ import unicode_literals
import json
from django.conf import settings
from django.contrib.contenttypes.fields import GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.db.models import QuerySet, Q
from django.utils.encoding import python_2_unicode_compatible, smart_text
from django.utils.six import iteritems, integer_types
from django.utils.translation import ugettext_lazy as _
from jsonfield.fields import JSONField
class LogEntryManager(models.Manager):
"""
Custom manager for the :py:class:`LogEntry` model.
"""
def log_create(self, instance, **kwargs):
"""
Helper method to create a new log entry. This method automatically populates some fields when no explicit value
is given.
:param instance: The model instance to log a change for.
:type instance: Model
:param kwargs: Field overrides for the :py:class:`LogEntry` object.
:return: The new log entry or `None` if there were no changes.
:rtype: LogEntry
"""
changes = kwargs.get('changes', None)
pk = self._get_pk_value(instance)
if changes is not None:
kwargs.setdefault('content_type', ContentType.objects.get_for_model(instance))
kwargs.setdefault('object_pk', pk)
kwargs.setdefault('object_repr', smart_text(instance))
if isinstance(pk, integer_types):
kwargs.setdefault('object_id', pk)
get_additional_data = getattr(instance, 'get_additional_data', None)
if callable(get_additional_data):
kwargs.setdefault('additional_data', get_additional_data())
        # Delete log entries with the same pk as a newly created model. This should only be necessary when a pk is
        # used twice.
if kwargs.get('action', None) is LogEntry.Action.CREATE:
if kwargs.get('object_id', None) is not None and self.filter(content_type=kwargs.get('content_type'), object_id=kwargs.get('object_id')).exists():
self.filter(content_type=kwargs.get('content_type'), object_id=kwargs.get('object_id')).delete()
else:
self.filter(content_type=kwargs.get('content_type'), object_pk=kwargs.get('object_pk', '')).delete()
return self.create(**kwargs)
return None
def get_for_object(self, instance):
"""
Get log entries for the specified model instance.
:param instance: The model instance to get log entries for.
:type instance: Model
:return: QuerySet of log entries for the given model instance.
:rtype: QuerySet
"""
# Return empty queryset if the given model instance is not a model instance.
if not isinstance(instance, models.Model):
return self.none()
content_type = ContentType.objects.get_for_model(instance.__class__)
pk = self._get_pk_value(instance)
if isinstance(pk, integer_types):
return self.filter(content_type=content_type, object_id=pk)
else:
return self.filter(content_type=content_type, object_pk=pk)
def get_for_objects(self, queryset):
"""
Get log entries for the objects in the specified queryset.
:param queryset: The queryset to get the log entries for.
:type queryset: QuerySet
:return: The LogEntry objects for the objects in the given queryset.
:rtype: QuerySet
"""
if not isinstance(queryset, QuerySet) or queryset.count() == 0:
return self.none()
content_type = ContentType.objects.get_for_model(queryset.model)
primary_keys = queryset.values_list(queryset.model._meta.pk.name, flat=True)
if isinstance(primary_keys[0], integer_types):
return self.filter(content_type=content_type).filter(Q(object_id__in=primary_keys)).distinct()
else:
return self.filter(content_type=content_type).filter(Q(object_pk__in=primary_keys)).distinct()
def get_for_model(self, model):
"""
Get log entries for all objects of a specified type.
:param model: The model to get log entries for.
:type model: class
:return: QuerySet of log entries for the given model.
:rtype: QuerySet
"""
# Return empty queryset if the given object is not valid.
if not issubclass(model, models.Model):
return self.none()
content_type = ContentType.objects.get_for_model(model)
return self.filter(content_type=content_type)
def _get_pk_value(self, instance):
"""
Get the primary key field value for a model instance.
:param instance: The model instance to get the primary key for.
:type instance: Model
:return: The primary key value of the given model instance.
"""
pk_field = instance._meta.pk.name
pk = getattr(instance, pk_field, None)
        # Check to make sure that we got a pk, not a model object.
if isinstance(pk, models.Model):
pk = self._get_pk_value(pk)
return pk
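# A minimal sketch of driving the manager above; the instance, field names and
# values are hypothetical and only illustrate the expected 'changes' JSON shape
# (field name -> [old value, new value]):
#
#   entry = LogEntry.objects.log_create(
#       article,                                     # some saved model instance
#       action=LogEntry.Action.UPDATE,
#       changes=json.dumps({"title": ["Old title", "New title"]}),
#   )
#   LogEntry.objects.get_for_object(article)         # entries for that instance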
@python_2_unicode_compatible
class LogEntry(models.Model):
"""
Represents an entry in the audit log. The content type is saved along with the textual and numeric (if available)
primary key, as well as the textual representation of the object when it was saved. It holds the action performed
and the fields that were changed in the transaction.
If AuditlogMiddleware is used, the actor will be set automatically. Keep in mind that editing / re-saving LogEntry
instances may set the actor to a wrong value - editing LogEntry instances is not recommended (and it should not be
necessary).
"""
class Action:
"""
The actions that Auditlog distinguishes: creating, updating and deleting objects. Viewing objects is not logged.
The values of the actions are numeric, a higher integer value means a more intrusive action. This may be useful
in some cases when comparing actions because the ``__lt``, ``__lte``, ``__gt``, ``__gte`` lookup filters can be
used in queries.
The valid actions are :py:attr:`Action.CREATE`, :py:attr:`Action.UPDATE` and :py:attr:`Action.DELETE`.
"""
CREATE = 0
UPDATE = 1
DELETE = 2
choices = (
(CREATE, _("create")),
(UPDATE, _("update")),
(DELETE, _("delete")),
)
content_type = models.ForeignKey('contenttypes.ContentType', on_delete=models.CASCADE, related_name='+', verbose_name=_("content type"))
object_pk = models.CharField(db_index=True, max_length=255, verbose_name=_("object pk"))
object_id = models.BigIntegerField(blank=True, db_index=True, null=True, verbose_name=_("object id"))
object_repr = models.TextField(verbose_name=_("object representation"))
action = models.PositiveSmallIntegerField(choices=Action.choices, verbose_name=_("action"))
changes = models.TextField(blank=True, verbose_name=_("change message"))
actor = models.ForeignKey(settings.AUTH_USER_MODEL, blank=True, null=True, on_delete=models.SET_NULL, related_name='+', verbose_name=_("actor"))
remote_addr = models.GenericIPAddressField(blank=True, null=True, verbose_name=_("remote address"))
timestamp = models.DateTimeField(auto_now_add=True, verbose_name=_("timestamp"))
additional_data = JSONField(blank=True, null=True, verbose_name=_("additional data"))
objects = LogEntryManager()
class Meta:
get_latest_by = 'timestamp'
ordering = ['-timestamp']
verbose_name = _("log entry")
verbose_name_plural = _("log entries")
def __str__(self):
if self.action == self.Action.CREATE:
fstring = _("Created {repr:s}")
elif self.action == self.Action.UPDATE:
fstring = _("Updated {repr:s}")
elif self.action == self.Action.DELETE:
fstring = _("Deleted {repr:s}")
else:
fstring = _("Logged {repr:s}")
return fstring.format(repr=self.object_repr)
@property
def changes_dict(self):
"""
:return: The changes recorded in this log entry as a dictionary object.
"""
try:
return json.loads(self.changes)
except ValueError:
return {}
@property
def changes_str(self, colon=': ', arrow=smart_text(' \u2192 '), separator='; '):
"""
Return the changes recorded in this log entry as a string. The formatting of the string can be customized by
setting alternate values for colon, arrow and separator. If the formatting is still not satisfying, please use
:py:func:`LogEntry.changes_dict` and format the string yourself.
:param colon: The string to place between the field name and the values.
:param arrow: The string to place between each old and new value.
:param separator: The string to place between each field.
:return: A readable string of the changes in this log entry.
"""
substrings = []
for field, values in iteritems(self.changes_dict):
substring = smart_text('{field_name:s}{colon:s}{old:s}{arrow:s}{new:s}').format(
field_name=field,
colon=colon,
old=values[0],
arrow=arrow,
new=values[1],
)
substrings.append(substring)
return separator.join(substrings)
class AuditlogHistoryField(GenericRelation):
"""
A subclass of py:class:`django.contrib.contenttypes.fields.GenericRelation` that sets some default variables. This
makes it easier to access Auditlog's log entries, for example in templates.
By default this field will assume that your primary keys are numeric, simply because this is the most common case.
However, if you have a non-integer primary key, you can simply pass ``pk_indexable=False`` to the constructor, and
Auditlog will fall back to using a non-indexed text based field for this model.
Using this field will not automatically register the model for automatic logging. This is done so you can be more
flexible with how you use this field.
:param pk_indexable: Whether the primary key for this model is not an :py:class:`int` or :py:class:`long`.
:type pk_indexable: bool
"""
def __init__(self, pk_indexable=True, **kwargs):
kwargs['to'] = LogEntry
if pk_indexable:
kwargs['object_id_field'] = 'object_id'
else:
kwargs['object_id_field'] = 'object_pk'
kwargs['content_type_field'] = 'content_type'
super(AuditlogHistoryField, self).__init__(**kwargs)
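# A hedged example of attaching the relation above to a model; the model and its
# fields are hypothetical:
#
#   from django.db import models
#   from auditlog.models import AuditlogHistoryField
#
#   class Article(models.Model):
#       title = models.CharField(max_length=100)
#       history = AuditlogHistoryField()   # pass pk_indexable=False for non-integer pks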
# South compatibility for AuditlogHistoryField
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^auditlog\.models\.AuditlogHistoryField"])
raise DeprecationWarning("South support will be dropped in django-auditlog 0.4.0 or later.")
except ImportError:
pass
|
|
##############################################################################
# Copyright 2016-2018 Rigetti Computing
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
import re
import time
import warnings
from json.decoder import JSONDecodeError
from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Union, cast
import numpy as np
import requests
from requests.adapters import HTTPAdapter
from urllib3 import Retry
from pyquil.api._config import PyquilConfig
from pyquil.api._error_reporting import _record_call
from pyquil.api._errors import (
error_mapping,
ApiError,
UserMessageError,
UnknownApiError,
TooManyQubitsError,
)
from pyquil.api._logger import logger
from pyquil.quil import Program
from pyquil.version import __version__
from pyquil.wavefunction import Wavefunction
TYPE_EXPECTATION = "expectation"
TYPE_MULTISHOT = "multishot"
TYPE_MULTISHOT_MEASURE = "multishot-measure"
TYPE_WAVEFUNCTION = "wavefunction"
def get_json(session: requests.Session, url: str, params: Optional[Dict[Any, Any]] = None) -> Any:
"""
Get JSON from a Forest endpoint.
"""
logger.debug("Sending GET request to %s. Params: %s", url, params)
res = session.get(url, params=params)
if res.status_code >= 400:
raise parse_error(res)
return res.json()
def post_json(session: requests.Session, url: str, json: Any) -> requests.models.Response:
"""
Post JSON to the Forest endpoint.
"""
logger.debug("Sending POST request to %s. Body: %s", url, json)
res = session.post(url, json=json)
if res.status_code >= 400:
raise parse_error(res)
return res
def parse_error(res: requests.Response) -> ApiError:
"""
Every server error should contain a "status" field with a human readable explanation of
    what went wrong as well as an "error_type" field indicating the kind of error that can be mapped
to a Python type.
There's a fallback error UnknownError for other types of exceptions (network issues, api
gateway problems, etc.)
"""
try:
body = res.json()
except JSONDecodeError:
raise UnknownApiError(res.text)
if "error_type" not in body:
raise UnknownApiError(str(body))
error_type = body["error_type"]
status = body["status"]
if re.search(r"[0-9]+ qubits were requested, but the QVM is limited to [0-9]+ qubits.", status):
return TooManyQubitsError(status)
error_cls = error_mapping.get(error_type, UnknownApiError)
return error_cls(status)
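# A sketch of the error body parse_error() expects; the concrete values are
# illustrative only:
#
#   {"error_type": "<a key from error_mapping>", "status": "human readable explanation"}
#
# Unknown "error_type" values fall back to UnknownApiError, and "status" becomes
# the exception message.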
def get_session(*args: Any, **kwargs: Any) -> "ForestSession":
"""
Create a requests session to access the REST API
:return: requests session
:rtype: Session
"""
session = ForestSession(*args, **kwargs)
retry_adapter = HTTPAdapter(
max_retries=Retry(
total=3,
method_whitelist=["POST"],
status_forcelist=[502, 503, 504, 521, 523],
backoff_factor=0.2,
raise_on_status=False,
)
)
session.mount("http://", retry_adapter)
session.mount("https://", retry_adapter)
# We need this to get binary payload for the wavefunction call.
session.headers.update({"Accept": "application/octet-stream"})
session.headers.update({"Content-Type": "application/json; charset=utf-8"})
return session
def validate_noise_probabilities(noise_parameter: Optional[List[float]]) -> None:
"""
Is noise_parameter a valid specification of noise probabilities for depolarizing noise?
:param list noise_parameter: List of noise parameter values to be validated.
"""
if not noise_parameter:
return
if not isinstance(noise_parameter, list):
raise TypeError("noise_parameter must be a list")
if any([not isinstance(value, float) for value in noise_parameter]):
raise TypeError("noise_parameter values should all be floats")
if len(noise_parameter) != 3:
raise ValueError("noise_parameter lists must be of length 3")
if sum(noise_parameter) > 1 or sum(noise_parameter) < 0:
raise ValueError("sum of entries in noise_parameter must be between 0 and 1 (inclusive)")
if any([value < 0 for value in noise_parameter]):
raise ValueError("noise_parameter values should all be non-negative")
def validate_qubit_list(qubit_list: Sequence[int]) -> Sequence[int]:
"""
Check the validity of qubits for the payload.
:param qubit_list: List of qubits to be validated.
"""
if not isinstance(qubit_list, Sequence):
raise TypeError("'qubit_list' must be of type 'Sequence'")
if any(not isinstance(i, int) or i < 0 for i in qubit_list):
raise TypeError("'qubit_list' must contain positive integer values")
return qubit_list
def prepare_register_list(
register_dict: Dict[str, Union[bool, Sequence[int]]]
) -> Dict[str, Union[bool, Sequence[int]]]:
"""
Canonicalize classical addresses for the payload and ready MemoryReference instances
for serialization.
This function will cast keys that are iterables of int-likes to a list of Python
ints. This is to support specifying the register offsets as ``range()`` or numpy
arrays. This mutates ``register_dict``.
:param register_dict: The classical memory to retrieve. Specified as a dictionary:
the keys are the names of memory regions, and the values are either (1) a list of
integers for reading out specific entries in that memory region, or (2) True, for
reading out the entire memory region.
"""
if not isinstance(register_dict, dict):
raise TypeError("register_dict must be a dict but got " + repr(register_dict))
for k, v in register_dict.items():
if isinstance(v, bool):
assert v # If boolean v must be True
continue
indices = [int(x) for x in v] # support ranges, numpy, ...
if not all(x >= 0 for x in indices):
raise TypeError("Negative indices into classical arrays are not allowed.")
register_dict[k] = indices
return register_dict
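# Illustrative canonicalization performed by prepare_register_list; the register
# names are hypothetical:
#
#   prepare_register_list({"ro": range(3), "flag": True})
#   ->  {"ro": [0, 1, 2], "flag": True}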
def run_and_measure_payload(
quil_program: Program, qubits: Sequence[int], trials: int, random_seed: int
) -> Dict[str, object]:
"""REST payload for :py:func:`ForestConnection._run_and_measure`"""
if not quil_program:
raise ValueError(
"You have attempted to run an empty program."
" Please provide gates or measure instructions to your program."
)
if not isinstance(quil_program, Program):
raise TypeError("quil_program must be a Quil program object")
qubits = validate_qubit_list(qubits)
if not isinstance(trials, int):
raise TypeError("trials must be an integer")
payload = {
"type": TYPE_MULTISHOT_MEASURE,
"qubits": list(qubits),
"trials": trials,
"compiled-quil": quil_program.out(calibrations=False),
}
if random_seed is not None:
payload["rng-seed"] = random_seed
return payload
def wavefunction_payload(quil_program: Program, random_seed: int) -> Dict[str, object]:
"""REST payload for :py:func:`ForestConnection._wavefunction`"""
if not isinstance(quil_program, Program):
raise TypeError("quil_program must be a Quil program object")
payload: Dict[str, object] = {
"type": TYPE_WAVEFUNCTION,
"compiled-quil": quil_program.out(calibrations=False),
}
if random_seed is not None:
payload["rng-seed"] = random_seed
return payload
def expectation_payload(
prep_prog: Program, operator_programs: Optional[Iterable[Program]], random_seed: int
) -> Dict[str, object]:
"""REST payload for :py:func:`ForestConnection._expectation`"""
if operator_programs is None:
operator_programs = [Program()]
if not isinstance(prep_prog, Program):
raise TypeError("prep_prog variable must be a Quil program object")
payload: Dict[str, object] = {
"type": TYPE_EXPECTATION,
"state-preparation": prep_prog.out(calibrations=False),
"operators": [x.out(calibrations=False) for x in operator_programs],
}
if random_seed is not None:
payload["rng-seed"] = random_seed
return payload
def qvm_run_payload(
quil_program: Program,
classical_addresses: Dict[str, Union[bool, Sequence[int]]],
trials: int,
measurement_noise: Optional[Tuple[float, float, float]],
gate_noise: Optional[Tuple[float, float, float]],
random_seed: Optional[int],
) -> Dict[str, object]:
"""REST payload for :py:func:`ForestConnection._qvm_run`"""
if not quil_program:
raise ValueError(
"You have attempted to run an empty program."
" Please provide gates or measure instructions to your program."
)
if not isinstance(quil_program, Program):
raise TypeError("quil_program must be a Quil program object")
classical_addresses = prepare_register_list(classical_addresses)
if not isinstance(trials, int):
raise TypeError("trials must be an integer")
payload = {
"type": TYPE_MULTISHOT,
"addresses": classical_addresses,
"trials": trials,
"compiled-quil": quil_program.out(calibrations=False),
}
if measurement_noise is not None:
payload["measurement-noise"] = measurement_noise
if gate_noise is not None:
payload["gate-noise"] = gate_noise
if random_seed is not None:
payload["rng-seed"] = random_seed
return payload
class ForestSession(requests.Session):
"""
ForestSession inherits from requests.Session. It is responsible for adding
authentication headers to Forest server requests. Upon receiving a 401 or 403
response, it will attempt to refresh the auth credential and update the
PyquilConfig, which in turn writes the refreshed auth credential to file.
Encapsulates the operations required for authorization & encryption
with the QPU.
Two operations are involved in authorization:
* Requesting & storing a user authentication token, used to authenticate calls
to Forest, Dispatch, and other Rigetti services
* Requesting a Curve ZeroMQ keypair for connection to the QPU. The response to
this request also comes with service endpoints: compiler server and QPU
The authentication tokens are of the standard JWT format and are issued by Forest Server.
The refresh token is only used to renew the access token, which is used for all transactions
and is valid for a short period of time.
In wrapping the PyQuilConfig object, it provides that object with a callback to
retrieve a valid engagement when needed, because the engagement is maintained here
but is used by the config to provide service endpoints.
"""
def __init__(self, *, config: PyquilConfig, lattice_name: Optional[str] = None):
super().__init__()
self.config = config
self.config.get_engagement = self.get_engagement
self._engagement: Optional["Engagement"] = None
self.headers.update(self.config.qcs_auth_headers)
self.headers["User-Agent"] = f"PyQuil/{__version__}"
self.lattice_name = lattice_name
def _engage(self) -> Optional["Engagement"]:
"""
The heart of the QPU authorization process, ``engage`` makes a request to
the dispatch server for the information needed to communicate with the QPU.
This is a standard GraphQL request, authenticated using the access token
retrieved from Forest Server.
The response includes the endpoints to the QPU and QPU Compiler Server,
along with the set of keys necessary to connect to the QPU and the time at
which that key set expires.
"""
query = """
mutation Engage($name: String!) {
engage(input: { lattice: { name: $name }}) {
success
message
engagement {
type
qpu {
endpoint
credentials {
clientPublic
clientSecret
serverPublic
}
}
compiler {
endpoint
}
expiresAt
}
}
}
"""
if not self.lattice_name:
logger.debug("ForestSession requires lattice_name in order to engage")
return None
logger.debug("Requesting engagement from %s", self.config.dispatch_url)
variables = dict(name=self.lattice_name)
query_response = self._request_graphql_retry(
self.config.dispatch_url, query=query, variables=variables
)
if query_response.get("errors"):
errors = query_response.get("errors", [])
error_messages = map(lambda error: error["message"], errors) # type: ignore
raise UserMessageError(f"Failed to engage: {','.join(error_messages)}")
engagement_response = query_response.get("data", {}).get("engage", None)
if engagement_response and engagement_response.get("success") is True:
logger.debug("Engagement successful")
engagement_data = engagement_response.get("engagement", {})
return Engagement(
client_secret_key=engagement_data.get("qpu", {})
.get("credentials", {})
.get("clientSecret", "")
.encode("utf-8"),
client_public_key=engagement_data.get("qpu", {})
.get("credentials", {})
.get("clientPublic", "")
.encode("utf-8"),
server_public_key=engagement_data.get("qpu", {})
.get("credentials", {})
.get("serverPublic", "")
.encode("utf-8"),
expires_at=engagement_data.get("expiresAt", {}),
qpu_endpoint=engagement_data.get("qpu", {}).get("endpoint"),
qpu_compiler_endpoint=engagement_data.get("compiler", {}).get("endpoint"),
)
else:
raise UserMessageError(
f"Unable to engage {self.lattice_name}: "
f"{engagement_response.get('message', 'No message')}"
)
def get_engagement(self) -> Optional["Engagement"]:
"""
Returns memoized engagement information, if still valid - or requests a new engagement
and then stores and returns that.
"""
if not (self._engagement and self._engagement.is_valid()):
self._engagement = self._engage()
return self._engagement
def _refresh_auth_token(self) -> bool:
self.config.assert_valid_auth_credential()
if self.config.user_auth_token is not None:
return self._refresh_user_auth_token()
elif self.config.qmi_auth_token is not None:
return self._refresh_qmi_auth_token()
return False
def _refresh_user_auth_token(self) -> bool:
url = f"{self.config.forest_url}/auth/idp/oauth2/v1/token"
headers = {
"Content-Type": "application/x-www-form-urlencoded",
"Cache-Control": "no-cache",
"Accept": "application/json",
}
assert self.config.user_auth_token is not None
data = {
"grant_type": "refresh_token",
"scope": self.config.user_auth_token["scope"],
"refresh_token": self.config.user_auth_token["refresh_token"],
}
response = super().request("POST", url, data=data, headers=headers)
if response.status_code == 200:
self.config.update_user_auth_token(response.json())
self.headers.update(self.config.qcs_auth_headers)
return True
logger.warning(
f"Failed to refresh your user auth token at {self.config.user_auth_token_path}. "
f"Server response: {response.text}"
)
return False
def _refresh_qmi_auth_token(self) -> bool:
url = f"{self.config.forest_url}/auth/qmi/refresh"
headers = {"Content-Type": "application/json", "Accept": "application/json"}
data = self.config.qmi_auth_token
response = super().request("POST", url, json=data, headers=headers)
if response.status_code == 200:
self.config.update_qmi_auth_token(response.json())
self.headers.update(self.config.qcs_auth_headers)
return True
logger.warning(
f"Failed to refresh your QMI auth token at {self.config.qmi_auth_token_path}. "
f"Server response: {response.text}"
)
return False
def request(self, *args: Any, **kwargs: Any) -> requests.models.Response:
"""
request is a wrapper around requests.Session#request that checks for
401 and 403 response statuses and refreshes the auth credential
accordingly.
"""
response = super().request(*args, **kwargs)
if response.status_code in {401, 403}:
if self._refresh_auth_token():
response = super().request(*args, **kwargs)
return response
def _request_graphql(self, url: str, query: str, variables: Dict[Any, Any]) -> Dict[Any, Any]:
"""
Makes a single graphql request using the session credentials, throwing an error
if the response is not valid JSON.
Returns the JSON parsed from the response.
"""
response = super().post(url, json=dict(query=query, variables=variables))
try:
return cast(Dict[Any, Any], response.json())
except JSONDecodeError as e:
logger.exception(f"Unable to parse json response from endpoint {url}:", response.text)
raise e
def _request_graphql_retry(self, *args: Any, **kwargs: Any) -> Dict[Any, Any]:
"""
Makes a GraphQL request using session credentials, refreshing them once if the server
identifies them as expired.
Determining whether a call has failed to a GraphQL endpoint is less axiomatic than for a
REST interface, and so here we follow the pattern set by Rigetti services, which return an
HTTP 200 response with an array of errors. If any of those errors cite an expired
authentication token, we refresh the token to clear that error. Note that other error
messages will not trigger a retry.
"""
result = self._request_graphql(*args, **kwargs)
errors = result.get("errors", [])
token_is_expired = any(
error.get("extensions", {}).get("code") == "AUTH_TOKEN_EXPIRED" for error in errors
)
if token_is_expired:
if self._refresh_auth_token():
result = self._request_graphql(*args, **kwargs)
return result
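    # A sketch of the GraphQL error shape _request_graphql_retry checks before
    # refreshing credentials; everything except the "AUTH_TOKEN_EXPIRED" code is
    # illustrative:
    #
    #   {"errors": [{"message": "token expired",
    #                "extensions": {"code": "AUTH_TOKEN_EXPIRED"}}]}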
class ForestConnection:
@_record_call
def __init__(
self,
sync_endpoint: Optional[str] = None,
compiler_endpoint: Optional[str] = None,
forest_cloud_endpoint: Optional[str] = None,
):
"""
Represents a connection to Forest containing methods to wrap all possible API endpoints.
Users should not use methods from this class directly.
:param sync_endpoint: The endpoint of the server for running QVM jobs
:param compiler_endpoint: The endpoint of the server for running quilc compiler jobs
:param forest_cloud_endpoint: The endpoint of the forest cloud server
"""
pyquil_config = PyquilConfig()
if sync_endpoint is None:
sync_endpoint = pyquil_config.qvm_url
if compiler_endpoint is None:
compiler_endpoint = pyquil_config.quilc_url
if forest_cloud_endpoint is None:
forest_cloud_endpoint = pyquil_config.forest_url
assert sync_endpoint is not None
self.sync_endpoint = sync_endpoint
self.compiler_endpoint = compiler_endpoint
self.forest_cloud_endpoint = forest_cloud_endpoint
self.session = get_session(config=pyquil_config)
@_record_call
def _run_and_measure(
self, quil_program: Program, qubits: Sequence[int], trials: int, random_seed: int
) -> np.ndarray:
"""
Run a Forest ``run_and_measure`` job.
Users should use :py:func:`WavefunctionSimulator.run_and_measure` instead of calling
this directly.
"""
payload = run_and_measure_payload(quil_program, qubits, trials, random_seed)
response = post_json(self.session, self.sync_endpoint + "/qvm", payload)
return np.asarray(response.json())
@_record_call
def _wavefunction(self, quil_program: Program, random_seed: int) -> Wavefunction:
"""
Run a Forest ``wavefunction`` job.
Users should use :py:func:`WavefunctionSimulator.wavefunction` instead of calling
this directly.
"""
payload = wavefunction_payload(quil_program, random_seed)
response = post_json(self.session, self.sync_endpoint + "/qvm", payload)
return Wavefunction.from_bit_packed_string(response.content)
@_record_call
def _expectation(
self, prep_prog: Program, operator_programs: Iterable[Program], random_seed: int
) -> np.ndarray:
"""
Run a Forest ``expectation`` job.
Users should use :py:func:`WavefunctionSimulator.expectation` instead of calling
this directly.
"""
if isinstance(operator_programs, Program):
warnings.warn(
"You have provided a Program rather than a list of Programs. The results "
"from expectation will be line-wise expectation values of the "
"operator_programs.",
SyntaxWarning,
)
payload = expectation_payload(prep_prog, operator_programs, random_seed)
response = post_json(self.session, self.sync_endpoint + "/qvm", payload)
return np.asarray(response.json())
@_record_call
def _qvm_run(
self,
quil_program: Program,
classical_addresses: Dict[str, Union[bool, Sequence[int]]],
trials: int,
measurement_noise: Optional[Tuple[float, float, float]],
gate_noise: Optional[Tuple[float, float, float]],
random_seed: Optional[int],
) -> Dict[str, np.ndarray]:
"""
Run a Forest ``run`` job on a QVM.
Users should use :py:func:`QVM.run` instead of calling this directly.
"""
payload = qvm_run_payload(
quil_program, classical_addresses, trials, measurement_noise, gate_noise, random_seed
)
response = post_json(self.session, self.sync_endpoint + "/qvm", payload)
        ram: Dict[str, np.ndarray] = {key: np.array(val) for key, val in response.json().items()}
        return ram
@_record_call
def _qvm_get_version_info(self) -> str:
"""
Return version information for the QVM.
:return: String of QVM version
"""
response = post_json(self.session, self.sync_endpoint, {"type": "version"})
split_version_string = response.text.split()
try:
qvm_version = split_version_string[0]
        except IndexError:
raise TypeError(f"Malformed version string returned by the QVM: {response.text}")
return qvm_version
class Engagement:
"""
An Engagement stores all the information retrieved via an engagement request sent to
the dispatch server.
"""
def __init__(
self,
client_public_key: bytes,
client_secret_key: bytes,
server_public_key: bytes,
expires_at: Union[int, float, str],
qpu_endpoint: str,
qpu_compiler_endpoint: str,
):
self.client_public_key = client_public_key
self.client_secret_key = client_secret_key
self.server_public_key = server_public_key
self.expires_at = float(expires_at) if expires_at else None
self.qpu_endpoint = qpu_endpoint
self.qpu_compiler_endpoint = qpu_compiler_endpoint
logger.debug("New engagement created: \n%s", self)
def is_valid(self) -> bool:
"""
Return true if an engagement is valid for use, false if it is missing required
fields
An 'invalid' engagement is one which will not grant access to the QPU.
"""
return all(
[
self.client_public_key is not None,
self.client_secret_key is not None,
self.server_public_key is not None,
(self.expires_at is None or self.expires_at > time.time()),
self.qpu_endpoint is not None,
]
)
def __str__(self) -> str:
return f"""Client public key: {self.client_public_key}
Client secret key: masked ({len(self.client_secret_key)} B)
Server public key: {self.server_public_key}
Expiration time: {self.expires_at}
QPU Endpoint: {self.qpu_endpoint}
QPU Compiler Endpoint: {self.qpu_compiler_endpoint}""" # type: ignore
|
|
"""
Connecting to a set of solr servers.
To get a :class:`~solrcloudpy.SolrCollection` instance from a :class:`SolrConnection` use either dictionary-style or attribute-style access:
>>> from solrcloudpy.connection import SolrConnection
>>> conn = SolrConnection()
>>> conn.list()
[u'collection1']
>>> conn['collection1']
SolrCollection<collection1>
"""
import urllib
import json
import semver
import solrcloudpy.collection as collection
from solrcloudpy.utils import _Request
MIN_SUPPORTED_VERSION = '>=4.6.0'
# TODO: revisit this when Solr 7 comes around.
MAX_SUPPORTED_VERSION = '<=7.0.0'
class SolrConnection(object):
"""
    Connection to a single solr server or to several servers
:param server: The server. Can be a single one or a list of servers. Example ``localhost:8983`` or ``[localhost,solr1.domain.com:8983]``.
:type server: str
    :param detect_live_nodes: whether to detect live nodes automatically or not. This assumes that one is able to access the IPs listed by Zookeeper. The default value is ``False``.
:type detect_live_nodes: bool
:param user: HTTP basic auth user name
:type user: str
:param password: HTTP basic auth password
:type password: str
:param timeout: timeout for HTTP requests
:type timeout: int
:param webappdir: the solr webapp directory; defaults to 'solr'
:type webappdir: str
:param version: the solr version we're currently running. defaults to 5.3.0 for backwards compatibility. must be semver compliant
:type version: str
:param request_retries: number of times to retry a request against the same server. particularly useful for load-balancing or proxy situations.
:type request_retries: int
"""
def __init__(self, server="localhost:8983",
detect_live_nodes=False,
user=None,
password=None,
timeout=10,
webappdir='solr',
version='5.3.0',
request_retries=1):
self.user = user
self.password = password
self.timeout = timeout
self.webappdir = webappdir
self.version = version
self.request_retries = request_retries
        if not (semver.match(version, MIN_SUPPORTED_VERSION) and semver.match(version, MAX_SUPPORTED_VERSION)):
raise StandardError("Unsupported version %s" % version)
if semver.match(self.version, '<5.4.0'):
self.zk_path = '/{webappdir}/zookeeper'.format(webappdir=self.webappdir)
else:
self.zk_path = '/{webappdir}/admin/zookeeper'.format(webappdir=self.webappdir)
self.url_template = 'http://{{server}}/{webappdir}/'.format(webappdir=self.webappdir)
if type(server) == str:
self.url = self.url_template.format(server=server)
servers = [self.url, self.url]
if detect_live_nodes:
url = servers[0]
self.servers = self.detect_nodes(url)
else:
self.servers = servers
if type(server) == list:
servers = [self.url_template.format(server=a) for a in server]
if detect_live_nodes:
url = servers[0]
self.servers = self.detect_nodes(url)
else:
self.servers = servers
self.client = _Request(self)
def detect_nodes(self, _):
"""
Queries Solr's zookeeper integration for live nodes
DEPRECATED
        :return: a list of solr URLs corresponding to live nodes in solrcloud
:rtype: list
"""
return self.live_nodes
def list(self):
"""
Lists out the current collections in the cluster
This should probably be a recursive function but I'm not in the mood today
:return: a list of collection names
:rtype: list
"""
params = {'detail': 'false', 'path': '/collections'}
response = self.client.get(
self.zk_path, params).result
if 'children' not in response['tree'][0]:
return []
if response['tree'][0]['data']['title'] == '/collections':
# solr 5.3 and older
data = response['tree'][0]['children']
else:
# solr 5.4+
data = None
for branch in response['tree']:
if data is not None:
break
for child in branch['children']:
if child['data']['title'] == '/collections':
if 'children' not in child:
return []
else:
data = child['children']
break
colls = []
if data:
colls = [node['data']['title'] for node in data]
return colls
def _list_cores(self):
"""
Retrieves a list of cores from solr admin
:return: a list of cores
:rtype: list
"""
params = {'wt': 'json', }
response = self.client.get(
('/{webappdir}/admin/cores'.format(webappdir=self.webappdir)), params).result
cores = response.get('status', {}).keys()
return cores
@property
def cluster_health(self):
"""
Determine the state of all nodes and collections in the cluster. Problematic nodes or
collections are returned, along with their state, otherwise an `OK` message is returned
:return: a dict representing the status of the cluster
:rtype: dict
"""
res = []
if semver.match(self.version, '<5.4.0'):
params = {'detail': 'true', 'path': '/clusterstate.json'}
response = self.client.get(
('/{webappdir}/zookeeper'.format(webappdir=self.webappdir)), params).result
data = json.loads(response['znode']['data'])
collections = self.list()
for coll in collections:
shards = data[coll]['shards']
for shard, shard_info in shards.iteritems():
replicas = shard_info['replicas']
for replica, info in replicas.iteritems():
state = info['state']
if state != 'active':
item = {"collection": coll,
"replica": replica,
"shard": shard,
"info": info,
}
res.append(item)
else:
params = {'action': 'CLUSTERSTATUS', 'wt': 'json'}
response = self.client.get(
('/{webappdir}/admin/collections'.format(webappdir=self.webappdir)), params).result
for collection_name, collection in response.dict['cluster']['collections'].items():
for shard_name, shard in collection['shards'].items():
for replica_name, replica in shard['replicas'].items():
if replica['state'] != 'active':
item = {"collection": collection_name,
"replica": replica_name,
"shard": shard_name,
"info": replica}
res.append(item)
if not res:
return {"status": "OK"}
return {"status": "NOT OK", "details": res}
@property
def cluster_leader(self):
"""
Gets the cluster leader
:rtype: dict
:return: a dict with the json loaded from the zookeeper response related to the cluster leader request
"""
params = {'detail': 'true', 'path': '/overseer_elect/leader'}
response = self.client.get(self.zk_path, params).result
return json.loads(response['znode']['data'])
@property
def live_nodes(self):
"""
Lists all nodes that are currently online
:return: a list of urls related to live nodes
:rtype: list
"""
params = {'detail': 'true', 'path': '/live_nodes'}
response = self.client.get(self.zk_path, params).result
children = [d['data']['title'] for d in response['tree'][0]['children']]
nodes = [c.replace('_solr', '') for c in children]
return [self.url_template.format(server=a) for a in nodes]
def create_collection(self, collname, *args, **kwargs):
r"""
Create a collection.
:param collname: The collection name
:type collname: str
:param \*args: additional arguments
:param \*\*kwargs: additional named parameters
:return: the created collection
:rtype: SolrCollection
"""
coll = collection.SolrCollection(self, collname)
return coll.create(*args, **kwargs)
def __getattr__(self, name):
"""
Convenience method for retrieving a solr collection
:param name: the name of the collection
:type name: str
:return: SolrCollection
"""
return collection.SolrCollection(self, name)
def __getitem__(self, name):
"""
Convenience method for retrieving a solr collection
:param name: the name of the collection
:type name: str
:return: SolrCollection
"""
return collection.SolrCollection(self, name)
def __dir__(self):
"""
Convenience method for viewing servers available in this connection
:return: a list of servers
:rtype: list
"""
return self.list()
def __repr__(self):
"""
Representation in Python outputs
:return: string representation
:rtype: str
"""
return "SolrConnection %s" % str(self.servers)
|
|
import h2o_cmd, h2o, h2o_util, h2o_gbm
import re, random, math
def pickRandGlmParams(paramDict, params):
colX = 0
randomGroupSize = random.randint(1,len(paramDict))
for i in range(randomGroupSize):
randomKey = random.choice(paramDict.keys())
randomV = paramDict[randomKey]
randomValue = random.choice(randomV)
params[randomKey] = randomValue
if (randomKey=='x'):
colX = randomValue
    # force legal family/link combos
if 'family' in params and 'link' in params:
if params['family'] is not None:
if params['family'] == 'poisson':
if params['link'] is not None and params['link'] not in ('identity', 'log', 'inverse', 'familyDefault'):
params['link'] = None
# only tweedie/tweedie is legal?
if params['family'] == 'tweedie':
                if params['link'] is not None and params['link'] not in ('tweedie',):
params['link'] = None
if params['family'] == 'binomial':
if params['link'] is not None and params['link'] not in ('logit', 'identity', 'log', 'inverse', 'familyDefault'):
params['link'] = None
if params['family'] == 'gaussian':
if params['link'] is not None and params['link'] not in ('logit', 'identity', 'log', 'inverse', 'familyDefault'):
params['link'] = None
# case only used if binomial? binomial is default if no family
# update: apparently case and case_mode always affect things
# make sure the combo of case and case_mode makes sense
# there needs to be some entries in both effective cases
if ('case_mode' in params):
if ('case' not in params) or (params['case'] is None):
params['case'] = 1
else:
maxCase = max(paramDict['case'])
minCase = min(paramDict['case'])
if params['case_mode']=="<" and params['case']==minCase:
params['case'] += 1
elif params['case_mode']==">" and params['case']==maxCase:
params['case'] -= 1
elif params['case_mode']==">=" and params['case']==minCase:
params['case'] += 1
elif params['case_mode']=="<=" and params['case']==maxCase:
params['case'] -= 1
return colX
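# A hedged usage sketch for pickRandGlmParams; the paramDict below is made up and
# only shows the expected shape (each key maps to a list of candidate values):
#
#   paramDict = {'family': ['binomial', 'gaussian'], 'link': ['logit', None], 'x': [0, 1, 2]}
#   params = {}
#   colX = pickRandGlmParams(paramDict, params)   # params now holds a random, legalized subset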
def simpleCheckGLMScore(self, glmScore, family='gaussian', allowFailWarning=False, **kwargs):
warnings = None
if 'warnings' in glmScore:
warnings = glmScore['warnings']
# stop on failed
x = re.compile("failed", re.IGNORECASE)
# don't stop if fail to converge
c = re.compile("converge", re.IGNORECASE)
for w in warnings:
print "\nwarning:", w
if re.search(x,w) and not allowFailWarning:
if re.search(c,w):
# ignore the fail to converge warning now
pass
else:
                # stop on other 'fail' warnings (are there any? fail to solve?)
raise Exception(w)
validation = glmScore['validation']
validation['err'] = h2o_util.cleanseInfNan(validation['err'])
validation['nullDev'] = h2o_util.cleanseInfNan(validation['nullDev'])
validation['resDev'] = h2o_util.cleanseInfNan(validation['resDev'])
print "%15s %s" % ("err:\t", validation['err'])
print "%15s %s" % ("nullDev:\t", validation['nullDev'])
print "%15s %s" % ("resDev:\t", validation['resDev'])
# threshold only there if binomial?
# auc only for binomial
if family=="binomial":
print "%15s %s" % ("auc:\t", validation['auc'])
print "%15s %s" % ("threshold:\t", validation['threshold'])
err = False
if family=="poisson" or family=="gaussian":
if 'aic' not in validation:
print "aic is missing from the glm json response"
err = True
if math.isnan(validation['err']):
print "Why is this err = 'nan'?? %6s %s" % ("err:\t", validation['err'])
err = True
if math.isnan(validation['resDev']):
print "Why is this resDev = 'nan'?? %6s %s" % ("resDev:\t", validation['resDev'])
err = True
if err:
raise Exception ("How am I supposed to tell that any of these errors should be ignored?")
# legal?
if math.isnan(validation['nullDev']):
## emsg = "Why is this nullDev = 'nan'?? %6s %s" % ("nullDev:\t", validation['nullDev'])
## raise Exception(emsg)
pass
def simpleCheckGLM(self, glm, colX, allowFailWarning=False, allowZeroCoeff=False,
prettyPrint=False, noPrint=False, maxExpectedIterations=None, doNormalized=False, **kwargs):
# if we hit the max_iter, that means it probably didn't converge. should be 1-maxExpectedIter
# h2o GLM will verboseprint the result and print errors.
# so don't have to do that
# different when cross validation is used? No trainingErrorDetails?
if h2o.beta_features:
GLMModel = glm['glm_model']
else:
GLMModel = glm['GLMModel']
if not GLMModel:
raise Exception("GLMModel didn't exist in the glm response? %s" % h2o.dump_json(glm))
warnings = None
if 'warnings' in GLMModel and GLMModel['warnings']:
warnings = GLMModel['warnings']
# stop on failed
x = re.compile("failed", re.IGNORECASE)
# don't stop if fail to converge
c = re.compile("converge", re.IGNORECASE)
for w in warnings:
print "\nwarning:", w
if re.search(x,w) and not allowFailWarning:
if re.search(c,w):
# ignore the fail to converge warning now
pass
else:
                # stop on other 'fail' warnings (are there any? fail to solve?)
raise Exception(w)
# for key, value in glm.iteritems(): print key
# not in GLMGrid?
# FIX! don't get GLMParams if it can't solve?
if h2o.beta_features:
GLMParams = GLMModel['glm']
else:
GLMParams = GLMModel["GLMParams"]
family = GLMParams["family"]
if h2o.beta_features:
# number of submodels = number of lambda
# min of 2. lambda_max is first
submodels = GLMModel['submodels']
lambdas = GLMModel['lambdas']
        # since all our tests (?) only use one lambda, the best_lambda_idx should = 1
best_lambda_idx = GLMModel['best_lambda_idx']
print "best_lambda_idx:", best_lambda_idx
lambda_max = GLMModel['lambda_max']
print "lambda_max:", lambda_max
# currently lambda_max is not set by tomas. ..i.e.not valid
if 1==0 and lambda_max <= lambdas[best_lambda_idx]:
raise Exception("lambda_max %s should always be > the lambda result %s we're checking" % (lambda_max, lambdas[best_lambda_idx]))
# submodels0 = submodels[0]
# submodels1 = submodels[-1] # hackery to make it work when there's just one
if (best_lambda_idx >= len(lambdas)) or (best_lambda_idx < 0):
raise Exception("best_lambda_idx: %s should point to one of lambdas (which has len %s)" % (best_lambda_idx, len(lambdas)))
if (best_lambda_idx >= len(submodels)) or (best_lambda_idx < 0):
raise Exception("best_lambda_idx: %s should point to one of submodels (which has len %s)" % (best_lambda_idx, len(submodels)))
submodels1 = submodels[best_lambda_idx] # hackery to make it work when there's just one
iterations = submodels1['iteration']
else:
iterations = GLMModel['iterations']
print "GLMModel/iterations:", iterations
# if we hit the max_iter, that means it probably didn't converge. should be 1-maxExpectedIter
if maxExpectedIterations is not None and iterations > maxExpectedIterations:
raise Exception("Convergence issue? GLM did iterations: %d which is greater than expected: %d" % (iterations, maxExpectedIterations) )
if h2o.beta_features:
if 'validation' not in submodels1:
raise Exception("Should be a 'validation' key in submodels1: %s" % h2o.dump_json(submodels1))
validationsList = submodels1['validation']
validations = validationsList
else:
# pop the first validation from the list
if 'validations' not in GLMModel:
raise Exception("Should be a 'validations' key in GLMModel: %s" % h2o.dump_json(GLMModel))
validationsList = GLMModel['validations']
# don't want to modify validationsList in case someone else looks at it
validations = validationsList[0]
# xval. compare what we asked for and what we got.
n_folds = kwargs.setdefault('n_folds', None)
# not checked in v2?
if not h2o.beta_features:
if not 'xval_models' in validations:
if n_folds > 1:
raise Exception("No cross validation models returned. Asked for "+n_folds)
else:
xval_models = validations['xval_models']
if n_folds and n_folds > 1:
if len(xval_models) != n_folds:
raise Exception(len(xval_models)+" cross validation models returned. Asked for "+n_folds)
else:
# should be default 10?
if len(xval_models) != 10:
raise Exception(str(len(xval_models))+" cross validation models returned. Default should be 10")
if h2o.beta_features:
print "GLMModel/validations"
validations['null_deviance'] = h2o_util.cleanseInfNan(validations['null_deviance'])
validations['residual_deviance'] = h2o_util.cleanseInfNan(validations['residual_deviance'])
print "%15s %s" % ("null_deviance:\t", validations['null_deviance'])
print "%15s %s" % ("residual_deviance:\t", validations['residual_deviance'])
else:
print "GLMModel/validations"
validations['err'] = h2o_util.cleanseInfNan(validations['err'])
validations['nullDev'] = h2o_util.cleanseInfNan(validations['nullDev'])
validations['resDev'] = h2o_util.cleanseInfNan(validations['resDev'])
print "%15s %s" % ("err:\t", validations['err'])
print "%15s %s" % ("nullDev:\t", validations['nullDev'])
print "%15s %s" % ("resDev:\t", validations['resDev'])
# threshold only there if binomial?
# auc only for binomial
if family=="binomial":
print "%15s %s" % ("auc:\t", validations['auc'])
if h2o.beta_features:
best_threshold = validations['best_threshold']
thresholds = validations['thresholds']
print "%15s %s" % ("best_threshold:\t", best_threshold)
# have to look up the index for the cm, from the thresholds list
best_index = None
# FIX! best_threshold isn't necessarily in the list. jump out if >=
for i,t in enumerate(thresholds):
if t >= best_threshold: # ends up using next one if not present
best_index = i
break
assert best_index!=None, "%s %s" % (best_threshold, thresholds)
print "Now printing the right 'best_threshold' %s from '_cms" % best_threshold
# cm = glm['glm_model']['submodels'][0]['validation']['_cms'][-1]
submodels = glm['glm_model']['submodels']
cms = submodels[0]['validation']['_cms']
assert best_index<len(cms), "%s %s" % (best_index, len(cms))
# if we want 0.5..rounds to int
# mid = len(cms)/2
# cm = cms[mid]
cm = cms[best_index]
print "cm:", h2o.dump_json(cm['_arr'])
predErr = cm['_predErr']
classErr = cm['_classErr']
# compare to predErr
pctWrong = h2o_gbm.pp_cm_summary(cm['_arr'])
print "predErr:", predErr
print "calculated pctWrong from cm:", pctWrong
print "classErr:", classErr
# self.assertLess(pctWrong, 9,"Should see less than 9% error (class = 4)")
print "\nTrain\n==========\n"
print h2o_gbm.pp_cm(cm['_arr'])
else:
print "%15s %s" % ("threshold:\t", validations['threshold'])
if family=="poisson" or family=="gaussian":
print "%15s %s" % ("aic:\t", validations['aic'])
if not h2o.beta_features:
if math.isnan(validations['err']):
emsg = "Why is this err = 'nan'?? %6s %s" % ("err:\t", validations['err'])
raise Exception(emsg)
if math.isnan(validations['resDev']):
emsg = "Why is this resDev = 'nan'?? %6s %s" % ("resDev:\t", validations['resDev'])
raise Exception(emsg)
# legal?
if math.isnan(validations['nullDev']):
pass
# get a copy, so we don't destroy the original when we pop the intercept
if h2o.beta_features:
coefficients_names = GLMModel['coefficients_names']
# print "coefficients_names:", coefficients_names
idxs = submodels1['idxs']
print "idxs:", idxs
coefficients_names = coefficients_names
# always check both normalized and normal coefficients
norm_beta = submodels1['norm_beta']
# if norm_beta and len(coefficients_names)!=len(norm_beta):
# print len(coefficients_names), len(norm_beta)
# raise Exception("coefficients_names and normalized_norm_beta from h2o json not same length. coefficients_names: %s normalized_norm_beta: %s" % (coefficients_names, norm_beta))
#
beta = submodels1['beta']
# print "beta:", beta
# if len(coefficients_names)!=len(beta):
# print len(coefficients_names), len(beta)
# raise Exception("coefficients_names and beta from h2o json not same length. coefficients_names: %s beta: %s" % (coefficients_names, beta))
# test wants to use normalized?
if doNormalized:
beta_used = norm_beta
else:
beta_used = beta
coefficients = {}
# create a dictionary with name, beta (including intercept) just like v1
for i,b in zip(idxs, beta_used[:-1]):
name = coefficients_names[i]
coefficients[name] = b
print "len(idxs)", len(idxs), "len(beta_used)", len(beta_used)
print "coefficients:", coefficients
print "beta:", beta
print "norm_beta:", norm_beta
coefficients['Intercept'] = beta_used[-1]
print "len(coefficients_names)", len(coefficients_names)
print "len(idxs)", len(idxs)
print "idxs[-1]", idxs[-1]
print "intercept demapping info:", \
"coefficients_names[-i]:", coefficients_names[-1], \
"idxs[-1]:", idxs[-1], \
"coefficients_names[idxs[-1]]:", coefficients_names[idxs[-1]], \
"beta_used[-1]:", beta_used[-1], \
"coefficients['Intercept']", coefficients['Intercept']
# last one is intercept
interceptName = coefficients_names[idxs[-1]]
if interceptName != "Intercept" or abs(beta_used[-1])<1e-26:
raise Exception("'Intercept' should be last in coefficients_names and beta %s %s %s" %\
(idxs[-1], beta_used[-1], "-"+interceptName+"-"))
# idxs has the order for non-zero coefficients, it's shorter than beta_used and coefficients_names
# new 5/28/14. glm can point to zero coefficients
# for i in idxs:
# if beta_used[i]==0.0:
## raise Exception("idxs shouldn't point to any 0 coefficients i: %s %s:" % (i, beta_used[i]))
if len(idxs) > len(beta_used):
raise Exception("idxs shouldn't be longer than beta_used %s %s" % (len(idxs), len(beta_used)))
intercept = coefficients.pop('Intercept', None)
# intercept demapping info: idxs[-1]: 54 coefficients_names[[idxs[-1]]: Intercept beta_used[-1]: -6.6866753099
# the last one should be 'Intercept' ?
coefficients_names.pop()
else:
if doNormalized:
coefficients = GLMModel['normalized_coefficients'].copy()
else:
coefficients = GLMModel['coefficients'].copy()
coefficients_names = GLMModel['column_names']
# get the intercept out of there into its own dictionary
intercept = coefficients.pop('Intercept', None)
print "First intercept:", intercept
# have to skip the output col! get it from kwargs
# better always be there!
if h2o.beta_features:
y = kwargs['response']
else:
y = kwargs['y']
# the dict keys are column headers if they exist...how to order those? new: use the 'coefficients_names'
# from the response
# Tomas created 'coefficients_names which is the coefficient list in order.
# Just use it to index coefficients! works for header or no-header cases
# I guess now we won't print the "None" cases for dropped columns (constant columns!)
# Because Tomas doesn't get everything in 'coefficients_names' if dropped by GLMQuery before
# he gets it?
def add_to_coefficient_list_and_string(c, cList, cString):
if c in coefficients:
cValue = coefficients[c]
cValueString = "%s: %.5e " % (c, cValue)
else:
print "Warning: didn't see '" + c + "' in json coefficient response.",\
"Inserting 'None' with assumption it was dropped due to constant column)"
cValue = None
cValueString = "%s: %s " % (c, cValue)
cList.append(cValue)
# we put each on newline for easy comparison to R..otherwise keep condensed
if prettyPrint:
cValueString = "H2O coefficient " + cValueString + "\n"
# not mutable?
return cString + cValueString
# creating both a string for printing and a list of values
cString = ""
cList = []
# print in order using col_names
# coefficients_names is input only now..same for header or no header, or expanded enums
for c in coefficients_names:
cString = add_to_coefficient_list_and_string(c, cList, cString)
if prettyPrint:
print "\nH2O intercept:\t\t%.5e" % intercept
print cString
else:
if not noPrint:
print "\nintercept:", intercept, cString
print "\nTotal # of coefficients:", len(coefficients_names)
# pick out the coefficient for the column we enabled for enhanced checking. Can be None.
# FIX! temporary hack to deal with disappearing/renaming columns in GLM
if (not allowZeroCoeff) and (colX is not None):
absXCoeff = abs(float(coefficients[str(colX)]))
self.assertGreater(absXCoeff, 1e-26, (
"abs. value of GLM coefficients['" + str(colX) + "'] is " +
str(absXCoeff) + ", not >= 1e-26 for X=" + str(colX)
))
# intercept is buried in there too
absIntercept = abs(float(intercept))
self.assertGreater(absIntercept, 1e-26, (
"abs. value of GLM coefficients['Intercept'] is " +
str(absIntercept) + ", not >= 1e-26 for Intercept"
))
# this is good if we just want min or max
# maxCoeff = max(coefficients, key=coefficients.get)
# for more, just invert the dictionary and ...
if (len(coefficients)>0):
maxKey = max([(abs(coefficients[x]),x) for x in coefficients])[1]
print "H2O Largest abs. coefficient value:", maxKey, coefficients[maxKey]
minKey = min([(abs(coefficients[x]),x) for x in coefficients])[1]
print "H2O Smallest abs. coefficient value:", minKey, coefficients[minKey]
else:
print "Warning, no coefficients returned. Must be intercept only?"
# many of the GLM tests aren't single column though.
# quick and dirty check: if all the coefficients are zero,
# something is broken
# intercept is in there too, but this will get it okay
# just sum the abs value up..look for greater than 0
# skip this test if there is just one coefficient. Maybe pointing to a non-important coeff?
if (not allowZeroCoeff) and (len(coefficients)>1):
s = 0.0
for c in coefficients:
v = coefficients[c]
s += abs(float(v))
self.assertGreater(s, 1e-26, (
"sum of abs. value of GLM coefficients/intercept is " + str(s) + ", not >= 1e-26"
))
if h2o.beta_features:
print "submodels1, run_time (milliseconds):", submodels1['run_time']
else:
print "GLMModel model time (milliseconds):", GLMModel['model_time']
print "GLMModel validation time (milliseconds):", validations['val_time']
print "GLMModel lsm time (milliseconds):", GLMModel['lsm_time']
# shouldn't have any errors
h2o.check_sandbox_for_errors()
return (warnings, cList, intercept)
# compare this glm to last one. since the files are concatenations,
# the results should be similar? 10% of first is allowed delta
def compareToFirstGlm(self, key, glm, firstglm):
# if isinstance(firstglm[key], list):
# in case it's not a list already (err is a list)
h2o.verboseprint("compareToFirstGlm key:", key)
h2o.verboseprint("compareToFirstGlm glm[key]:", glm[key])
# key could be a list or not. if a list, don't want to create list of that list
# so use extend on an empty list. covers all cases?
if type(glm[key]) is list:
kList = glm[key]
firstkList = firstglm[key]
elif type(glm[key]) is dict:
raise Exception("compareToFirstGLm: Not expecting dict for " + key)
else:
kList = [glm[key]]
firstkList = [firstglm[key]]
print "kbn:", kList, firstkList
for k, firstk in zip(kList, firstkList):
# delta must be a positive number ?
delta = .1 * abs(float(firstk))
msg = "Too large a delta (" + str(delta) + ") comparing current and first for: " + key
self.assertAlmostEqual(float(k), float(firstk), delta=delta, msg=msg)
self.assertGreaterEqual(abs(float(k)), 0.0, str(k) + " abs not >= 0.0 in current")
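# Illustrative note (not from the original tests): with firstk = 0.25 the
# allowed delta above is 0.1 * abs(0.25) = 0.025, so a current value of
# k = 0.27 passes the assertAlmostEqual check while k = 0.28 would fail it.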
def simpleCheckGLMGrid(self, glmGridResult, colX=None, allowFailWarning=False, **kwargs):
# "grid": {
# "destination_keys": [
# "GLMGridResults__8222a49156af52532a34fb3ce4304308_0",
# "GLMGridResults__8222a49156af52532a34fb3ce4304308_1",
# "GLMGridResults__8222a49156af52532a34fb3ce4304308_2"
# ]
# },
if h2o.beta_features:
destination_key = glmGridResult['grid']['destination_keys'][0]
inspectGG = h2o.nodes[0].glm_view(destination_key)
models = inspectGG['glm_model']['submodels']
h2o.verboseprint("GLMGrid inspect GLMGrid model 0(best):", h2o.dump_json(models[0]))
g = simpleCheckGLM(self, inspectGG, colX, allowFailWarning=allowFailWarning, **kwargs)
# just to get some save_model testing
for i,m in enumerate(glmGridResult['grid']['destination_keys']):
print "Saving model", m, "to model"+str(i)
h2o.nodes[0].save_model(model=m, path='model'+str(i), force=1)
else:
destination_key = glmGridResult['destination_key']
inspectGG = h2o_cmd.runInspect(None, destination_key)
h2o.verboseprint("Inspect of destination_key", destination_key,":\n", h2o.dump_json(inspectGG))
models = glmGridResult['models']
for m, model in enumerate(models):
alpha = model['alpha']
area_under_curve = model['area_under_curve']
# FIX! should check max error?
error_0 = model['error_0']
error_1 = model['error_1']
model_key = model['key']
print "#%s GLM model key: %s" % (m, model_key)
glm_lambda = model['lambda']
# now indirect to the GLM result/model that's first in the list (best)
inspectGLM = h2o_cmd.runInspect(None, glmGridResult['models'][0]['key'])
h2o.verboseprint("GLMGrid inspect GLMGrid model 0(best):", h2o.dump_json(inspectGLM))
g = simpleCheckGLM(self, inspectGLM, colX, allowFailWarning=allowFailWarning, **kwargs)
return g
# This gives me a comma separated x string, for all the columns, with cols with
# missing values, enums, and optionally matching a pattern, removed. useful for GLM
# since it removes rows with any col with NA
# get input from this.
# (missingValuesDict, constantValuesDict, enumSizeDict, colTypeDict, colNameDict) = \
# h2o_cmd.columnInfoFromInspect(parseResult['destination_key',
# exceptionOnMissingValues=False, timeoutSecs=300)
def goodXFromColumnInfo(y,
num_cols=None, missingValuesDict=None, constantValuesDict=None, enumSizeDict=None,
colTypeDict=None, colNameDict=None, keepPattern=None, key=None,
timeoutSecs=120, forRF=False, noPrint=False, returnStringX=True):
y = str(y)
# if we pass a key, means we want to get the info ourselves here
if key is not None:
(missingValuesDict, constantValuesDict, enumSizeDict, colTypeDict, colNameDict) = \
h2o_cmd.columnInfoFromInspect(key, exceptionOnMissingValues=False,
max_column_display=99999999, timeoutSecs=timeoutSecs)
num_cols = len(colNameDict)
# now remove any whose names don't match the required keepPattern
if keepPattern is not None:
keepX = re.compile(keepPattern)
else:
keepX = None
x = range(num_cols)
# need to walk over a copy, cause we change x
xOrig = x[:]
ignore_x = [] # for use by RF
for k in xOrig:
name = colNameDict[k]
# remove it if it has the same name as the y output
if str(k) == y: # if they pass the col index as y
if not noPrint:
print "Removing %d because name: %s matches output %s" % (k, str(k), y)
x.remove(k)
# rf doesn't want it in ignore list
# ignore_x.append(k)
elif name == y: # if they pass the name as y
if not noPrint:
print "Removing %d because name: %s matches output %s" % (k, name, y)
x.remove(k)
# rf doesn't want it in ignore list
# ignore_x.append(k)
elif keepX is not None and not keepX.match(name):
if not noPrint:
print "Removing %d because name: %s doesn't match desired keepPattern %s" % (k, name, keepPattern)
x.remove(k)
ignore_x.append(k)
# missing values reports as constant also. so do missing first.
# remove all cols with missing values
# could change it against num_rows for a ratio
elif k in missingValuesDict:
value = missingValuesDict[k]
if not noPrint:
print "Removing %d with name: %s because it has %d missing values" % (k, name, value)
x.remove(k)
ignore_x.append(k)
elif k in constantValuesDict:
value = constantValuesDict[k]
if not noPrint:
print "Removing %d with name: %s because it has constant value: %s " % (k, name, str(value))
x.remove(k)
ignore_x.append(k)
# this is extra pruning..
# remove all cols with enums, if not already removed
elif k in enumSizeDict:
value = enumSizeDict[k]
if not noPrint:
print "Removing %d %s because it has enums of size: %d" % (k, name, value)
x.remove(k)
ignore_x.append(k)
if not noPrint:
print "x has", len(x), "cols"
print "ignore_x has", len(ignore_x), "cols"
# this is probably used in 'cols" in v2, which can take numbers
if returnStringX:
x = ",".join(map(str, x))
if h2o.beta_features: # add the 'C" prefix because of ignored_cols_by_name (and the start-with-1 offset)
ignore_x = ",".join(map(lambda x: "C" + str(x+1), ignore_x))
else:
ignore_x = ",".join(map(lambda x: str(x), ignore_x))
if not noPrint:
print "\nx:", x
print "\nignore_x:", ignore_x
if forRF:
return ignore_x
else:
return x
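# Minimal usage sketch for goodXFromColumnInfo (hypothetical inputs, not from
# the original tests): with colNameDict = {0: 'age', 1: 'zip', 2: 'income'},
# missingValuesDict = {1: 5}, constantValuesDict = {}, enumSizeDict = {} and
# y = 'income', column 2 is dropped because its name matches y, column 1 is
# dropped (and added to ignore_x) because it has missing values, and the
# returned x string is "0".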
|
|
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import webob
from nova.api.openstack.compute import extension_info
from nova import exception
from nova import policy
from nova import test
from nova.tests.unit.api.openstack import fakes
FAKE_UPDATED_DATE = extension_info.FAKE_UPDATED_DATE
class fake_extension(object):
def __init__(self, name, alias, description, version):
self.name = name
self.alias = alias
self.__doc__ = description
self.version = version
fake_extensions = {
'ext1-alias': fake_extension('ext1', 'ext1-alias', 'ext1 description', 1),
'ext2-alias': fake_extension('ext2', 'ext2-alias', 'ext2 description', 2),
'ext3-alias': fake_extension('ext3', 'ext3-alias', 'ext3 description', 1)
}
simulated_extension_list = {
'servers': fake_extension('Servers', 'servers', 'Servers.', 1),
'images': fake_extension('Images', 'images', 'Images.', 2),
'os-quota-sets': fake_extension('Quotas', 'os-quota-sets',
'Quotas management support', 1),
'os-cells': fake_extension('Cells', 'os-cells',
'Cells description', 1),
'os-flavor-access': fake_extension('FlavorAccess', 'os-flavor-access',
'Flavor access support.', 1)
}
def fake_policy_enforce(context, action, target, do_raise=True):
return True
def fake_policy_enforce_selective(context, action, target, do_raise=True):
if action == 'os_compute_api:ext1-alias:discoverable':
raise exception.Forbidden
else:
return True
class ExtensionInfoTest(test.NoDBTestCase):
def setUp(self):
super(ExtensionInfoTest, self).setUp()
ext_info = extension_info.LoadedExtensionInfo()
ext_info.extensions = fake_extensions
self.controller = extension_info.ExtensionInfoController(ext_info)
def test_extension_info_list(self):
self.stubs.Set(policy, 'enforce', fake_policy_enforce)
req = fakes.HTTPRequestV3.blank('/extensions')
res_dict = self.controller.index(req)
self.assertEqual(3, len(res_dict['extensions']))
for e in res_dict['extensions']:
self.assertIn(e['alias'], fake_extensions)
self.assertEqual(e['name'], fake_extensions[e['alias']].name)
self.assertEqual(e['alias'], fake_extensions[e['alias']].alias)
self.assertEqual(e['description'],
fake_extensions[e['alias']].__doc__)
self.assertEqual(e['updated'], FAKE_UPDATED_DATE)
self.assertEqual(e['links'], [])
self.assertEqual(6, len(e))
def test_extension_info_show(self):
self.stubs.Set(policy, 'enforce', fake_policy_enforce)
req = fakes.HTTPRequestV3.blank('/extensions/ext1-alias')
res_dict = self.controller.show(req, 'ext1-alias')
self.assertEqual(1, len(res_dict))
self.assertEqual(res_dict['extension']['name'],
fake_extensions['ext1-alias'].name)
self.assertEqual(res_dict['extension']['alias'],
fake_extensions['ext1-alias'].alias)
self.assertEqual(res_dict['extension']['description'],
fake_extensions['ext1-alias'].__doc__)
self.assertEqual(res_dict['extension']['updated'], FAKE_UPDATED_DATE)
self.assertEqual(res_dict['extension']['links'], [])
self.assertEqual(6, len(res_dict['extension']))
def test_extension_info_list_not_all_discoverable(self):
self.stubs.Set(policy, 'enforce', fake_policy_enforce_selective)
req = fakes.HTTPRequestV3.blank('/extensions')
res_dict = self.controller.index(req)
self.assertEqual(2, len(res_dict['extensions']))
for e in res_dict['extensions']:
self.assertNotEqual('ext1-alias', e['alias'])
self.assertIn(e['alias'], fake_extensions)
self.assertEqual(e['name'], fake_extensions[e['alias']].name)
self.assertEqual(e['alias'], fake_extensions[e['alias']].alias)
self.assertEqual(e['description'],
fake_extensions[e['alias']].__doc__)
self.assertEqual(e['updated'], FAKE_UPDATED_DATE)
self.assertEqual(e['links'], [])
self.assertEqual(6, len(e))
class ExtensionInfoV21Test(test.NoDBTestCase):
def setUp(self):
super(ExtensionInfoV21Test, self).setUp()
ext_info = extension_info.LoadedExtensionInfo()
ext_info.extensions = simulated_extension_list
self.controller = extension_info.ExtensionInfoController(ext_info)
self.stubs.Set(policy, 'enforce', fake_policy_enforce)
def test_extension_info_list(self):
req = fakes.HTTPRequest.blank('/extensions')
res_dict = self.controller.index(req)
self.assertEqual(12, len(res_dict['extensions']))
expected_output = copy.deepcopy(simulated_extension_list)
del expected_output['images']
del expected_output['servers']
expected_output['os-cell-capacities'] = fake_extension(
'CellCapacities', 'os-cell-capacities', '', -1)
expected_output['os-server-sort-keys'] = fake_extension(
'ServerSortKeys', 'os-server-sort-keys', '', -1)
expected_output['os-user-quotas'] = fake_extension(
'UserQuotas', 'os-user-quotas', '', -1)
expected_output['os-extended-quotas'] = fake_extension(
'ExtendedQuotas', 'os-extended-quotas', '', -1)
expected_output['os-create-server-ext'] = fake_extension(
'Createserverext', 'os-create-server-ext', '', -1)
expected_output['OS-EXT-IPS'] = fake_extension(
'ExtendedIps', 'OS-EXT-IPS', '', -1)
expected_output['OS-EXT-IPS-MAC'] = fake_extension(
'ExtendedIpsMac', 'OS-EXT-IPS-MAC', '', -1)
expected_output['os-server-list-multi-status'] = fake_extension(
'ServerListMultiStatus', 'os-server-list-multi-status', '', -1)
expected_output['os-server-start-stop'] = fake_extension(
'ServerStartStop', 'os-server-start-stop', '', -1)
for e in res_dict['extensions']:
self.assertIn(e['alias'], expected_output)
self.assertEqual(e['name'], expected_output[e['alias']].name)
self.assertEqual(e['alias'], expected_output[e['alias']].alias)
self.assertEqual(e['description'],
expected_output[e['alias']].__doc__)
self.assertEqual(e['updated'], FAKE_UPDATED_DATE)
self.assertEqual(e['links'], [])
self.assertEqual(6, len(e))
def test_extension_info_show(self):
req = fakes.HTTPRequest.blank('/extensions/os-cells')
res_dict = self.controller.show(req, 'os-cells')
self.assertEqual(1, len(res_dict))
self.assertEqual(res_dict['extension']['name'],
simulated_extension_list['os-cells'].name)
self.assertEqual(res_dict['extension']['alias'],
simulated_extension_list['os-cells'].alias)
self.assertEqual(res_dict['extension']['description'],
simulated_extension_list['os-cells'].__doc__)
self.assertEqual(res_dict['extension']['updated'], FAKE_UPDATED_DATE)
self.assertEqual(res_dict['extension']['links'], [])
self.assertEqual(6, len(res_dict['extension']))
def test_extension_info_show_servers_not_present(self):
req = fakes.HTTPRequest.blank('/extensions/servers')
self.assertRaises(webob.exc.HTTPNotFound, self.controller.show,
req, 'servers')
class ExtensionInfoPolicyEnforcementV21(test.NoDBTestCase):
def setUp(self):
super(ExtensionInfoPolicyEnforcementV21, self).setUp()
ext_info = extension_info.LoadedExtensionInfo()
ext_info.extensions = fake_extensions
self.controller = extension_info.ExtensionInfoController(ext_info)
self.req = fakes.HTTPRequest.blank('')
def _test_extension_policy_failed(self, action, *args):
rule_name = "os_compute_api:extensions"
self.policy.set_rules({rule_name: "project:non_fake"})
exc = self.assertRaises(
exception.PolicyNotAuthorized,
getattr(self.controller, action), self.req, *args)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
def test_extension_index_policy_failed(self):
self._test_extension_policy_failed('index')
def test_extension_show_policy_failed(self):
self._test_extension_policy_failed('show', 1)
|
|
import importlib
import json
from datetime import timedelta, datetime
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.core.cache import cache
from django.core.mail import mail_managers
from django.core.urlresolvers import reverse
from django.db.models import Q, Count, Case, When
from django.http import HttpResponseRedirect, HttpResponse, HttpResponseForbidden
from django.shortcuts import get_object_or_404, render, redirect
from django.utils import timezone
from django.utils.html import escape
from django.views.decorators.csrf import csrf_exempt
from grid.models import Grid
from package.forms import PackageForm, PackageExampleForm, DocumentationForm, ProjectImagesFormSet
from package.models import Category, Project, PackageExample, ProjectImage, TeamMembership
from package.repos import get_all_repos
from package.forms import TeamMembersFormSet
from profiles.models import Account, AccountType
from searchv2.builders import rebuild_project_search_index
def repo_data_for_js():
repos = [handler.serialize() for handler in get_all_repos()]
return json.dumps(repos)
def get_form_class(form_name):
bits = form_name.split('.')
form_module_name = '.'.join(bits[:-1])
form_module = importlib.import_module(form_module_name)
form_name = bits[-1]
return getattr(form_module, form_name)
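# Illustrative example (hypothetical dotted path): get_form_class("package.forms.PackageForm")
# splits the string into the module path "package.forms" and the attribute name
# "PackageForm", imports the module, and returns the form class object.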
@login_required
def add_package(request, template_name="package/package_form.html"):
if not request.user.profile.can_add_package:
return HttpResponseForbidden("permission denied")
new_package = Project()
form = PackageForm(request.POST or None, instance=new_package)
formset = TeamMembersFormSet(request.POST or None)
if form.is_valid() and formset.is_valid():
new_package = form.save()
new_package.draft_added_by = request.user
new_package.last_modified_by = request.user
new_package.save()
rebuild_project_search_index(new_package)
#new_package.fetch_metadata()
#new_package.fetch_commits()
for inlineform in formset:
if hasattr(inlineform, 'cleaned_data') and inlineform.cleaned_data:
data = inlineform.cleaned_data
account_type = AccountType.objects.get(name=data['account_type'])
account, created = Account.objects.get_or_create(
account_type=account_type,
name=data['account_name']
)
membership = TeamMembership.objects.create(account=account, project=new_package, role=data['role'])
membership.save()
return HttpResponseRedirect(reverse("package", kwargs={"slug": new_package.slug}))
return render(request, template_name, {
"form": form,
"formset": formset,
"repo_data": repo_data_for_js(),
"action": "add",
})
@login_required
def edit_package(request, slug, template_name="package/package_form.html"):
package = get_object_or_404(Project, slug=slug)
if not request.user.profile.can_edit_package(package):
return HttpResponseForbidden("permission denied")
form = PackageForm(request.POST or None, instance=package)
initial = [
{
'role': tm.role,
'account_name': tm.account.name,
'account_type': tm.account.type,
'role_confirmed_by_account': tm.role_confirmed_by_account,
'avatar_small': tm.account.avatar_small,
'initialized': True,
}
for tm in package.teammembership_set.all()
]
if request.POST:
formset = TeamMembersFormSet(request.POST)
else:
formset = TeamMembersFormSet(initial=initial)
formset.extra = 0
if form.is_valid() and formset.is_valid():
modified_package = form.save()
modified_package.last_modified_by = request.user
modified_package.save()
rebuild_project_search_index(modified_package)
for inlineform in formset:
if hasattr(inlineform, 'cleaned_data') and inlineform.cleaned_data:
data = inlineform.cleaned_data
account_type = AccountType.objects.get(name=data['account_type'])
if data['DELETE']:
account = Account.objects.get(account_type=account_type, name=data['account_name'])
membership = TeamMembership.objects.get(account=account, project=modified_package)
membership.delete()
else:
account, __ = Account.objects.get_or_create(account_type=account_type, name=data['account_name'])
membership, __ = TeamMembership.objects.get_or_create(account=account, project=modified_package)
membership.role = data['role']
membership.save()
if package.is_published:
messages.add_message(request, messages.INFO, 'Project updated successfully')
return HttpResponseRedirect(reverse("package", kwargs={"slug": modified_package.slug}))
return render(request, template_name, {
"form": form,
"formset": formset,
"package": package,
"repo_data": repo_data_for_js(),
"action": "Save",
})
@login_required
def update_package(request, slug):
package = get_object_or_404(Project, slug=slug)
package.fetch_metadata()
package.fetch_commits()
package.last_fetched = timezone.now()
messages.add_message(request, messages.INFO, 'Project updated successfully')
return HttpResponseRedirect(reverse("package", kwargs={"slug": package.slug}))
@login_required
def project_approval(request, slug, action):
project = get_object_or_404(Project, slug=slug)
project.is_awaiting_approval = action == 'request'
project.approval_request_datetime = datetime.now()
project.save()
if action == 'request':
mail_managers(
escape('New project added by @{} awaiting approval - {}'.format(
project.draft_added_by.username,
project.name
)),
'Project: {}'.format(request.build_absolute_uri(reverse('package', kwargs={'slug': project.slug})))
)
return HttpResponseRedirect(reverse("package", kwargs={"slug": project.slug}))
@login_required
def publish_project(request, slug):
project = get_object_or_404(Project, slug=slug)
try:
project.publish(publisher=request.user)
rebuild_project_search_index(project)
messages.add_message(request, messages.INFO, 'Project is published!')
return HttpResponseRedirect(reverse("package", kwargs={"slug": project.slug}))
except PermissionError:
return HttpResponseForbidden("permission denied")
@login_required
def edit_images(request, slug, template_name="package/images_form.html"):
project = get_object_or_404(Project, slug=slug)
if not request.user.profile.can_edit_package(project):
return HttpResponseForbidden("permission denied")
if request.POST:
formset = ProjectImagesFormSet(data=request.POST, files=request.FILES, project=project,)
else:
formset = ProjectImagesFormSet(project=project, queryset=ProjectImage.objects.filter(project=project))
if formset.is_valid():
formset.save()
messages.add_message(request, messages.INFO, 'Project updated successfully')
return HttpResponseRedirect(reverse("package", kwargs={"slug": project.slug}))
return render(request, template_name, {
"formset": formset,
"package": project,
"action": "Save",
})
@login_required
def add_example(request, slug, template_name="package/add_example.html"):
package = get_object_or_404(Project, slug=slug)
new_package_example = PackageExample()
form = PackageExampleForm(request.POST or None, instance=new_package_example)
if form.is_valid():
package_example = PackageExample(package=package,
title=request.POST["title"],
url=request.POST["url"])
package_example.save()
return HttpResponseRedirect(reverse("package", kwargs={"slug": package_example.package.slug}))
return render(request, template_name, {
"form": form,
"package": package
})
@login_required
def edit_example(request, slug, id, template_name="package/edit_example.html"):
package_example = get_object_or_404(PackageExample, id=id)
form = PackageExampleForm(request.POST or None, instance=package_example)
if form.is_valid():
form.save()
return HttpResponseRedirect(reverse("package", kwargs={"slug": package_example.package.slug}))
return render(request, template_name, {
"form": form,
"package": package_example.package
})
def package_autocomplete(request):
"""
Provides Package matching based on prefix matches of the name
"""
names = []
q = request.GET.get("q", "")
if q:
names = (x.name for x in Project.objects.filter(name__istartswith=q))
response = HttpResponse("\n".join(names))
setattr(response, "djangologging.suppress_output", True)
return response
def category(request, slug, template_name="package/package_grid.html"):
category_ = get_object_or_404(Category, slug=slug)
context = {
'categories': [
{
"title_plural": category_.title_plural,
"count": category_.project_set.published().count(),
"description": category_.description,
"packages": category_.project_set.published().select_related().annotate(usage_count=Count("usage"))
}
]
}
return render(request, template_name, context)
def ajax_package_list(request, template_name="package/ajax_package_list.html"):
q = request.GET.get("q", "")
packages = []
if q:
_dash = "%s-%s" % (settings.PACKAGINATOR_SEARCH_PREFIX, q)
_space = "%s %s" % (settings.PACKAGINATOR_SEARCH_PREFIX, q)
_underscore = '%s_%s' % (settings.PACKAGINATOR_SEARCH_PREFIX, q)
packages = Project.objects.filter(
Q(name__istartswith=q) |
Q(name__istartswith=_dash) |
Q(name__istartswith=_space) |
Q(name__istartswith=_underscore)
)
packages_already_added_list = []
grid_slug = request.GET.get("grid", "")
if packages and grid_slug:
grids = Grid.objects.filter(slug=grid_slug)
if grids:
grid = grids[0]
packages_already_added_list = [x['slug'] for x in grid.packages.all().values('slug')]
new_packages = tuple(packages.exclude(slug__in=packages_already_added_list))[:20]
number_of_packages = len(new_packages)
if number_of_packages < 20:
try:
old_packages = packages.filter(slug__in=packages_already_added_list)[:20 - number_of_packages]
except AssertionError:
old_packages = None
if old_packages:
old_packages = tuple(old_packages)
packages = new_packages + old_packages
else:
packages = new_packages
return render(request, template_name, {
"packages": packages,
'packages_already_added_list': packages_already_added_list,
}
)
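# Illustrative note, assuming PACKAGINATOR_SEARCH_PREFIX = "django" (an
# assumption for this example only): a query of q = "rest" matches project
# names starting with "rest", "django-rest", "django rest" or "django_rest"
# via the Q-object filter above.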
@login_required
def usage(request, slug, action):
success = False
package = get_object_or_404(Project, slug=slug)
# Update the current user's usage of the given package as specified by the
# request.
if package.usage.filter(username=request.user.username):
if action.lower() == 'add':
# The user is already using the package
success = True
change = 0
else:
# If the action was not add and the user has already specified
# they are using the package, then remove their usage.
package.usage.remove(request.user)
success = True
change = -1
else:
if action.lower() == 'lower':
# The user is not using the package
success = True
change = 0
else:
# If the action was not lower and the user is not already using
# the package then add their usage.
package.usage.add(request.user)
success = True
change = 1
# Invalidate the cache of this user's used_packages_list.
if change == 1 or change == -1:
cache_key = "sitewide_used_packages_list_%s" % request.user.pk
cache.delete(cache_key)
package.grid_clear_detail_template_cache()
# Return an ajax-appropriate response if necessary
if request.is_ajax():
response = {'success': success}
if success:
response['change'] = change
return HttpResponse(json.dumps(response))
# Intelligently determine the URL to redirect the user to based on the
# available information.
next = request.GET.get('next') or request.META.get("HTTP_REFERER") or reverse("package", kwargs={"slug": package.slug})
return HttpResponseRedirect(next)
def python3_list(request, template_name="package/python3_list.html"):
packages = Project.objects.filter(version__supports_python3=True).distinct()
packages = packages.order_by("-pypi_downloads", "-repo_watchers", "name")
values = "category, category_id, commit, commit_list, created, added_by, added_by_id, documentation_url, dpotw, grid, gridpackage, id, last_fetched, last_modified_by, last_modified_by_id, modified, packageexample, participants, pypi_downloads, pypi_url, repo_description, repo_forks, repo_url, repo_watchers, slug, name, usage, version".split(',')
values = [x.strip() for x in values]
if request.GET.get('sort') and request.GET.get('sort') not in values:
# Some people have cached older versions of this view
request.GET = request.GET.copy()
del request.GET['sort']
return render(
request,
template_name, {
"packages": packages
}
)
def package_list(request, template_name="package/package_grid.html"):
context = {
'categories': [
{
"title_plural": category.title_plural,
"count": category.project_count,
"description": category.description,
"packages": category.project_set.published().order_by("-repo_watchers", "name")
}
for category in Category.objects.annotate(
project_count=Count(Case(When(project__is_published=True, then=1)))
)
]
}
return render(request, template_name, context)
def package_detail(request, slug, template_name="package/package.html"):
package = get_object_or_404(Project, slug=slug)
no_development = package.no_development
try:
if package.category == Category.objects.get(slug='projects'):
# projects get a bye because they are a website
pypi_ancient = False
pypi_no_release = False
else:
pypi_ancient = package.pypi_ancient
pypi_no_release = package.pypi_ancient is None
warnings = no_development or pypi_ancient or pypi_no_release
except Category.DoesNotExist:
pypi_ancient = False
pypi_no_release = False
warnings = no_development
if request.GET.get("message"):
messages.add_message(request, messages.INFO, request.GET.get("message"))
if package.is_draft:
if package.is_awaiting_approval:
messages.add_message(
request,
messages.INFO,
'This project is waiting for approval.',
extra_tags='data-stick'
)
else:
messages.add_message(
request,
messages.WARNING,
'Information about this project is not published yet. This is a draft!<br>' +
'Add as much information about this project as you can. Add a logo and some screenshots, and add at least a few timeline events.<br> ' +
'When you decide it is ready, submit the project for approval.',
#' by <a href="https://google.com/">trusted users of SteemProjects</a>.'
# 'Also, learn <a href="">how you can become a trusted user</a>.',
extra_tags='draft data-stick'
)
proj_imgs = []
if package.main_img:
proj_imgs.append(package.main_img)
proj_imgs.extend(ProjectImage.objects.exclude(pk=package.main_img.pk).filter(project=package).order_by('img'))
else:
proj_imgs.extend(ProjectImage.objects.filter(project=package).order_by('img'))
all_github_accounts_of_teammembers = [
ac.pk
for profile in [ac.profile for ac in package.team_members.all() if ac.profile]
for ac in profile.account_set.all() if ac.type == Account.TYPE_GITHUB
]
can_edit_package = hasattr(request.user, "profile") and request.user.profile.can_edit_package(package)
events_on_timeline = 5
timeline_events = package.events.order_by('-date')
timeline_axis_end = timeline_axis_start = None
if timeline_events.count() > 0:
timeline_end = timeline_events.first()
timeline_start = timeline_events[events_on_timeline-1] if timeline_events.count() > events_on_timeline else timeline_events[0]
timeline_axis_start = timeline_start.date - timedelta(30)
timeline_axis_end = timeline_end.date + timedelta(30)
return render(request, template_name,
dict(
package=package,
timeline_events=timeline_events,
timeline_axis_start=timeline_axis_start,
timeline_axis_end=timeline_axis_end,
project_imgs=[pi.img for pi in proj_imgs],
pypi_ancient=pypi_ancient,
no_development=no_development,
pypi_no_release=pypi_no_release,
warnings=warnings,
latest_version=package.last_released(),
repo=package.repo,
not_team_contributors=package.contributors.exclude(pk__in=all_github_accounts_of_teammembers),
can_edit_package=can_edit_package
)
)
def int_or_0(value):
try:
return int(value)
except ValueError:
return 0
@login_required
def post_data(request, slug):
# if request.method == "POST":
# try:
# # TODO Do this this with a form, really. Duh!
# package.repo_watchers = int_or_0(request.POST.get("repo_watchers"))
# package.repo_forks = int_or_0(request.POST.get("repo_forks"))
# package.repo_description = request.POST.get("repo_description")
# package.participants = request.POST.get('contributors')
# package.fetch_commits() # also saves
# except Exception as e:
# print e
package = get_object_or_404(Project, slug=slug)
package.fetch_pypi_data()
package.repo.fetch_metadata(package)
package.repo.fetch_commits(package)
package.last_fetched = timezone.now()
package.save()
return HttpResponseRedirect(reverse("package", kwargs={"slug": package.slug}))
@login_required
def edit_documentation(request, slug, template_name="package/documentation_form.html"):
package = get_object_or_404(Project, slug=slug)
form = DocumentationForm(request.POST or None, instance=package)
if form.is_valid():
form.save()
messages.add_message(request, messages.INFO, 'Package documentation updated successfully')
return redirect(package)
return render(request, template_name,
dict(
package=package,
form=form
)
)
@csrf_exempt
def github_webhook(request):
if request.method == "POST":
data = json.loads(request.POST['payload'])
# Webhook Test
if "zen" in data:
return HttpResponse(data['hook_id'])
repo_url = data['repository']['url']
# service test
if repo_url == "http://github.com/mojombo/grit":
return HttpResponse("Service Test pass")
package = get_object_or_404(Project, repo_url=repo_url)
package.repo.fetch_commits(package)
package.last_fetched = timezone.now()
package.save()
return HttpResponse()
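# Illustrative payload shapes handled above (hypothetical values): a webhook
# ping posts payload = '{"zen": "...", "hook_id": 123}' and gets the hook_id
# echoed back, while a push for a known repository posts
# payload = '{"repository": {"url": "https://github.com/org/repo"}}' and
# triggers a commit fetch for the matching Project.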
|
|
# mako/_ast_util.py
# Copyright 2006-2021 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
ast
~~~
This is a stripped down version of Armin Ronacher's ast module.
:copyright: Copyright 2008 by Armin Ronacher.
:license: Python License.
"""
from _ast import Add
from _ast import And
from _ast import AST
from _ast import BitAnd
from _ast import BitOr
from _ast import BitXor
from _ast import Div
from _ast import Eq
from _ast import FloorDiv
from _ast import Gt
from _ast import GtE
from _ast import If
from _ast import In
from _ast import Invert
from _ast import Is
from _ast import IsNot
from _ast import LShift
from _ast import Lt
from _ast import LtE
from _ast import Mod
from _ast import Mult
from _ast import Name
from _ast import Not
from _ast import NotEq
from _ast import NotIn
from _ast import Or
from _ast import PyCF_ONLY_AST
from _ast import RShift
from _ast import Sub
from _ast import UAdd
from _ast import USub
BOOLOP_SYMBOLS = {And: "and", Or: "or"}
BINOP_SYMBOLS = {
Add: "+",
Sub: "-",
Mult: "*",
Div: "/",
FloorDiv: "//",
Mod: "%",
LShift: "<<",
RShift: ">>",
BitOr: "|",
BitAnd: "&",
BitXor: "^",
}
CMPOP_SYMBOLS = {
Eq: "==",
Gt: ">",
GtE: ">=",
In: "in",
Is: "is",
IsNot: "is not",
Lt: "<",
LtE: "<=",
NotEq: "!=",
NotIn: "not in",
}
UNARYOP_SYMBOLS = {Invert: "~", Not: "not", UAdd: "+", USub: "-"}
ALL_SYMBOLS = {}
ALL_SYMBOLS.update(BOOLOP_SYMBOLS)
ALL_SYMBOLS.update(BINOP_SYMBOLS)
ALL_SYMBOLS.update(CMPOP_SYMBOLS)
ALL_SYMBOLS.update(UNARYOP_SYMBOLS)
def parse(expr, filename="<unknown>", mode="exec"):
"""Parse an expression into an AST node."""
return compile(expr, filename, mode, PyCF_ONLY_AST)
def iter_fields(node):
"""Iterate over all fields of a node, only yielding existing fields."""
for field in node._fields:
try:
yield field, getattr(node, field)
except AttributeError:
pass
class NodeVisitor:
"""
Walks the abstract syntax tree and calls visitor functions for every node
found. The visitor functions may return values which will be forwarded
by the `visit` method.
By default, the visitor functions for the nodes are ``'visit_'`` +
class name of the node. So a `TryFinally` node visit function would
be `visit_TryFinally`. This behavior can be changed by overriding
the `get_visitor` function. If no visitor function exists for a node
(return value `None`) the `generic_visit` visitor is used instead.
Don't use the `NodeVisitor` if you want to apply changes to nodes during
traversing. For this a special visitor exists (`NodeTransformer`) that
allows modifications.
"""
def get_visitor(self, node):
"""
Return the visitor function for this node or `None` if no visitor
exists for this node. In that case the generic visit function is
used instead.
"""
method = "visit_" + node.__class__.__name__
return getattr(self, method, None)
def visit(self, node):
"""Visit a node."""
f = self.get_visitor(node)
if f is not None:
return f(node)
return self.generic_visit(node)
def generic_visit(self, node):
"""Called if no explicit visitor function exists for a node."""
for field, value in iter_fields(node):
if isinstance(value, list):
for item in value:
if isinstance(item, AST):
self.visit(item)
elif isinstance(value, AST):
self.visit(value)
class NodeTransformer(NodeVisitor):
"""
Walks the abstract syntax tree and allows modifications of nodes.
The `NodeTransformer` will walk the AST and use the return value of the
visitor functions to replace or remove the old node. If the return
value of the visitor function is `None` the node will be removed
from the previous location otherwise it's replaced with the return
value. The return value may be the original node in which case no
replacement takes place.
Here is an example transformer that rewrites all `foo` to `data['foo']`::
class RewriteName(NodeTransformer):
def visit_Name(self, node):
return copy_location(Subscript(
value=Name(id='data', ctx=Load()),
slice=Index(value=Str(s=node.id)),
ctx=node.ctx
), node)
Keep in mind that if the node you're operating on has child nodes
you must either transform the child nodes yourself or call the generic
visit function for the node first.
Nodes that were part of a collection of statements (that applies to
all statement nodes) may also return a list of nodes rather than just
a single node.
Usually you use the transformer like this::
node = YourTransformer().visit(node)
"""
def generic_visit(self, node):
for field, old_value in iter_fields(node):
old_value = getattr(node, field, None)
if isinstance(old_value, list):
new_values = []
for value in old_value:
if isinstance(value, AST):
value = self.visit(value)
if value is None:
continue
elif not isinstance(value, AST):
new_values.extend(value)
continue
new_values.append(value)
old_value[:] = new_values
elif isinstance(old_value, AST):
new_node = self.visit(old_value)
if new_node is None:
delattr(node, field)
else:
setattr(node, field, new_node)
return node
class SourceGenerator(NodeVisitor):
"""
This visitor is able to transform a well-formed syntax tree into Python
source code. For more details have a look at the docstring of the
`node_to_source` function.
"""
def __init__(self, indent_with):
self.result = []
self.indent_with = indent_with
self.indentation = 0
self.new_lines = 0
def write(self, x):
if self.new_lines:
if self.result:
self.result.append("\n" * self.new_lines)
self.result.append(self.indent_with * self.indentation)
self.new_lines = 0
self.result.append(x)
def newline(self, n=1):
self.new_lines = max(self.new_lines, n)
def body(self, statements):
self.new_line = True
self.indentation += 1
for stmt in statements:
self.visit(stmt)
self.indentation -= 1
def body_or_else(self, node):
self.body(node.body)
if node.orelse:
self.newline()
self.write("else:")
self.body(node.orelse)
def signature(self, node):
want_comma = []
def write_comma():
if want_comma:
self.write(", ")
else:
want_comma.append(True)
padding = [None] * (len(node.args) - len(node.defaults))
for arg, default in zip(node.args, padding + node.defaults):
write_comma()
self.visit(arg)
if default is not None:
self.write("=")
self.visit(default)
if node.vararg is not None:
write_comma()
self.write("*" + node.vararg.arg)
if node.kwarg is not None:
write_comma()
self.write("**" + node.kwarg.arg)
def decorators(self, node):
for decorator in node.decorator_list:
self.newline()
self.write("@")
self.visit(decorator)
# Statements
def visit_Assign(self, node):
self.newline()
for idx, target in enumerate(node.targets):
if idx:
self.write(", ")
self.visit(target)
self.write(" = ")
self.visit(node.value)
def visit_AugAssign(self, node):
self.newline()
self.visit(node.target)
self.write(BINOP_SYMBOLS[type(node.op)] + "=")
self.visit(node.value)
def visit_ImportFrom(self, node):
self.newline()
self.write("from %s%s import " % ("." * node.level, node.module))
for idx, item in enumerate(node.names):
if idx:
self.write(", ")
self.write(item)
def visit_Import(self, node):
self.newline()
for item in node.names:
self.write("import ")
self.visit(item)
def visit_Expr(self, node):
self.newline()
self.generic_visit(node)
def visit_FunctionDef(self, node):
self.newline(n=2)
self.decorators(node)
self.newline()
self.write("def %s(" % node.name)
self.signature(node.args)
self.write("):")
self.body(node.body)
def visit_ClassDef(self, node):
have_args = []
def paren_or_comma():
if have_args:
self.write(", ")
else:
have_args.append(True)
self.write("(")
self.newline(n=3)
self.decorators(node)
self.newline()
self.write("class %s" % node.name)
for base in node.bases:
paren_or_comma()
self.visit(base)
# XXX: the if here is used to keep this module compatible
# with python 2.6.
if hasattr(node, "keywords"):
for keyword in node.keywords:
paren_or_comma()
self.write(keyword.arg + "=")
self.visit(keyword.value)
if getattr(node, "starargs", None):
paren_or_comma()
self.write("*")
self.visit(node.starargs)
if getattr(node, "kwargs", None):
paren_or_comma()
self.write("**")
self.visit(node.kwargs)
self.write(have_args and "):" or ":")
self.body(node.body)
def visit_If(self, node):
self.newline()
self.write("if ")
self.visit(node.test)
self.write(":")
self.body(node.body)
while True:
else_ = node.orelse
if len(else_) == 1 and isinstance(else_[0], If):
node = else_[0]
self.newline()
self.write("elif ")
self.visit(node.test)
self.write(":")
self.body(node.body)
else:
self.newline()
self.write("else:")
self.body(else_)
break
def visit_For(self, node):
self.newline()
self.write("for ")
self.visit(node.target)
self.write(" in ")
self.visit(node.iter)
self.write(":")
self.body_or_else(node)
def visit_While(self, node):
self.newline()
self.write("while ")
self.visit(node.test)
self.write(":")
self.body_or_else(node)
def visit_With(self, node):
self.newline()
self.write("with ")
self.visit(node.context_expr)
if node.optional_vars is not None:
self.write(" as ")
self.visit(node.optional_vars)
self.write(":")
self.body(node.body)
def visit_Pass(self, node):
self.newline()
self.write("pass")
def visit_Print(self, node):
# XXX: python 2.6 only
self.newline()
self.write("print ")
want_comma = False
if node.dest is not None:
self.write(" >> ")
self.visit(node.dest)
want_comma = True
for value in node.values:
if want_comma:
self.write(", ")
self.visit(value)
want_comma = True
if not node.nl:
self.write(",")
def visit_Delete(self, node):
self.newline()
self.write("del ")
for idx, target in enumerate(node.targets):
if idx:
self.write(", ")
self.visit(target)
def visit_TryExcept(self, node):
self.newline()
self.write("try:")
self.body(node.body)
for handler in node.handlers:
self.visit(handler)
def visit_TryFinally(self, node):
self.newline()
self.write("try:")
self.body(node.body)
self.newline()
self.write("finally:")
self.body(node.finalbody)
def visit_Global(self, node):
self.newline()
self.write("global " + ", ".join(node.names))
def visit_Nonlocal(self, node):
self.newline()
self.write("nonlocal " + ", ".join(node.names))
def visit_Return(self, node):
self.newline()
self.write("return ")
self.visit(node.value)
def visit_Break(self, node):
self.newline()
self.write("break")
def visit_Continue(self, node):
self.newline()
self.write("continue")
def visit_Raise(self, node):
# XXX: Python 2.6 / 3.0 compatibility
self.newline()
self.write("raise")
if hasattr(node, "exc") and node.exc is not None:
self.write(" ")
self.visit(node.exc)
if node.cause is not None:
self.write(" from ")
self.visit(node.cause)
elif hasattr(node, "type") and node.type is not None:
self.visit(node.type)
if node.inst is not None:
self.write(", ")
self.visit(node.inst)
if node.tback is not None:
self.write(", ")
self.visit(node.tback)
# Expressions
def visit_Attribute(self, node):
self.visit(node.value)
self.write("." + node.attr)
def visit_Call(self, node):
want_comma = []
def write_comma():
if want_comma:
self.write(", ")
else:
want_comma.append(True)
self.visit(node.func)
self.write("(")
for arg in node.args:
write_comma()
self.visit(arg)
for keyword in node.keywords:
write_comma()
self.write(keyword.arg + "=")
self.visit(keyword.value)
if getattr(node, "starargs", None):
write_comma()
self.write("*")
self.visit(node.starargs)
if getattr(node, "kwargs", None):
write_comma()
self.write("**")
self.visit(node.kwargs)
self.write(")")
def visit_Name(self, node):
self.write(node.id)
def visit_NameConstant(self, node):
self.write(str(node.value))
def visit_arg(self, node):
self.write(node.arg)
def visit_Str(self, node):
self.write(repr(node.s))
def visit_Bytes(self, node):
self.write(repr(node.s))
def visit_Num(self, node):
self.write(repr(node.n))
# newly needed in Python 3.8
def visit_Constant(self, node):
self.write(repr(node.value))
def visit_Tuple(self, node):
self.write("(")
idx = -1
for idx, item in enumerate(node.elts):
if idx:
self.write(", ")
self.visit(item)
self.write(idx and ")" or ",)")
def sequence_visit(left, right):
def visit(self, node):
self.write(left)
for idx, item in enumerate(node.elts):
if idx:
self.write(", ")
self.visit(item)
self.write(right)
return visit
visit_List = sequence_visit("[", "]")
visit_Set = sequence_visit("{", "}")
del sequence_visit
def visit_Dict(self, node):
self.write("{")
for idx, (key, value) in enumerate(zip(node.keys, node.values)):
if idx:
self.write(", ")
self.visit(key)
self.write(": ")
self.visit(value)
self.write("}")
def visit_BinOp(self, node):
self.write("(")
self.visit(node.left)
self.write(" %s " % BINOP_SYMBOLS[type(node.op)])
self.visit(node.right)
self.write(")")
def visit_BoolOp(self, node):
self.write("(")
for idx, value in enumerate(node.values):
if idx:
self.write(" %s " % BOOLOP_SYMBOLS[type(node.op)])
self.visit(value)
self.write(")")
def visit_Compare(self, node):
self.write("(")
self.visit(node.left)
for op, right in zip(node.ops, node.comparators):
self.write(" %s " % CMPOP_SYMBOLS[type(op)])
self.visit(right)
self.write(")")
def visit_UnaryOp(self, node):
self.write("(")
op = UNARYOP_SYMBOLS[type(node.op)]
self.write(op)
if op == "not":
self.write(" ")
self.visit(node.operand)
self.write(")")
def visit_Subscript(self, node):
self.visit(node.value)
self.write("[")
self.visit(node.slice)
self.write("]")
def visit_Slice(self, node):
if node.lower is not None:
self.visit(node.lower)
self.write(":")
if node.upper is not None:
self.visit(node.upper)
if node.step is not None:
self.write(":")
if not (isinstance(node.step, Name) and node.step.id == "None"):
self.visit(node.step)
def visit_ExtSlice(self, node):
for idx, item in enumerate(node.dims):
if idx:
self.write(", ")
self.visit(item)
def visit_Yield(self, node):
self.write("yield ")
self.visit(node.value)
def visit_Lambda(self, node):
self.write("lambda ")
self.signature(node.args)
self.write(": ")
self.visit(node.body)
def visit_Ellipsis(self, node):
self.write("Ellipsis")
def generator_visit(left, right):
def visit(self, node):
self.write(left)
self.visit(node.elt)
for comprehension in node.generators:
self.visit(comprehension)
self.write(right)
return visit
visit_ListComp = generator_visit("[", "]")
visit_GeneratorExp = generator_visit("(", ")")
visit_SetComp = generator_visit("{", "}")
del generator_visit
def visit_DictComp(self, node):
self.write("{")
self.visit(node.key)
self.write(": ")
self.visit(node.value)
for comprehension in node.generators:
self.visit(comprehension)
self.write("}")
def visit_IfExp(self, node):
self.visit(node.body)
self.write(" if ")
self.visit(node.test)
self.write(" else ")
self.visit(node.orelse)
def visit_Starred(self, node):
self.write("*")
self.visit(node.value)
def visit_Repr(self, node):
# XXX: python 2.6 only
self.write("`")
self.visit(node.value)
self.write("`")
# Helper Nodes
def visit_alias(self, node):
self.write(node.name)
if node.asname is not None:
self.write(" as " + node.asname)
def visit_comprehension(self, node):
self.write(" for ")
self.visit(node.target)
self.write(" in ")
self.visit(node.iter)
if node.ifs:
for if_ in node.ifs:
self.write(" if ")
self.visit(if_)
def visit_excepthandler(self, node):
self.newline()
self.write("except")
if node.type is not None:
self.write(" ")
self.visit(node.type)
if node.name is not None:
self.write(" as ")
self.visit(node.name)
self.write(":")
self.body(node.body)
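# Minimal round-trip sketch (not part of the original module): parse an
# expression with the `parse` helper above and regenerate source text with
# SourceGenerator. The indent string here is an arbitrary choice.
if __name__ == "__main__":
    _gen = SourceGenerator(indent_with="    ")
    _gen.visit(parse("a + b * c").body[0])
    # Parenthesization is made explicit by visit_BinOp: (a + (b * c))
    print("".join(_gen.result))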
|
|
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
from __future__ import absolute_import
import logging
import os.path
import re
from pipenv.patched.notpip._vendor.packaging.version import parse as parse_version
from pipenv.patched.notpip._vendor.six.moves.urllib import parse as urllib_parse
from pipenv.patched.notpip._vendor.six.moves.urllib import request as urllib_request
from pipenv.patched.notpip._internal.exceptions import BadCommand
from pipenv.patched.notpip._internal.utils.misc import display_path, hide_url
from pipenv.patched.notpip._internal.utils.subprocess import make_command
from pipenv.patched.notpip._internal.utils.temp_dir import TempDirectory
from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING
from pipenv.patched.notpip._internal.vcs.versioncontrol import (
RemoteNotFoundError,
VersionControl,
find_path_to_setup_from_repo_root,
vcs,
)
if MYPY_CHECK_RUNNING:
from typing import Optional, Tuple
from pipenv.patched.notpip._internal.utils.misc import HiddenText
from pipenv.patched.notpip._internal.vcs.versioncontrol import AuthInfo, RevOptions
urlsplit = urllib_parse.urlsplit
urlunsplit = urllib_parse.urlunsplit
logger = logging.getLogger(__name__)
HASH_REGEX = re.compile('^[a-fA-F0-9]{40}$')
def looks_like_hash(sha):
return bool(HASH_REGEX.match(sha))
class Git(VersionControl):
name = 'git'
dirname = '.git'
repo_name = 'clone'
schemes = (
'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file',
)
# Prevent the user's environment variables from interfering with pip:
# https://github.com/pypa/pip/issues/1130
unset_environ = ('GIT_DIR', 'GIT_WORK_TREE')
default_arg_rev = 'HEAD'
@staticmethod
def get_base_rev_args(rev):
return [rev]
def is_immutable_rev_checkout(self, url, dest):
# type: (str, str) -> bool
_, rev_options = self.get_url_rev_options(hide_url(url))
if not rev_options.rev:
return False
if not self.is_commit_id_equal(dest, rev_options.rev):
# the current commit is different from rev,
# which means rev was something other than a commit hash
return False
# return False in the rare case rev is both a commit hash
# and a tag or a branch; we don't want to cache in that case
# because that branch/tag could point to something else in the future
is_tag_or_branch = bool(
self.get_revision_sha(dest, rev_options.rev)[0]
)
return not is_tag_or_branch
def get_git_version(self):
VERSION_PFX = 'git version '
version = self.run_command(['version'], show_stdout=False)
if version.startswith(VERSION_PFX):
version = version[len(VERSION_PFX):].split()[0]
else:
version = ''
# get the first 3 positions of the git version because
# on Windows it is x.y.z.windows.t, and that parses as a
# LegacyVersion, which is always smaller than a Version.
version = '.'.join(version.split('.')[:3])
return parse_version(version)
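# For illustration (hypothetical version string, not part of the original
# module): 'git version 2.24.1.windows.2' -> prefix stripped ->
# '2.24.1.windows.2' -> first three components -> '2.24.1', which
# parse_version() turns into a comparable Version object.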
@classmethod
def get_current_branch(cls, location):
"""
Return the current branch, or None if HEAD isn't at a branch
(e.g. detached HEAD).
"""
# git-symbolic-ref exits with empty stdout if "HEAD" is a detached
# HEAD rather than a symbolic ref. In addition, the -q causes the
# command to exit with status code 1 instead of 128 in this case
# and to suppress the message to stderr.
args = ['symbolic-ref', '-q', 'HEAD']
output = cls.run_command(
args, extra_ok_returncodes=(1, ), show_stdout=False, cwd=location,
)
ref = output.strip()
if ref.startswith('refs/heads/'):
return ref[len('refs/heads/'):]
return None
def export(self, location, url):
# type: (str, HiddenText) -> None
"""Export the Git repository at the url to the destination location"""
if not location.endswith('/'):
location = location + '/'
with TempDirectory(kind="export") as temp_dir:
self.unpack(temp_dir.path, url=url)
self.run_command(
['checkout-index', '-a', '-f', '--prefix', location],
show_stdout=False, cwd=temp_dir.path
)
@classmethod
def get_revision_sha(cls, dest, rev):
"""
Return (sha_or_none, is_branch), where sha_or_none is a commit hash
if the revision names a remote branch or tag, otherwise None.
Args:
dest: the repository directory.
rev: the revision name.
"""
# Pass rev to pre-filter the list.
output = cls.run_command(['show-ref', rev], cwd=dest,
show_stdout=False, on_returncode='ignore')
refs = {}
for line in output.strip().splitlines():
try:
sha, ref = line.split()
except ValueError:
# Include the offending line to simplify troubleshooting if
# this error ever occurs.
raise ValueError('unexpected show-ref line: {!r}'.format(line))
refs[ref] = sha
branch_ref = 'refs/remotes/origin/{}'.format(rev)
tag_ref = 'refs/tags/{}'.format(rev)
sha = refs.get(branch_ref)
if sha is not None:
return (sha, True)
sha = refs.get(tag_ref)
return (sha, False)
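# For illustration, `git show-ref <rev>` emits "<sha> <ref>" lines, e.g.
# (hypothetical values):
#   1111111111111111111111111111111111111111 refs/remotes/origin/master
#   2222222222222222222222222222222222222222 refs/tags/v1.0
# A branch match returns (sha, True); a tag match returns (sha, False).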
@classmethod
def resolve_revision(cls, dest, url, rev_options):
# type: (str, HiddenText, RevOptions) -> RevOptions
"""
Resolve a revision to a new RevOptions object with the SHA1 of the
branch, tag, or ref if found.
Args:
rev_options: a RevOptions object.
"""
rev = rev_options.arg_rev
# The arg_rev property's implementation for Git ensures that the
# rev return value is always non-None.
assert rev is not None
sha, is_branch = cls.get_revision_sha(dest, rev)
if sha is not None:
rev_options = rev_options.make_new(sha)
rev_options.branch_name = rev if is_branch else None
return rev_options
# Do not show a warning for the common case of something that has
# the form of a Git commit hash.
if not looks_like_hash(rev):
logger.warning(
"Did not find branch or tag '%s', assuming revision or ref.",
rev,
)
if not rev.startswith('refs/'):
return rev_options
# If it looks like a ref, we have to fetch it explicitly.
cls.run_command(
make_command('fetch', '-q', url, rev_options.to_args()),
cwd=dest,
)
# Change the revision to the SHA of the ref we fetched
sha = cls.get_revision(dest, rev='FETCH_HEAD')
rev_options = rev_options.make_new(sha)
return rev_options
@classmethod
def is_commit_id_equal(cls, dest, name):
"""
Return whether the current commit hash equals the given name.
Args:
dest: the repository directory.
name: a string name.
"""
if not name:
# Then avoid an unnecessary subprocess call.
return False
return cls.get_revision(dest) == name
def fetch_new(self, dest, url, rev_options):
# type: (str, HiddenText, RevOptions) -> None
rev_display = rev_options.to_display()
logger.info('Cloning %s%s to %s', url, rev_display, display_path(dest))
self.run_command(make_command('clone', '-q', url, dest))
if rev_options.rev:
# Then a specific revision was requested.
rev_options = self.resolve_revision(dest, url, rev_options)
branch_name = getattr(rev_options, 'branch_name', None)
if branch_name is None:
# Only do a checkout if the current commit id doesn't match
# the requested revision.
if not self.is_commit_id_equal(dest, rev_options.rev):
cmd_args = make_command(
'checkout', '-q', rev_options.to_args(),
)
self.run_command(cmd_args, cwd=dest)
elif self.get_current_branch(dest) != branch_name:
# Then a specific branch was requested, and that branch
# is not yet checked out.
track_branch = 'origin/{}'.format(branch_name)
cmd_args = [
'checkout', '-b', branch_name, '--track', track_branch,
]
self.run_command(cmd_args, cwd=dest)
#: repo may contain submodules
self.update_submodules(dest)
def switch(self, dest, url, rev_options):
# type: (str, HiddenText, RevOptions) -> None
self.run_command(
make_command('config', 'remote.origin.url', url),
cwd=dest,
)
cmd_args = make_command('checkout', '-q', rev_options.to_args())
self.run_command(cmd_args, cwd=dest)
self.update_submodules(dest)
def update(self, dest, url, rev_options):
# type: (str, HiddenText, RevOptions) -> None
# First fetch changes from the default remote
if self.get_git_version() >= parse_version('1.9.0'):
# fetch tags in addition to everything else
self.run_command(['fetch', '-q', '--tags'], cwd=dest)
else:
self.run_command(['fetch', '-q'], cwd=dest)
# Then reset to wanted revision (maybe even origin/master)
rev_options = self.resolve_revision(dest, url, rev_options)
cmd_args = make_command('reset', '--hard', '-q', rev_options.to_args())
self.run_command(cmd_args, cwd=dest)
#: update submodules
self.update_submodules(dest)
@classmethod
def get_remote_url(cls, location):
"""
Return URL of the first remote encountered.
Raises RemoteNotFoundError if the repository does not have a remote
url configured.
"""
# We need to pass 1 for extra_ok_returncodes since the command
# exits with return code 1 if there are no matching lines.
stdout = cls.run_command(
['config', '--get-regexp', r'remote\..*\.url'],
extra_ok_returncodes=(1, ), show_stdout=False, cwd=location,
)
remotes = stdout.splitlines()
try:
found_remote = remotes[0]
except IndexError:
raise RemoteNotFoundError
for remote in remotes:
if remote.startswith('remote.origin.url '):
found_remote = remote
break
url = found_remote.split(' ')[1]
return url.strip()
@classmethod
def get_revision(cls, location, rev=None):
if rev is None:
rev = 'HEAD'
current_rev = cls.run_command(
['rev-parse', rev], show_stdout=False, cwd=location,
)
return current_rev.strip()
@classmethod
def get_subdirectory(cls, location):
"""
Return the path to setup.py, relative to the repo root.
Return None if setup.py is in the repo root.
"""
# find the repo root
git_dir = cls.run_command(
['rev-parse', '--git-dir'],
show_stdout=False, cwd=location).strip()
if not os.path.isabs(git_dir):
git_dir = os.path.join(location, git_dir)
repo_root = os.path.abspath(os.path.join(git_dir, '..'))
return find_path_to_setup_from_repo_root(location, repo_root)
@classmethod
def get_url_rev_and_auth(cls, url):
# type: (str) -> Tuple[str, Optional[str], AuthInfo]
"""
Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
That's required because, although such URLs use SSH, some hosts
(e.g. GitHub) don't accept an ssh:// scheme. We still need a scheme for
parsing, so it is removed again afterwards and the URL is returned as a stub.
"""
# Works around an apparent Git bug
# (see https://article.gmane.org/gmane.comp.version-control.git/146500)
scheme, netloc, path, query, fragment = urlsplit(url)
if scheme.endswith('file'):
initial_slashes = path[:-len(path.lstrip('/'))]
newpath = (
initial_slashes +
urllib_request.url2pathname(path)
.replace('\\', '/').lstrip('/')
)
url = urlunsplit((scheme, netloc, newpath, query, fragment))
after_plus = scheme.find('+') + 1
url = scheme[:after_plus] + urlunsplit(
(scheme[after_plus:], netloc, newpath, query, fragment),
)
if '://' not in url:
assert 'file:' not in url
url = url.replace('git+', 'git+ssh://')
url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url)
url = url.replace('ssh://', '')
else:
url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url)
return url, rev, user_pass
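# For illustration (hypothetical URL, not part of the original module):
# 'git+user@hostname:user/repo.git' contains no '://', so it is rewritten to
# 'git+ssh://user@hostname:user/repo.git' for parsing by the superclass, and
# the temporary 'ssh://' is stripped again before the URL is returned.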
@classmethod
def update_submodules(cls, location):
if not os.path.exists(os.path.join(location, '.gitmodules')):
return
cls.run_command(
['submodule', 'update', '--init', '--recursive', '-q'],
cwd=location,
)
@classmethod
def controls_location(cls, location):
if super(Git, cls).controls_location(location):
return True
try:
r = cls.run_command(['rev-parse'],
cwd=location,
show_stdout=False,
on_returncode='ignore',
log_failed_cmd=False)
return not r
except BadCommand:
logger.debug("could not determine if %s is under git control "
"because git is not available", location)
return False
vcs.register(Git)
|
|
"""Common settings and globals."""
from os.path import abspath, basename, dirname, join, normpath
from sys import path
from os import environ
# Normally you should not import ANYTHING from Django directly
# into your settings, but ImproperlyConfigured is an exception.
from django.core.exceptions import ImproperlyConfigured
def get_env_setting(setting):
""" Get the environment setting or return exception """
try:
return environ[setting]
except KeyError:
error_msg = "Set the %s env variable" % setting
raise ImproperlyConfigured(error_msg)
########## PATH CONFIGURATION
# Absolute filesystem path to the Django project directory:
DJANGO_ROOT = dirname(dirname(dirname(abspath(__file__))))
# Absolute filesystem path to the top-level project folder:
SITE_ROOT = dirname(DJANGO_ROOT)
# Site name:
SITE_NAME = basename(DJANGO_ROOT)
# Add our project to the Python path so we don't need to include the project
# name in our dotted import paths:
path.append(DJANGO_ROOT)
########## END PATH CONFIGURATION
########## DEBUG CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = False
########## END DEBUG CONFIGURATION
########## MANAGER CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
('Gregory Favre', 'info@gregoryfavre.ch'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
########## END MANAGER CONFIGURATION
########## DATABASE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.',
'NAME': '',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
########## END DATABASE CONFIGURATION
########## GENERAL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#time-zone
TIME_ZONE = 'Europe/Zurich'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'fr-CH'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
########## END GENERAL CONFIGURATION
########## MEDIA CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = normpath(join(SITE_ROOT, 'media'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
########## END MEDIA CONFIGURATION
########## STATIC FILE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = normpath(join(SITE_ROOT, 'staticfiles'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'compressor.finders.CompressorFinder',
)
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
STATICFILES_DIRS = [
join(DJANGO_ROOT, "static"),
]
########## END STATIC FILE CONFIGURATION
########## SECRET CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: The key is read from the environment; never hard-code it or commit it.
SECRET_KEY = get_env_setting('SECRET_KEY')
########## END SECRET CONFIGURATION
########## SITE CONFIGURATION
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
########## END SITE CONFIGURATION
########## FIXTURE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
normpath(join(SITE_ROOT, 'fixtures')),
)
########## END FIXTURE CONFIGURATION
########## TEMPLATE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'DIRS': [
normpath(join(DJANGO_ROOT, 'templates')),
],
'OPTIONS': {
'context_processors': (
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.request',
'sekizai.context_processors.sekizai',
)
}
},
]
########## END TEMPLATE CONFIGURATION
########## MIDDLEWARE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#middleware-classes
MIDDLEWARE_CLASSES = (
# Default Django middleware.
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
)
########## END MIDDLEWARE CONFIGURATION
########## URL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#root-urlconf
ROOT_URLCONF = '%s.urls' % SITE_NAME
########## END URL CONFIGURATION
########## APP CONFIGURATION
DJANGO_APPS = [
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Useful template tags:
# 'django.contrib.humanize',
# Admin panel and documentation:
'django.contrib.admin',
# 'django.contrib.admindocs',
]
THIRD_PARTY_APPS = [
'captcha', # recaptcha
'compressor',
'sekizai',
]
# Apps specific for this project go here.
LOCAL_APPS = [
'contact',
]
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
########## END APP CONFIGURATION
########## LOGGING CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#logging
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
########## END LOGGING CONFIGURATION
########## WSGI CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = '%s.wsgi.application' % SITE_NAME
########## END WSGI CONFIGURATION
RECAPTCHA_PUBLIC_KEY = get_env_setting('RECAPTCHA_PUBLIC_KEY')
RECAPTCHA_PRIVATE_KEY = get_env_setting('RECAPTCHA_PRIVATE_KEY')
|
|
#!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Nicira Networks, Inc.
# Copyright 2011 Citrix Systems
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# @author: Somik Behera, Nicira Networks, Inc.
# @author: Brad Hall, Nicira Networks, Inc.
# @author: Salvatore Orlando, Citrix
""" Functions providing implementation for CLI commands. """
import logging
import os
import sys
FORMAT = "json"
LOG = logging.getLogger('quantum.client.cli_lib')
class OutputTemplate(object):
""" A class for generating simple templated output.
Based on Python's string templating mechanism.
Templates can express attributes on objects, such as network.id, and can
be nested, allowing iteration over inner templates.
Examples:
1) template with class attributes
Name: %(person.name)s \n
Surname: %(person.surname)s \n
2) template with iteration
Telephone numbers: \n
%(phone_numbers|Telephone number: %(number)s)s
3) template with iteration and class attributes
Addresses: \n
%(Addresses|Street: %(address.street)s\nNumber: %(address.number)s)s
Instances of this class are initialized with a template string and
the dictionary for performing substitution. The class implements the
__str__ method, so it can be directly printed.
"""
def __init__(self, template, data):
self._template = template
self.data = data
def __str__(self):
return self._template % self
def __getitem__(self, key):
items = key.split("|")
if len(items) == 1:
return self._make_attribute(key)
else:
# Note(salvatore-orlando): items[0] must be subscriptable
return self._make_list(self.data[items[0]], items[1])
def _make_attribute(self, item):
""" Renders an entity attribute key in the template.
e.g.: entity.attribute
"""
items = item.split('.')
if len(items) == 1:
return self.data[item]
elif len(items) == 2:
return self.data[items[0]][items[1]]
def _make_list(self, items, inner_template):
""" Renders a list key in the template.
e.g.: %(list|item data:%(item))
"""
#make sure list is subscriptable
if not hasattr(items, '__getitem__'):
raise Exception("Element is not subscriptable")
return "\n".join([inner_template % item for item in items])
class CmdOutputTemplate(OutputTemplate):
""" This class provides templated output for CLI commands.
Extends OutputTemplate, loading a different template for each command.
"""
_templates = {
"list_nets": "Virtual Networks for Tenant %(tenant_id)s\n" +
"%(networks|\tNetwork ID: %(id)s)s",
"show_net": "Network ID: %(network.id)s\n" +
"network Name: %(network.name)s",
"create_net": "Created a new Virtual Network with ID: " +
"%(network_id)s\n" +
"for Tenant: %(tenant_id)s",
"update_net": "Updated Virtual Network with ID: %(network.id)s\n" +
"for Tenant: %(tenant_id)s\n",
"delete_net": "Deleted Virtual Network with ID: %(network_id)s\n" +
"for Tenant %(tenant_id)s",
"list_ports": "Ports on Virtual Network: %(network_id)s\n" +
"for Tenant: %(tenant_id)s\n" +
"%(ports|\tLogical Port: %(id)s)s",
"create_port": "Created new Logical Port with ID: %(port_id)s\n" +
"on Virtual Network: %(network_id)s\n" +
"for Tenant: %(tenant_id)s",
"show_port": "Logical Port ID: %(port.id)s\n" +
"administrative State: %(port.state)s\n" +
"interface: %(port.attachment)s\n" +
"on Virtual Network: %(network_id)s\n" +
"for Tenant: %(tenant_id)s",
"update_port": "Updated Logical Port " +
"with ID: %(port.id)s\n" +
"on Virtual Network: %(network_id)s\n" +
"for tenant: %(tenant_id)s",
"delete_port": "Deleted Logical Port with ID: %(port_id)s\n" +
"on Virtual Network: %(network_id)s\n" +
"for Tenant: %(tenant_id)s",
"plug_iface": "Plugged interface %(attachment)s\n" +
"into Logical Port: %(port_id)s\n" +
"on Virtual Network: %(network_id)s\n" +
"for Tenant: %(tenant_id)s",
"unplug_iface": "Unplugged interface from Logical Port:" +
"%(port_id)s\n" +
"on Virtual Network: %(network_id)s\n" +
"for Tenant: %(tenant_id)s"}
def __init__(self, cmd, data):
super(CmdOutputTemplate, self).__init__(self._templates[cmd], data)
def _handle_exception(ex):
LOG.exception(sys.exc_info())
print "Exception:%s - %s" % (sys.exc_info()[0], sys.exc_info()[1])
status_code = None
message = None
# Retrieve dict at 1st element of tuple at last argument
if ex.args and isinstance(ex.args[-1][0], dict):
status_code = ex.args[-1][0].get('status_code', None)
message = ex.args[-1][0].get('message', None)
msg_1 = "Command failed with error code: %s" \
% (status_code or '<missing>')
msg_2 = "Error message:%s" % (message or '<missing>')
LOG.exception(msg_1 + "-" + msg_2)
print msg_1
print msg_2
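# The handler above assumes server-side exceptions carry their payload as a
# one-element tuple in the last positional argument, e.g. (hypothetical):
#     ex.args[-1] == ({'status_code': 404, 'message': 'Network not found'},)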
def prepare_output(cmd, tenant_id, response):
LOG.debug("Preparing output for response:%s", response)
response['tenant_id'] = tenant_id
output = str(CmdOutputTemplate(cmd, response))
LOG.debug("Finished preparing output for command:%s", cmd)
return output
def list_nets(client, *args):
tenant_id = args[0]
res = client.list_networks()
LOG.debug("Operation 'list_networks' executed.")
output = prepare_output("list_nets", tenant_id, res)
print output
def create_net(client, *args):
tenant_id, name = args
data = {'network': {'name': name}}
new_net_id = None
try:
res = client.create_network(data)
new_net_id = res["network"]["id"]
LOG.debug("Operation 'create_network' executed.")
output = prepare_output("create_net", tenant_id,
dict(network_id=new_net_id))
print output
except Exception as ex:
_handle_exception(ex)
def delete_net(client, *args):
tenant_id, network_id = args
try:
client.delete_network(network_id)
LOG.debug("Operation 'delete_network' executed.")
output = prepare_output("delete_net", tenant_id,
dict(network_id=network_id))
print output
except Exception as ex:
_handle_exception(ex)
def show_net(client, *args):
tenant_id, network_id = args
try:
#NOTE(salvatore-orlando) changed for returning exclusively
# output for GET /networks/{net-id} API operation
res = client.show_network_details(network_id)["network"]
LOG.debug("Operation 'show_network_details' executed.")
output = prepare_output("show_net", tenant_id,
dict(network=res))
print output
except Exception as ex:
_handle_exception(ex)
def update_net(client, *args):
tenant_id, network_id, param_data = args
data = {'network': {}}
for kv in param_data.split(","):
k, v = kv.split("=")
data['network'][k] = v
data['network']['id'] = network_id
try:
client.update_network(network_id, data)
LOG.debug("Operation 'update_network' executed.")
# Response has no body. Use data for populating output
output = prepare_output("update_net", tenant_id, data)
print output
except Exception as ex:
_handle_exception(ex)
def list_ports(client, *args):
tenant_id, network_id = args
try:
ports = client.list_ports(network_id)
LOG.debug("Operation 'list_ports' executed.")
data = ports
data['network_id'] = network_id
output = prepare_output("list_ports", tenant_id, data)
print output
except Exception as ex:
_handle_exception(ex)
def create_port(client, *args):
tenant_id, network_id = args
try:
res = client.create_port(network_id)
LOG.debug("Operation 'create_port' executed.")
new_port_id = res["port"]["id"]
output = prepare_output("create_port", tenant_id,
dict(network_id=network_id,
port_id=new_port_id))
print output
except Exception as ex:
_handle_exception(ex)
def delete_port(client, *args):
tenant_id, network_id, port_id = args
try:
client.delete_port(network_id, port_id)
LOG.debug("Operation 'delete_port' executed.")
output = prepare_output("delete_port", tenant_id,
dict(network_id=network_id,
port_id=port_id))
print output
except Exception as ex:
_handle_exception(ex)
return
def show_port(client, *args):
tenant_id, network_id, port_id = args
try:
port = client.show_port_details(network_id, port_id)["port"]
LOG.debug("Operation 'list_port_details' executed.")
#NOTE(salvatore-orland): current API implementation does not
#return attachment with GET operation on port. Once API alignment
#branch is merged, update client to use the detail action.
# (danwent) Until then, just make additonal webservice call.
attach = client.show_port_attachment(network_id, port_id)['attachment']
if "id" in attach:
port['attachment'] = attach['id']
else:
port['attachment'] = '<none>'
output = prepare_output("show_port", tenant_id,
dict(network_id=network_id,
port=port))
print output
except Exception as ex:
_handle_exception(ex)
def update_port(client, *args):
tenant_id, network_id, port_id, param_data = args
data = {'port': {}}
for kv in param_data.split(","):
k, v = kv.split("=")
data['port'][k] = v
data['network_id'] = network_id
data['port']['id'] = port_id
try:
client.update_port(network_id, port_id, data)
LOG.debug("Operation 'udpate_port' executed.")
# Response has no body. Use data for populating output
output = prepare_output("update_port", tenant_id, data)
print output
except Exception as ex:
_handle_exception(ex)
def plug_iface(client, *args):
tenant_id, network_id, port_id, attachment = args
try:
data = {'attachment': {'id': '%s' % attachment}}
client.attach_resource(network_id, port_id, data)
LOG.debug("Operation 'attach_resource' executed.")
output = prepare_output("plug_iface", tenant_id,
dict(network_id=network_id,
port_id=port_id,
attachment=attachment))
print output
except Exception as ex:
_handle_exception(ex)
def unplug_iface(client, *args):
tenant_id, network_id, port_id = args
try:
client.detach_resource(network_id, port_id)
LOG.debug("Operation 'detach_resource' executed.")
output = prepare_output("unplug_iface", tenant_id,
dict(network_id=network_id,
port_id=port_id))
print output
except Exception as ex:
_handle_exception(ex)
|
|
# -*- coding: utf-8 -*-
"""
This file is part of the PROPheT tool.
Copyright (C) 2016: MKLab <pmitzias@iti.gr; mriga@iti.gr; skontopo@iti.gr>
http://mklab.iti.gr/project/prophet-ontology-populator
https://github.com/MKLab-ITI/prophet
Licensed under the Apache License, Version 2.0 (the "License").
You may use this file in compliance with the License.
For more details, see LICENCE file.
"""
# Form implementation generated from reading ui file 'ui\SearchByInstanceWizard.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_WizardSearchByInstanceName(object):
def setupUi(self, WizardSearchByInstanceName):
WizardSearchByInstanceName.setObjectName(_fromUtf8("WizardSearchByInstanceName"))
WizardSearchByInstanceName.resize(684, 513)
WizardSearchByInstanceName.setFocusPolicy(QtCore.Qt.TabFocus)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/images/search_by_existing_instance.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
WizardSearchByInstanceName.setWindowIcon(icon)
WizardSearchByInstanceName.setWizardStyle(QtGui.QWizard.ClassicStyle)
WizardSearchByInstanceName.setOptions(QtGui.QWizard.NoCancelButton)
self.wizardPage1 = QtGui.QWizardPage()
self.wizardPage1.setTitle(_fromUtf8(""))
self.wizardPage1.setObjectName(_fromUtf8("wizardPage1"))
self.gridLayout = QtGui.QGridLayout(self.wizardPage1)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.lblSearchInstanceInEM = QtGui.QLabel(self.wizardPage1)
font = QtGui.QFont()
font.setPointSize(10)
self.lblSearchInstanceInEM.setFont(font)
self.lblSearchInstanceInEM.setWordWrap(True)
self.lblSearchInstanceInEM.setObjectName(_fromUtf8("lblSearchInstanceInEM"))
self.gridLayout.addWidget(self.lblSearchInstanceInEM, 0, 0, 1, 1)
self.treeWidgetSelectInstanceToSearchFor = QtGui.QTreeWidget(self.wizardPage1)
self.treeWidgetSelectInstanceToSearchFor.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.treeWidgetSelectInstanceToSearchFor.setAlternatingRowColors(True)
self.treeWidgetSelectInstanceToSearchFor.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
self.treeWidgetSelectInstanceToSearchFor.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
self.treeWidgetSelectInstanceToSearchFor.setObjectName(_fromUtf8("treeWidgetSelectInstanceToSearchFor"))
self.treeWidgetSelectInstanceToSearchFor.header().setVisible(False)
self.gridLayout.addWidget(self.treeWidgetSelectInstanceToSearchFor, 1, 0, 1, 1)
self.horizontalLayout_3 = QtGui.QHBoxLayout()
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
self.lineEditSearchTreePage1 = QtGui.QLineEdit(self.wizardPage1)
self.lineEditSearchTreePage1.setEnabled(True)
self.lineEditSearchTreePage1.setObjectName(_fromUtf8("lineEditSearchTreePage1"))
self.horizontalLayout_3.addWidget(self.lineEditSearchTreePage1)
self.btnExpandAll_page1 = QtGui.QPushButton(self.wizardPage1)
self.btnExpandAll_page1.setEnabled(True)
self.btnExpandAll_page1.setMinimumSize(QtCore.QSize(53, 0))
self.btnExpandAll_page1.setMaximumSize(QtCore.QSize(53, 16777215))
self.btnExpandAll_page1.setObjectName(_fromUtf8("btnExpandAll_page1"))
self.horizontalLayout_3.addWidget(self.btnExpandAll_page1)
self.btnCollapseAll_page1 = QtGui.QPushButton(self.wizardPage1)
self.btnCollapseAll_page1.setEnabled(True)
self.btnCollapseAll_page1.setMinimumSize(QtCore.QSize(53, 0))
self.btnCollapseAll_page1.setMaximumSize(QtCore.QSize(53, 16777215))
self.btnCollapseAll_page1.setObjectName(_fromUtf8("btnCollapseAll_page1"))
self.horizontalLayout_3.addWidget(self.btnCollapseAll_page1)
self.gridLayout.addLayout(self.horizontalLayout_3, 2, 0, 1, 1)
WizardSearchByInstanceName.addPage(self.wizardPage1)
self.wizardPage2 = QtGui.QWizardPage()
self.wizardPage2.setTitle(_fromUtf8(""))
self.wizardPage2.setObjectName(_fromUtf8("wizardPage2"))
self.gridLayout_2 = QtGui.QGridLayout(self.wizardPage2)
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
self.lblSearchForInstanceClassesFound = QtGui.QLabel(self.wizardPage2)
font = QtGui.QFont()
font.setPointSize(10)
self.lblSearchForInstanceClassesFound.setFont(font)
self.lblSearchForInstanceClassesFound.setWordWrap(True)
self.lblSearchForInstanceClassesFound.setObjectName(_fromUtf8("lblSearchForInstanceClassesFound"))
self.gridLayout_2.addWidget(self.lblSearchForInstanceClassesFound, 0, 0, 1, 1)
self.treeWidgetSelectClasses = QtGui.QTreeWidget(self.wizardPage2)
self.treeWidgetSelectClasses.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.treeWidgetSelectClasses.setAlternatingRowColors(True)
self.treeWidgetSelectClasses.setSelectionMode(QtGui.QAbstractItemView.ExtendedSelection)
self.treeWidgetSelectClasses.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
self.treeWidgetSelectClasses.setObjectName(_fromUtf8("treeWidgetSelectClasses"))
self.treeWidgetSelectClasses.header().setVisible(False)
self.gridLayout_2.addWidget(self.treeWidgetSelectClasses, 1, 0, 1, 1)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.btnAllPage2 = QtGui.QPushButton(self.wizardPage2)
self.btnAllPage2.setMaximumSize(QtCore.QSize(35, 16777215))
self.btnAllPage2.setFlat(False)
self.btnAllPage2.setObjectName(_fromUtf8("btnAllPage2"))
self.horizontalLayout.addWidget(self.btnAllPage2)
self.btnNonePage2 = QtGui.QPushButton(self.wizardPage2)
self.btnNonePage2.setMaximumSize(QtCore.QSize(35, 16777215))
self.btnNonePage2.setObjectName(_fromUtf8("btnNonePage2"))
self.horizontalLayout.addWidget(self.btnNonePage2)
self.lineEditSearchTreePage2 = QtGui.QLineEdit(self.wizardPage2)
self.lineEditSearchTreePage2.setEnabled(False)
self.lineEditSearchTreePage2.setObjectName(_fromUtf8("lineEditSearchTreePage2"))
self.horizontalLayout.addWidget(self.lineEditSearchTreePage2)
self.gridLayout_2.addLayout(self.horizontalLayout, 2, 0, 1, 1)
WizardSearchByInstanceName.addPage(self.wizardPage2)
self.wizardPage3 = QtGui.QWizardPage()
self.wizardPage3.setObjectName(_fromUtf8("wizardPage3"))
self.gridLayout_6 = QtGui.QGridLayout(self.wizardPage3)
self.gridLayout_6.setObjectName(_fromUtf8("gridLayout_6"))
self.lblSearchForInstanceInstancesFound = QtGui.QLabel(self.wizardPage3)
font = QtGui.QFont()
font.setPointSize(10)
self.lblSearchForInstanceInstancesFound.setFont(font)
self.lblSearchForInstanceInstancesFound.setWordWrap(True)
self.lblSearchForInstanceInstancesFound.setObjectName(_fromUtf8("lblSearchForInstanceInstancesFound"))
self.gridLayout_6.addWidget(self.lblSearchForInstanceInstancesFound, 0, 0, 1, 1)
self.treeWidgetSelectInstancesToImport = QtGui.QTreeWidget(self.wizardPage3)
self.treeWidgetSelectInstancesToImport.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.treeWidgetSelectInstancesToImport.setAlternatingRowColors(True)
self.treeWidgetSelectInstancesToImport.setSelectionMode(QtGui.QAbstractItemView.ExtendedSelection)
self.treeWidgetSelectInstancesToImport.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
self.treeWidgetSelectInstancesToImport.setObjectName(_fromUtf8("treeWidgetSelectInstancesToImport"))
self.treeWidgetSelectInstancesToImport.header().setVisible(False)
self.gridLayout_6.addWidget(self.treeWidgetSelectInstancesToImport, 1, 0, 1, 1)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.btnAllPage3 = QtGui.QPushButton(self.wizardPage3)
self.btnAllPage3.setMaximumSize(QtCore.QSize(35, 16777215))
self.btnAllPage3.setFlat(False)
self.btnAllPage3.setObjectName(_fromUtf8("btnAllPage3"))
self.horizontalLayout_2.addWidget(self.btnAllPage3)
self.btnNonePage3 = QtGui.QPushButton(self.wizardPage3)
self.btnNonePage3.setMaximumSize(QtCore.QSize(35, 16777215))
self.btnNonePage3.setObjectName(_fromUtf8("btnNonePage3"))
self.horizontalLayout_2.addWidget(self.btnNonePage3)
self.lineEditSearchTreePage3 = QtGui.QLineEdit(self.wizardPage3)
self.lineEditSearchTreePage3.setEnabled(True)
self.lineEditSearchTreePage3.setObjectName(_fromUtf8("lineEditSearchTreePage3"))
self.horizontalLayout_2.addWidget(self.lineEditSearchTreePage3)
self.btnExpandAll_page3 = QtGui.QPushButton(self.wizardPage3)
self.btnExpandAll_page3.setEnabled(True)
self.btnExpandAll_page3.setMinimumSize(QtCore.QSize(53, 0))
self.btnExpandAll_page3.setMaximumSize(QtCore.QSize(53, 16777215))
self.btnExpandAll_page3.setObjectName(_fromUtf8("btnExpandAll_page3"))
self.horizontalLayout_2.addWidget(self.btnExpandAll_page3)
self.btnCollapseAll_page3 = QtGui.QPushButton(self.wizardPage3)
self.btnCollapseAll_page3.setEnabled(True)
self.btnCollapseAll_page3.setMinimumSize(QtCore.QSize(53, 0))
self.btnCollapseAll_page3.setMaximumSize(QtCore.QSize(53, 16777215))
self.btnCollapseAll_page3.setObjectName(_fromUtf8("btnCollapseAll_page3"))
self.horizontalLayout_2.addWidget(self.btnCollapseAll_page3)
self.gridLayout_6.addLayout(self.horizontalLayout_2, 2, 0, 1, 1)
WizardSearchByInstanceName.addPage(self.wizardPage3)
self.wizardPage4 = QtGui.QWizardPage()
self.wizardPage4.setObjectName(_fromUtf8("wizardPage4"))
self.gridLayout_5 = QtGui.QGridLayout(self.wizardPage4)
self.gridLayout_5.setObjectName(_fromUtf8("gridLayout_5"))
self.lblSelectMyModelClassToImportInstances = QtGui.QLabel(self.wizardPage4)
font = QtGui.QFont()
font.setPointSize(10)
self.lblSelectMyModelClassToImportInstances.setFont(font)
self.lblSelectMyModelClassToImportInstances.setObjectName(_fromUtf8("lblSelectMyModelClassToImportInstances"))
self.gridLayout_5.addWidget(self.lblSelectMyModelClassToImportInstances, 0, 0, 1, 1)
self.comboBoxMyModelClasses = QtGui.QComboBox(self.wizardPage4)
self.comboBoxMyModelClasses.setObjectName(_fromUtf8("comboBoxMyModelClasses"))
self.gridLayout_5.addWidget(self.comboBoxMyModelClasses, 1, 0, 1, 1)
WizardSearchByInstanceName.addPage(self.wizardPage4)
self.wizardPage5 = QtGui.QWizardPage()
self.wizardPage5.setObjectName(_fromUtf8("wizardPage5"))
self.gridLayout_4 = QtGui.QGridLayout(self.wizardPage5)
self.gridLayout_4.setObjectName(_fromUtf8("gridLayout_4"))
self.lblShowPropertiesOfBothModels = QtGui.QLabel(self.wizardPage5)
font = QtGui.QFont()
font.setPointSize(10)
self.lblShowPropertiesOfBothModels.setFont(font)
self.lblShowPropertiesOfBothModels.setWordWrap(True)
self.lblShowPropertiesOfBothModels.setObjectName(_fromUtf8("lblShowPropertiesOfBothModels"))
self.gridLayout_4.addWidget(self.lblShowPropertiesOfBothModels, 0, 0, 1, 1)
self.tableWidgetViewPropertiesOfBothModels = QtGui.QTableWidget(self.wizardPage5)
self.tableWidgetViewPropertiesOfBothModels.setAutoFillBackground(False)
self.tableWidgetViewPropertiesOfBothModels.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
self.tableWidgetViewPropertiesOfBothModels.setAlternatingRowColors(True)
self.tableWidgetViewPropertiesOfBothModels.setSelectionMode(QtGui.QAbstractItemView.NoSelection)
self.tableWidgetViewPropertiesOfBothModels.setObjectName(_fromUtf8("tableWidgetViewPropertiesOfBothModels"))
self.tableWidgetViewPropertiesOfBothModels.setColumnCount(2)
self.tableWidgetViewPropertiesOfBothModels.setRowCount(0)
item = QtGui.QTableWidgetItem()
self.tableWidgetViewPropertiesOfBothModels.setHorizontalHeaderItem(0, item)
item = QtGui.QTableWidgetItem()
self.tableWidgetViewPropertiesOfBothModels.setHorizontalHeaderItem(1, item)
self.tableWidgetViewPropertiesOfBothModels.horizontalHeader().setCascadingSectionResizes(False)
self.tableWidgetViewPropertiesOfBothModels.horizontalHeader().setDefaultSectionSize(300)
self.tableWidgetViewPropertiesOfBothModels.horizontalHeader().setMinimumSectionSize(300)
self.tableWidgetViewPropertiesOfBothModels.horizontalHeader().setStretchLastSection(True)
self.gridLayout_4.addWidget(self.tableWidgetViewPropertiesOfBothModels, 1, 0, 1, 1)
WizardSearchByInstanceName.addPage(self.wizardPage5)
self.wizardPage6 = QtGui.QWizardPage()
self.wizardPage6.setEnabled(True)
self.wizardPage6.setTitle(_fromUtf8(""))
self.wizardPage6.setObjectName(_fromUtf8("wizardPage6"))
self.gridLayout_3 = QtGui.QGridLayout(self.wizardPage6)
self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
self.lblPopulatedInstances = QtGui.QLabel(self.wizardPage6)
self.lblPopulatedInstances.setMaximumSize(QtCore.QSize(16777215, 16777215))
font = QtGui.QFont()
font.setPointSize(10)
self.lblPopulatedInstances.setFont(font)
self.lblPopulatedInstances.setWordWrap(True)
self.lblPopulatedInstances.setObjectName(_fromUtf8("lblPopulatedInstances"))
self.gridLayout_3.addWidget(self.lblPopulatedInstances, 0, 0, 1, 1)
self.treeWidgetSummary = QtGui.QTreeWidget(self.wizardPage6)
self.treeWidgetSummary.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.treeWidgetSummary.setAlternatingRowColors(True)
self.treeWidgetSummary.setSelectionMode(QtGui.QAbstractItemView.NoSelection)
self.treeWidgetSummary.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
self.treeWidgetSummary.setAllColumnsShowFocus(False)
self.treeWidgetSummary.setObjectName(_fromUtf8("treeWidgetSummary"))
self.treeWidgetSummary.header().setVisible(False)
self.gridLayout_3.addWidget(self.treeWidgetSummary, 1, 0, 1, 1)
self.horizontalLayout_4 = QtGui.QHBoxLayout()
self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4"))
self.lineEditSearchTreeSummary = QtGui.QLineEdit(self.wizardPage6)
self.lineEditSearchTreeSummary.setEnabled(True)
self.lineEditSearchTreeSummary.setObjectName(_fromUtf8("lineEditSearchTreeSummary"))
self.horizontalLayout_4.addWidget(self.lineEditSearchTreeSummary)
self.btnExpandAll_Summary = QtGui.QPushButton(self.wizardPage6)
self.btnExpandAll_Summary.setEnabled(True)
self.btnExpandAll_Summary.setMinimumSize(QtCore.QSize(53, 0))
self.btnExpandAll_Summary.setMaximumSize(QtCore.QSize(53, 16777215))
self.btnExpandAll_Summary.setObjectName(_fromUtf8("btnExpandAll_Summary"))
self.horizontalLayout_4.addWidget(self.btnExpandAll_Summary)
self.btnCollapseAll_Summary = QtGui.QPushButton(self.wizardPage6)
self.btnCollapseAll_Summary.setEnabled(True)
self.btnCollapseAll_Summary.setMinimumSize(QtCore.QSize(53, 0))
self.btnCollapseAll_Summary.setMaximumSize(QtCore.QSize(53, 16777215))
self.btnCollapseAll_Summary.setObjectName(_fromUtf8("btnCollapseAll_Summary"))
self.horizontalLayout_4.addWidget(self.btnCollapseAll_Summary)
self.gridLayout_3.addLayout(self.horizontalLayout_4, 2, 0, 1, 1)
WizardSearchByInstanceName.addPage(self.wizardPage6)
self.retranslateUi(WizardSearchByInstanceName)
QtCore.QMetaObject.connectSlotsByName(WizardSearchByInstanceName)
def retranslateUi(self, WizardSearchByInstanceName):
WizardSearchByInstanceName.setWindowTitle(_translate("WizardSearchByInstanceName", "Search by Existing Instance - Step 1", None))
self.lblSearchInstanceInEM.setText(_translate("WizardSearchByInstanceName", "Select an instance of <span style=\'font-size:9pt; font-weight:600; color:#000000;\'>My Model</span> to search for similar instances in the <span style=\'font-size:9pt; font-weight:600; color:#000000;\'>External Model</span>:", None))
self.treeWidgetSelectInstanceToSearchFor.setToolTip(_translate("WizardSearchByInstanceName", "Instances in My Model - Please select an instance to be used in search process", None))
self.treeWidgetSelectInstanceToSearchFor.setWhatsThis(_translate("WizardSearchByInstanceName", "Instances in my model - Please select an instance", None))
self.treeWidgetSelectInstanceToSearchFor.headerItem().setText(0, _translate("WizardSearchByInstanceName", "Instances", None))
self.treeWidgetSelectInstanceToSearchFor.headerItem().setText(1, _translate("WizardSearchByInstanceName", "Values", None))
self.lineEditSearchTreePage1.setPlaceholderText(_translate("WizardSearchByInstanceName", "Search tree...", None))
self.btnExpandAll_page1.setText(_translate("WizardSearchByInstanceName", "Expand", None))
self.btnCollapseAll_page1.setText(_translate("WizardSearchByInstanceName", "Collapse", None))
self.lblSearchForInstanceClassesFound.setText(_translate("WizardSearchByInstanceName", "Class found.", None))
self.treeWidgetSelectClasses.setToolTip(_translate("WizardSearchByInstanceName", "Classes found in External Model - Please select one or more classes to search for its/their instances in External Model", None))
self.treeWidgetSelectClasses.setWhatsThis(_translate("WizardSearchByInstanceName", "External model classes - Please select a class or more to search for instances", None))
self.treeWidgetSelectClasses.headerItem().setText(0, _translate("WizardSearchByInstanceName", "Instances", None))
self.btnAllPage2.setToolTip(_translate("WizardSearchByInstanceName", "Select all", None))
self.btnAllPage2.setText(_translate("WizardSearchByInstanceName", "All", None))
self.btnNonePage2.setToolTip(_translate("WizardSearchByInstanceName", "Select none", None))
self.btnNonePage2.setText(_translate("WizardSearchByInstanceName", "None", None))
self.lineEditSearchTreePage2.setPlaceholderText(_translate("WizardSearchByInstanceName", "Search tree...", None))
self.lblSearchForInstanceInstancesFound.setText(_translate("WizardSearchByInstanceName", "Selected classes and their instances.", None))
self.treeWidgetSelectInstancesToImport.setToolTip(_translate("WizardSearchByInstanceName", "Instances of selected classes found in External Model - Please select one or more instances to import to My Model", None))
self.treeWidgetSelectInstancesToImport.setWhatsThis(_translate("WizardSearchByInstanceName", "Instances in selected classes - Please select an instance or more to import to my model", None))
self.treeWidgetSelectInstancesToImport.headerItem().setText(0, _translate("WizardSearchByInstanceName", "Instances", None))
self.treeWidgetSelectInstancesToImport.headerItem().setText(1, _translate("WizardSearchByInstanceName", "Values", None))
self.btnAllPage3.setToolTip(_translate("WizardSearchByInstanceName", "Select all", None))
self.btnAllPage3.setText(_translate("WizardSearchByInstanceName", "All", None))
self.btnNonePage3.setToolTip(_translate("WizardSearchByInstanceName", "Select none", None))
self.btnNonePage3.setText(_translate("WizardSearchByInstanceName", "None", None))
self.lineEditSearchTreePage3.setPlaceholderText(_translate("WizardSearchByInstanceName", "Search tree...", None))
self.btnExpandAll_page3.setText(_translate("WizardSearchByInstanceName", "Expand", None))
self.btnCollapseAll_page3.setText(_translate("WizardSearchByInstanceName", "Collapse", None))
self.lblSelectMyModelClassToImportInstances.setText(_translate("WizardSearchByInstanceName", "Select a class from <span style=\'font-size:9pt; font-weight:600; color:#000000;\'>My Model</span> to import instances:", None))
self.comboBoxMyModelClasses.setToolTip(_translate("WizardSearchByInstanceName", "Select a class from My Model where selected instances will be imported", None))
self.comboBoxMyModelClasses.setWhatsThis(_translate("WizardSearchByInstanceName", "Select a class of my model to import new instances", None))
self.lblShowPropertiesOfBothModels.setText(_translate("WizardSearchByInstanceName", "Match properties of External Model to My Model", None))
self.tableWidgetViewPropertiesOfBothModels.setToolTip(_translate("WizardSearchByInstanceName", "Property Mapping table - Please match My Model properties to External Model properties", None))
self.tableWidgetViewPropertiesOfBothModels.setWhatsThis(_translate("WizardSearchByInstanceName", "Property mapping - Please match my model properties to external model properties", None))
item = self.tableWidgetViewPropertiesOfBothModels.horizontalHeaderItem(0)
item.setText(_translate("WizardSearchByInstanceName", "Properties of External Model", None))
item = self.tableWidgetViewPropertiesOfBothModels.horizontalHeaderItem(1)
item.setText(_translate("WizardSearchByInstanceName", "Properties of My Model", None))
self.lblPopulatedInstances.setText(_translate("WizardSearchByInstanceName", "TextLabel", None))
self.treeWidgetSummary.setWhatsThis(_translate("WizardSearchByInstanceName", "Instances found in external model - Please select one or more Instances to import to my model", None))
self.treeWidgetSummary.headerItem().setText(0, _translate("WizardSearchByInstanceName", "Instances", None))
self.treeWidgetSummary.headerItem().setText(1, _translate("WizardSearchByInstanceName", "Value", None))
self.lineEditSearchTreeSummary.setPlaceholderText(_translate("WizardSearchByInstanceName", "Search tree...", None))
self.btnExpandAll_Summary.setText(_translate("WizardSearchByInstanceName", "Expand", None))
self.btnCollapseAll_Summary.setText(_translate("WizardSearchByInstanceName", "Collapse", None))
import search_by_existing_instance_wizard_resources_rc
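# Typical use of the generated class (a minimal sketch of standard PyQt4
# boilerplate, shown for illustration; not part of the generated file):
#
#     import sys
#     from PyQt4 import QtGui
#     app = QtGui.QApplication(sys.argv)
#     wizard = QtGui.QWizard()
#     ui = Ui_WizardSearchByInstanceName()
#     ui.setupUi(wizard)
#     wizard.show()
#     sys.exit(app.exec_())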
|
|
# Copyright 2016 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from mock import patch
import unittest
from networking_cisco.plugins.cisco.cpnr.cpnr_client import CpnrClient
class TestCpnrClient(unittest.TestCase):
def setUp(self):
super(TestCpnrClient, self).setUp()
self.mock_do_request = patch.object(CpnrClient, '_do_request').start()
# Stop the patch after each test so the mock does not leak between tests.
self.addCleanup(patch.stopall)
def test_buildurl(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
expected_url = ('http://cpnr.com:8080/web-services/rest/'
'resource/t?vpnId=vpn1234?viewId=view123&'
'zoneOrigin=test.com')
return_url = mock_client._build_url('t', 'vpn1234', 'view123',
'test.com')
self.assertEqual(expected_url, return_url)
def test_get_dhcp_server(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.get_dhcp_server()
self.mock_do_request.assert_called_once_with('GET',
'http://cpnr.com:8080/web-services/rest/'
'resource/DHCPServer')
def test_get_client_classes(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.get_client_classes()
self.mock_do_request.assert_called_once_with('GET',
'http://cpnr.com:8080/web-services/rest/resource/ClientClass')
def test_get_client_class(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.get_client_class('myclientclass')
self.mock_do_request.assert_called_once_with('GET',
'http://cpnr.com:8080/web-services/rest/resource/'
'ClientClass/myclientclass')
def test_get_vpns(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.get_vpns()
self.mock_do_request.assert_called_once_with('GET',
'http://cpnr.com:8080/web-services/rest/resource/VPN')
def test_get_scopes(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.get_scopes()
self.mock_do_request.assert_called_once_with('GET',
'http://cpnr.com:8080/web-services/rest/resource/Scope?vpnId=.*')
def test_get_scope(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.get_scope('myscope')
self.mock_do_request.assert_called_once_with('GET',
'http://cpnr.com:8080/web-services/rest/resource/Scope/myscope')
def test_get_client_entries(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.get_client_entries()
self.mock_do_request.assert_called_once_with('GET',
'http://cpnr.com:8080/web-services/rest/resource/ClientEntry')
def test_get_client_entry(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.get_client_entry('mycliententry')
self.mock_do_request.assert_called_once_with('GET',
'http://cpnr.com:8080/web-services/rest/resource/'
'ClientEntry/mycliententry')
def test_get_leases(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.get_leases('vpn123')
self.mock_do_request.assert_called_once_with('GET',
'http://cpnr.com:8080/web-services/rest/resource/'
'Lease?vpnId=vpn123')
def test_get_dns_server(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.get_dns_server()
self.mock_do_request.assert_called_once_with('GET',
'http://cpnr.com:8080/web-services/rest/resource/DNSServer')
def test_get_dns_forwarders(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.get_dns_forwarders()
self.mock_do_request.assert_called_once_with('GET',
'http://cpnr.com:8080/web-services/rest/resource/DnsForwarder')
def test_get_dns_forwarder(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.get_dns_forwarder('myforwarder')
self.mock_do_request.assert_called_once_with('GET',
'http://cpnr.com:8080/web-services/rest/'
'resource/DnsForwarder/myforwarder')
def test_get_dns_views(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.get_dns_views()
self.mock_do_request.assert_called_once_with('GET',
'http://cpnr.com:8080/web-services/rest/resource/DnsView')
def test_get_dns_view(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.get_dns_view('mydnsview')
self.mock_do_request.assert_called_once_with('GET',
'http://cpnr.com:8080/web-services/rest/resource/'
'DnsView/mydnsview')
def test_get_ccm_zones(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.get_ccm_zones()
self.mock_do_request.assert_called_once_with('GET',
'http://cpnr.com:8080/web-services/rest/resource/'
'CCMZone?viewId=.*')
def test_get_ccm_zone(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.get_ccm_zone('myzone')
self.mock_do_request.assert_called_once_with('GET',
'http://cpnr.com:8080/web-services/rest/resource/'
'CCMZone/myzone?viewId=.*')
def test_get_ccm_reverse_zones(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.get_ccm_reverse_zones()
self.mock_do_request.assert_called_once_with('GET',
'http://cpnr.com:8080/web-services/rest/resource/'
'CCMReverseZone?viewId=.*')
def test_get_ccm_reverse_zone(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.get_ccm_reverse_zone('myreversezone')
self.mock_do_request.assert_called_once_with('GET',
'http://cpnr.com:8080/web-services/rest/resource/'
'CCMReverseZone/myreversezone?viewId=.*')
def test_get_ccm_hosts(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.get_ccm_hosts()
self.mock_do_request.assert_called_once_with('GET',
'http://cpnr.com:8080/web-services/rest/resource/'
'CCMHost?viewId=.*&zoneOrigin=.*')
def test_get_ccm_host(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.get_ccm_host('myhost')
self.mock_do_request.assert_called_once_with('GET',
'http://cpnr.com:8080/web-services/rest/resource/CCMHost'
'/myhost?viewId=.*&zoneOrigin=.*')
def test_create_scope(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.create_scope('myscope')
self.mock_do_request.assert_called_once_with('POST',
'http://cpnr.com:8080/web-services/rest/resource/Scope',
'myscope')
def test_create_client_class(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.create_client_class('myclientclass')
self.mock_do_request.assert_called_once_with('POST',
'http://cpnr.com:8080/web-services/rest/resource/ClientClass',
'myclientclass')
def test_create_vpn(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.create_vpn('myvpn')
self.mock_do_request.assert_called_once_with('POST',
'http://cpnr.com:8080/web-services/rest/resource/VPN',
'myvpn')
def test_create_client_entry(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.create_client_entry('mycliententry')
self.mock_do_request.assert_called_once_with('POST',
'http://cpnr.com:8080/web-services/rest/resource/ClientEntry',
'mycliententry')
def test_create_dns_forwarder(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.create_dns_forwarder('mydnsforwarder')
self.mock_do_request.assert_called_once_with('POST',
'http://cpnr.com:8080/web-services/rest/resource/DnsForwarder',
'mydnsforwarder')
def test_create_dns_view(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.create_dns_view('mydnsview')
self.mock_do_request.assert_called_once_with('POST',
'http://cpnr.com:8080/web-services/rest/resource/DnsView',
'mydnsview')
def test_create_ccm_zone(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.create_ccm_zone('myccmzone')
self.mock_do_request.assert_called_once_with('POST',
'http://cpnr.com:8080/web-services/rest/resource/CCMZone',
'myccmzone')
def test_create_ccm_reverse_zone(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.create_ccm_reverse_zone('myccmreversezone')
self.mock_do_request.assert_called_once_with('POST',
'http://cpnr.com:8080/web-services/rest/resource/CCMReverseZone',
'myccmreversezone')
def test_create_ccm_host(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.create_ccm_host('myccmhost')
self.mock_do_request.assert_called_once_with('POST',
'http://cpnr.com:8080/web-services/rest/resource/CCMHost',
'myccmhost')
def test_update_dhcp_server(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.update_dhcp_server('updatedhcpserver')
self.mock_do_request.assert_called_once_with('PUT',
'http://cpnr.com:8080/web-services/rest/resource/DHCPServer',
'updatedhcpserver')
def test_update_client_class(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.update_client_class('updateclientclass',
'newclientclass')
self.mock_do_request.assert_called_once_with('PUT',
'http://cpnr.com:8080/web-services/rest/resource/'
'ClientClass/updateclientclass',
'newclientclass')
def test_update_vpn(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.update_vpn('updatevpn', 'newvpn')
self.mock_do_request.assert_called_once_with('PUT',
'http://cpnr.com:8080/web-services/rest/resource/VPN/updatevpn',
'newvpn')
def test_update_scope(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.update_scope('updatescope', 'newscope')
self.mock_do_request.assert_called_once_with('PUT',
'http://cpnr.com:8080/web-services/rest/resource/'
'Scope/updatescope',
'newscope')
def test_update_client_entry(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.update_client_entry('updatecliententry',
'newcliententry')
self.mock_do_request.assert_called_once_with('PUT',
'http://cpnr.com:8080/web-services/rest/resource'
'/ClientEntry/updatecliententry',
'newcliententry')
def test_update_dns_server(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.update_dns_server('updatednsserver')
self.mock_do_request.assert_called_once_with('PUT',
'http://cpnr.com:8080/web-services/rest/resource/DNSServer',
'updatednsserver')
def test_update_dns_forwarder(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.update_dns_forwarder('updatednsforwarder',
'newforwarder')
self.mock_do_request.assert_called_once_with('PUT',
'http://cpnr.com:8080/web-services/rest/resource/'
'DnsForwarder/updatednsforwarder',
'newforwarder')
def test_update_dns_view(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.update_dns_view('updatednsview', 'newdnsview')
self.mock_do_request.assert_called_once_with('PUT',
'http://cpnr.com:8080/web-services/rest/resource'
'/DnsView/updatednsview',
'newdnsview')
def test_update_ccm_zone(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.update_ccm_zone('updateccmzone', 'newzone',
None)
self.mock_do_request.assert_called_once_with('PUT',
'http://cpnr.com:8080/web-services/rest/resource/'
'CCMZone/updateccmzone',
'newzone')
def test_update_ccm_reverse_zone(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.update_ccm_reverse_zone(
'updateccmreversezone',
'newreversezone',
None)
self.mock_do_request.assert_called_once_with('PUT',
'http://cpnr.com:8080/web-services/rest/resource/'
'CCMReverseZone/updateccmreversezone',
'newreversezone')
def test_update_ccm_host(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.update_ccm_host('updateccmhost', 'newccmhost',
None)
self.mock_do_request.assert_called_once_with('PUT',
'http://cpnr.com:8080/web-services/rest/resource/'
'CCMHost/updateccmhost',
'newccmhost')
def test_delete_client_class(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.delete_client_class('deleteclientclass')
self.mock_do_request.assert_called_once_with('DELETE',
'http://cpnr.com:8080/web-services/rest/resource/'
'ClientClass/deleteclientclass')
def test_delete_vpn(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.delete_vpn('deletevpn')
self.mock_do_request.assert_called_once_with('DELETE',
'http://cpnr.com:8080/web-services/rest/resource/VPN/deletevpn')
def test_delete_scope(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.delete_scope('deletescope')
self.mock_do_request.assert_called_once_with('DELETE',
'http://cpnr.com:8080/web-services/rest/resource/'
'Scope/deletescope')
def test_delete_client_entry(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.delete_client_entry('deletecliententry')
self.mock_do_request.assert_called_once_with('DELETE',
'http://cpnr.com:8080/web-services/rest/resource/'
'ClientEntry/deletecliententry')
def test_delete_dns_forwarder(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.delete_dns_forwarder('deletednsforwarder')
self.mock_do_request.assert_called_once_with('DELETE',
'http://cpnr.com:8080/web-services/rest/resource/'
'DnsForwarder/deletednsforwarder')
def test_delete_dns_view(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.delete_dns_view('deletednsview')
self.mock_do_request.assert_called_once_with('DELETE',
'http://cpnr.com:8080/web-services/rest/resource/'
'DnsView/deletednsview')
def test_delete_ccm_zone(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.delete_ccm_zone('deleteccmzone')
self.mock_do_request.assert_called_once_with('DELETE',
'http://cpnr.com:8080/web-services/rest/resource/'
'CCMZone/deleteccmzone')
def test_delete_ccm_reverse_zone(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.delete_ccm_reverse_zone('delccmreversezone')
self.mock_do_request.assert_called_once_with('DELETE',
'http://cpnr.com:8080/web-services/rest/resource/'
'CCMReverseZone/delccmreversezone')
def test_delete_ccm_host(self):
mock_client = CpnrClient('http', 'cpnr.com', '8080', 'admin',
'admin', 0)
mock_client.delete_ccm_host('deleteccmhost')
self.mock_do_request.assert_called_once_with('DELETE',
'http://cpnr.com:8080/web-services/rest/resource/'
'CCMHost/deleteccmhost')
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# A simple Python script to get active IPs on a LAN managed
# by a CBN CG6640E router. The script has been optimized
# for firmware CG6640-3.5.1.10a-SH; using a different
# firmware version may therefore cause errors.
#
# gNrg(at)tuta.io
#
import os
import sys  # needed for the sys.exit() calls below
from contextlib import closing
from selenium.webdriver import Firefox # pip install selenium
from selenium.webdriver.support.ui import WebDriverWait
from bs4 import BeautifulSoup # easy_install beautifulsoup4
from getpass import getpass
# This function prints the script banner
def print_header():
os.system("clear")
print "+--------------------------------------------------+"
print "| |"
print "| CG6640E.py |"
print "| by gNrg |"
print "| |"
print "+--------------------------------------------------+"
print ""
# This function shows menu options
def show_menu_options():
print("\t1 - ) Show all clients")
print("\t2 - ) Show WiFi clients")
print("\t3 - ) Show Ethernet clients")
print("\t4 - ) Show clients and save")
print("\t5 - ) Exit")
print("\n")
option = raw_input(" Choose one of these options: ")
return option
# This function prints a client using a pretty format
def print_client(client, known_macs):
n = get_known_MAC_name(str(client[1][0]), known_macs)
if n: name = n
else: name = 'UNKNOWN HOST!'
if client[0] != []: name += ' - ' + str(client[0][0])
print ""
print "+--------------------------------------------------+"
print "| Client: " + name
print "+--------------------------------------------------+"
print "| MAC: " + str(client[1][0])
print "| IP: " + str(client[2][0])
print "| Uptime: " + str(client[3][0])
if str(client[4][1]) == ' Ethernet': print "| Conn Type: " + 'Ethernet'
else:
print "| Conn Type: " + str(client[4][1])
print "| BSSID: " + str(client[4][4])
print "| Protocol: " + str(client[4][6])
print "| Addr Type: " + str(client[5][0])
print "+--------------------------------------------------+"
print ""
# This function returns a string with the machine name if the given MAC exists in the KnownMACs file
def get_known_MAC_name(MAC, known_macs):
name = ''
for m in known_macs:
if m[0] == MAC:
name = m[1]
return name
# This function returns a string with all the information using a pretty format
def get_file_content(lan_clients_data, known_macs):
text = "LAN CLIENTS INFORMATION\n"
for client in lan_clients_data:
n = get_known_MAC_name(str(client[1][0]), known_macs)
if n: name = n
else: name = 'UNKNOWN HOST!'
if client[0] != []: name += ' - ' + str(client[0][0])
text += "+--------------------------------------------------+\n"
text += "| Client: " + name + '\n'
text += "+--------------------------------------------------+\n"
text += "| MAC: " + str(client[1][0]) + '\n'
text += "| IP: " + str(client[2][0]) + '\n'
text += "| Uptime: " + str(client[3][0]) + '\n'
if str(client[4][1]) == ' Ethernet': text += "| Conn Type: " + 'Ethernet\n'
else:
text += "| Conn Type: " + str(client[4][1]) + '\n'
text += "| BSSID: " + str(client[4][4]) + '\n'
text += "| Protocol: " + str(client[4][6]) + '\n'
text += "| Addr Type: " + str(client[5][0]) + '\n'
text += "+--------------------------------------------------+\n\n"
return text
# This function reads a KnownMACs file and returns the values as a list
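# For illustration only (the path is whatever the user types in below), each
# line of the KnownMACs file is expected to hold a MAC address and a name
# separated by a single space, e.g.:
#
#   AA:BB:CC:DD:EE:FF living-room-tv
#   11:22:33:44:55:66 laptop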
def get_known_macs():
known_macs = []
path = raw_input("Enter complete path of KnownMACs text file: ")
try:
file = open(path, 'r')
print "\nOpen file:\t\t\t[[ OK ]]"
file_content = file.read()
file_lines = []
i = 0
for line in file_content.split("\n"):
    if not line.strip():
        continue  # skip blank lines, e.g. a trailing newline
    a, b = line.split(" ")
    known_macs.append([])
    known_macs[i].append(a)
    known_macs[i].append(b)
    i += 1
print "Getting file information:\t[[ OK ]]"
file.close()
print "Close file:\t\t\t[[ OK ]]"
return known_macs
except IOError:
print("Open/Read file: [[ ERROR ]]")
print("\tCheck if the path is correct.")
print("\tCheck if do you have permissions for read the file.")
print("\tThen, try again.")
sys.exit(0)
print_header()
# Get user information
user = raw_input('\nEnter username [Leave blank for default]: ')
password = getpass('Enter password [Leave blank for default]: ')
ip = raw_input('Router IP [Leave blank for 192.168.1.1]: ')
if not user: user = 'admin'
if not password: password = 'admin'
if not ip: ip = '192.168.1.1'
# Get KnownMACs file content
known_macs = []
load_mac_file = raw_input('\nDo you want to load a KnownMACs file? [y/N]: ')
if load_mac_file == 'y' or load_mac_file == 'Y':
known_macs = get_known_macs() # MAC [0] - Name [1]
print_header()
login_url = "http://" + ip + "/login/Login.txt?password=" + password + "&user=" + user
target_url = "http://" + ip + "/basicLanUsers.html"
print "\nGetting information from the router..."
# Use firefox to get page with javascript generated content
with closing(Firefox()) as browser:
browser.get(login_url)
browser.get(target_url)
# button = browser.find_element_by_name('button')
# button.click()
# Wait for the page/js to load
try:
WebDriverWait(browser, timeout=10).until(
lambda x: x.find_element_by_name('instance'))
except:
print "Getting info:\t\t[[ ERROR ]]\n"
exit(-1)
# Store it to string variable
page_source = browser.page_source
print "Getting info:\t\t[[ OK ]]\n"
# Format the string to navigate it using BeautifulSoup
soup = BeautifulSoup(page_source, 'lxml')
lan_clients = soup.find_all(attrs={"name": "instance"})
lan_clients_data = []
client_data = []
# Remove blanks from list
for client in lan_clients:
unformatted_client_data = client.contents
for data in unformatted_client_data:
if data != u' ':
client_data.append(data.contents)
lan_clients_data.append(client_data)
client_data = []
if len(lan_clients_data) > 0:
print "Clients Found:\t\t[ " + str(len(lan_clients_data)) + " ]\n"
else:
print "Clients Found:\t\t[ 0 ]\n"
exit(0)
# Show&Process menu options
option = show_menu_options()
print_header()
if option == '1':
for lcd in lan_clients_data:
print_client(lcd, known_macs)
elif option == '2':
for lcd in lan_clients_data:
if lcd[4][1] != ' Ethernet':
print_client(lcd, known_macs)
elif option == '3':
for lcd in lan_clients_data:
if lcd[4][1] == ' Ethernet':
print_client(lcd, known_macs)
elif option == '4':
for lcd in lan_clients_data:
print_client(lcd, known_macs)
path = raw_input("WARNING!!! If the file exists, it will be overwrite.\nEnter complete path and name of new text file: ")
try:
file = open(path, 'w')
print "\nCreating/Open file:\t[[ OK ]]"
file_content = get_file_content(lan_clients_data, known_macs)
file.write(file_content)
print "Writing file:\t\t[[ OK ]]"
file.close()
print "Close file:\t\t[[ OK ]]"
print "You can see the new file on: " + path
except IOError:
print("Creating/Open file: [[ ERROR ]]")
print("\tCheck if the path is correct.")
print("\tCheck if do you have permissions for create files in this folder.")
print("\tThen, try again.")
sys.exit(0)
elif option == '5': exit(0)
|
|
import os
import shutil
from pyquery import PyQuery
from StringIO import StringIO
from textwrap import wrap
import django.test
from django.conf import settings
from django.core.urlresolvers import reverse as urlreverse
from ietf.utils.test_utils import login_testing_unauthorized
from ietf.utils.test_data import make_test_data
from ietf.utils.mail import outbox
from ietf.doc.utils import create_ballot_if_not_open
from ietf.doc.views_conflict_review import default_approval_text
from ietf.doc.models import Document,DocEvent,NewRevisionDocEvent,BallotPositionDocEvent,TelechatDocEvent,DocAlias,State
from ietf.name.models import StreamName
from ietf.group.models import Person
from ietf.iesg.models import TelechatDate
class ConflictReviewTestCase(django.test.TestCase):
fixtures = ['names']
def test_start_review(self):
doc = Document.objects.get(name='draft-imaginary-independent-submission')
url = urlreverse('conflict_review_start',kwargs=dict(name=doc.name))
login_testing_unauthorized(self, "secretary", url)
# can't start conflict reviews on documents not in the ise or irtf streams
r = self.client.get(url)
self.assertEquals(r.status_code, 404)
doc.stream=StreamName.objects.get(slug='ise')
doc.save()
# normal get should succeed and get a reasonable form
r = self.client.get(url)
self.assertEquals(r.status_code, 200)
q = PyQuery(r.content)
self.assertEquals(len(q('form select[name=create_in_state]')),1)
# faulty posts
r = self.client.post(url,dict(create_in_state=""))
self.assertEquals(r.status_code, 200)
q = PyQuery(r.content)
self.assertTrue(len(q('form ul.errorlist')) > 0)
self.assertEquals(Document.objects.filter(name='conflict-review-imaginary-independent-submission').count() , 0)
r = self.client.post(url,dict(ad=""))
self.assertEquals(r.status_code, 200)
q = PyQuery(r.content)
self.assertTrue(len(q('form ul.errorlist')) > 0)
self.assertEquals(Document.objects.filter(name='conflict-review-imaginary-independent-submission').count() , 0)
# successful review start
ad_strpk = str(Person.objects.get(name='Aread Irector').pk)
state_strpk = str(State.objects.get(used=True, slug='needshep',type__slug='conflrev').pk)
r = self.client.post(url,dict(ad=ad_strpk,create_in_state=state_strpk,notify='ipu@ietf.org'))
self.assertEquals(r.status_code, 302)
review_doc = Document.objects.get(name='conflict-review-imaginary-independent-submission')
self.assertEquals(review_doc.get_state('conflrev').slug,'needshep')
self.assertEquals(review_doc.rev,u'00')
self.assertEquals(review_doc.ad.name,u'Aread Irector')
self.assertEquals(review_doc.notify,u'ipu@ietf.org')
doc = Document.objects.get(name='draft-imaginary-independent-submission')
self.assertTrue(doc in [x.target.document for x in review_doc.relateddocument_set.filter(relationship__slug='conflrev')])
self.assertTrue(review_doc.latest_event(DocEvent,type="added_comment").desc.startswith("IETF conflict review requested"))
self.assertTrue(doc.latest_event(DocEvent,type="added_comment").desc.startswith("IETF conflict review initiated"))
# verify you can't start a review when a review is already in progress
r = self.client.post(url,dict(ad="Aread Irector",create_in_state="Needs Shepherd",notify='ipu@ietf.org'))
self.assertEquals(r.status_code, 404)
def test_change_state(self):
doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission')
url = urlreverse('conflict_review_change_state',kwargs=dict(name=doc.name))
login_testing_unauthorized(self, "ad", url)
# normal get
r = self.client.get(url)
self.assertEquals(r.status_code, 200)
q = PyQuery(r.content)
self.assertEquals(len(q('form select[name=review_state]')),1)
# faulty post
r = self.client.post(url,dict(review_state=""))
self.assertEquals(r.status_code, 200)
q = PyQuery(r.content)
self.assertTrue(len(q('form ul.errorlist')) > 0)
# successful change to AD Review
adrev_pk = str(State.objects.get(used=True, slug='adrev',type__slug='conflrev').pk)
r = self.client.post(url,dict(review_state=adrev_pk,comment='RDNK84ZD'))
self.assertEquals(r.status_code, 302)
review_doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission')
self.assertEquals(review_doc.get_state('conflrev').slug,'adrev')
self.assertTrue(review_doc.latest_event(DocEvent,type="added_comment").desc.startswith('RDNK84ZD'))
self.assertFalse(review_doc.active_ballot())
# successful change to IESG Evaluation
iesgeval_pk = str(State.objects.get(used=True, slug='iesgeval',type__slug='conflrev').pk)
r = self.client.post(url,dict(review_state=iesgeval_pk,comment='TGmZtEjt'))
self.assertEquals(r.status_code, 302)
review_doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission')
self.assertEquals(review_doc.get_state('conflrev').slug,'iesgeval')
self.assertTrue(review_doc.latest_event(DocEvent,type="added_comment").desc.startswith('TGmZtEjt'))
self.assertTrue(review_doc.active_ballot())
self.assertEquals(review_doc.latest_event(BallotPositionDocEvent, type="changed_ballot_position").pos_id,'yes')
def test_edit_notices(self):
doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission')
url = urlreverse('conflict_review_notices',kwargs=dict(name=doc.name))
login_testing_unauthorized(self, "ad", url)
# normal get
r = self.client.get(url)
self.assertEquals(r.status_code, 200)
q = PyQuery(r.content)
self.assertEquals(len(q('form input[name=notify]')),1)
self.assertEquals(doc.notify,q('form input[name=notify]')[0].value)
# change notice list
newlist = '"Foo Bar" <foo@bar.baz.com>'
r = self.client.post(url,dict(notify=newlist))
self.assertEquals(r.status_code,302)
doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission')
self.assertEquals(doc.notify,newlist)
self.assertTrue(doc.latest_event(DocEvent,type="added_comment").desc.startswith('Notification list changed'))
def test_edit_ad(self):
doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission')
url = urlreverse('conflict_review_ad',kwargs=dict(name=doc.name))
login_testing_unauthorized(self, "ad", url)
# normal get
r = self.client.get(url)
self.assertEquals(r.status_code, 200)
q = PyQuery(r.content)
self.assertEquals(len(q('select[name=ad]')),1)
# change ads
ad2 = Person.objects.get(name='Ad No2')
r = self.client.post(url,dict(ad=str(ad2.pk)))
self.assertEquals(r.status_code,302)
doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission')
self.assertEquals(doc.ad,ad2)
self.assertTrue(doc.latest_event(DocEvent,type="added_comment").desc.startswith('Shepherding AD changed'))
def test_edit_telechat_date(self):
doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission')
url = urlreverse('conflict_review_telechat_date',kwargs=dict(name=doc.name))
login_testing_unauthorized(self, "ad", url)
# normal get
r = self.client.get(url)
self.assertEquals(r.status_code, 200)
q = PyQuery(r.content)
self.assertEquals(len(q('select[name=telechat_date]')),1)
# set a date
self.assertFalse(doc.latest_event(TelechatDocEvent, "scheduled_for_telechat"))
telechat_date = TelechatDate.objects.active().order_by('date')[0].date
r = self.client.post(url,dict(telechat_date=telechat_date.isoformat()))
self.assertEquals(r.status_code,302)
doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission')
self.assertEquals(doc.latest_event(TelechatDocEvent, "scheduled_for_telechat").telechat_date,telechat_date)
# move it forward a telechat (this should set the returning item bit)
telechat_date = TelechatDate.objects.active().order_by('date')[1].date
r = self.client.post(url,dict(telechat_date=telechat_date.isoformat()))
self.assertEquals(r.status_code,302)
doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission')
self.assertTrue(doc.returning_item())
# clear the returning item bit
r = self.client.post(url,dict(telechat_date=telechat_date.isoformat()))
self.assertEquals(r.status_code,302)
doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission')
self.assertFalse(doc.returning_item())
# set the returning item bit without changing the date
r = self.client.post(url,dict(telechat_date=telechat_date.isoformat(),returning_item="on"))
self.assertEquals(r.status_code,302)
doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission')
self.assertTrue(doc.returning_item())
# Take the doc back off any telechat
r = self.client.post(url,dict(telechat_date=""))
self.assertEquals(r.status_code, 302)
self.assertEquals(doc.latest_event(TelechatDocEvent, "scheduled_for_telechat").telechat_date,None)
def approve_test_helper(self,approve_type):
doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission')
url = urlreverse('conflict_review_approve',kwargs=dict(name=doc.name))
login_testing_unauthorized(self, "secretary", url)
# Some additional setup
create_ballot_if_not_open(doc,Person.objects.get(name="Sec Retary"),"conflrev")
doc.set_state(State.objects.get(used=True, slug=approve_type+'-pend',type='conflrev'))
doc.save()
# get
r = self.client.get(url)
self.assertEquals(r.status_code, 200)
q = PyQuery(r.content)
self.assertEquals(len(q('form.approve')),1)
if approve_type == 'appr-noprob':
self.assertTrue( 'IESG has no problem' in ''.join(wrap(r.content,2**16)))
else:
self.assertTrue( 'NOT be published' in ''.join(wrap(r.content,2**16)))
# submit
messages_before = len(outbox)
r = self.client.post(url,dict(announcement_text=default_approval_text(doc)))
self.assertEquals(r.status_code, 302)
doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission')
self.assertEquals(doc.get_state_slug(),approve_type+'-sent')
self.assertFalse(doc.ballot_open("conflrev"))
self.assertEquals(len(outbox), messages_before + 1)
self.assertTrue('Results of IETF-conflict review' in outbox[-1]['Subject'])
if approve_type == 'appr-noprob':
self.assertTrue( 'IESG has no problem' in ''.join(wrap(unicode(outbox[-1]),2**16)))
else:
self.assertTrue( 'NOT be published' in ''.join(wrap(unicode(outbox[-1]),2**16)))
def test_approve_reqnopub(self):
self.approve_test_helper('appr-reqnopub')
def test_approve_noprob(self):
self.approve_test_helper('appr-noprob')
def setUp(self):
make_test_data()
class ConflictReviewSubmitTestCase(django.test.TestCase):
fixtures = ['names']
def test_initial_submission(self):
doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission')
url = urlreverse('conflict_review_submit',kwargs=dict(name=doc.name))
login_testing_unauthorized(self, "ad", url)
# normal get
r = self.client.get(url)
self.assertEquals(r.status_code,200)
q = PyQuery(r.content)
self.assertTrue(q('textarea')[0].text.startswith("[Edit this page"))
# Faulty posts using textbox
# Right now, nothing to test - we let people put whatever the web browser will let them put into that textbox
# sane post using textbox
path = os.path.join(settings.CONFLICT_REVIEW_PATH, '%s-%s.txt' % (doc.canonical_name(), doc.rev))
self.assertEquals(doc.rev,u'00')
self.assertFalse(os.path.exists(path))
r = self.client.post(url,dict(content="Some initial review text\n",submit_response="1"))
self.assertEquals(r.status_code,302)
doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission')
self.assertEquals(doc.rev,u'00')
with open(path) as f:
self.assertEquals(f.read(),"Some initial review text\n")
f.close()
self.assertTrue( "submission-00" in doc.latest_event(NewRevisionDocEvent).desc)
def test_subsequent_submission(self):
doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission')
url = urlreverse('conflict_review_submit',kwargs=dict(name=doc.name))
login_testing_unauthorized(self, "ad", url)
# A little additional setup
# doc.rev is u'00' per the test setup - double-checking that here - if it fails, the breakage is in setUp
self.assertEquals(doc.rev,u'00')
path = os.path.join(settings.CONFLICT_REVIEW_PATH, '%s-%s.txt' % (doc.canonical_name(), doc.rev))
with open(path,'w') as f:
f.write('This is the old proposal.')
f.close()
# normal get
r = self.client.get(url)
self.assertEquals(r.status_code,200)
q = PyQuery(r.content)
self.assertTrue(q('textarea')[0].text.startswith("This is the old proposal."))
# faulty posts trying to use file upload
# Copied from wgtracker tests - is this really testing the server code, or is it testing
# how client.post populates Content-Type?
test_file = StringIO("\x10\x11\x12") # post binary file
test_file.name = "unnamed"
r = self.client.post(url, dict(txt=test_file,submit_response="1"))
self.assertEquals(r.status_code, 200)
self.assertTrue("does not appear to be a text file" in r.content)
# sane post uploading a file
test_file = StringIO("This is a new proposal.")
test_file.name = "unnamed"
r = self.client.post(url,dict(txt=test_file,submit_response="1"))
self.assertEquals(r.status_code, 302)
doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission')
self.assertEquals(doc.rev,u'01')
path = os.path.join(settings.CONFLICT_REVIEW_PATH, '%s-%s.txt' % (doc.canonical_name(), doc.rev))
with open(path) as f:
self.assertEquals(f.read(),"This is a new proposal.")
f.close()
self.assertTrue( "submission-01" in doc.latest_event(NewRevisionDocEvent).desc)
# verify reset text button works
r = self.client.post(url,dict(reset_text="1"))
self.assertEquals(r.status_code, 200)
q = PyQuery(r.content)
self.assertTrue(q('textarea')[0].text.startswith("[Edit this page"))
def setUp(self):
make_test_data()
self.test_dir = os.path.abspath("tmp-conflict-review-testdir")
os.mkdir(self.test_dir)
settings.CONFLICT_REVIEW_PATH = self.test_dir
def tearDown(self):
shutil.rmtree(self.test_dir)
|
|
"""
@file
@brief Compute metrics for a competition
"""
import os
import sys
if sys.version_info[0] == 2:
FileNotFoundError = Exception
def main_codalab_wrapper_binary_classification(fct, metric_name, argv, truth_file="truth.txt",
submission_file="answer.txt", output_file="scores.txt"):
"""
adapt the template available at
`evaluate.py <https://github.com/Tivix/competition-examples/blob/master/hello_world/competition/scoring_program/evaluate.py>`_
"""
input_dir = argv[1]
output_dir = argv[2]
submit_dir = os.path.join(input_dir, 'res')
truth_dir = os.path.join(input_dir, 'ref')
if not os.path.isdir(submit_dir):
raise FileNotFoundError("%s doesn't exist" % submit_dir)
if os.path.isdir(submit_dir) and os.path.isdir(truth_dir):
if not os.path.exists(output_dir):
os.makedirs(output_dir)
private_codalab_wrapper_binary_classification(fct, metric_name,
fold1=truth_dir, f1=truth_file,
fold2=submit_dir, f2=submission_file,
output=os.path.join(output_dir, output_file))
else:
raise FileNotFoundError(
"{0} or {1} is not a folder".format(submit_dir, truth_dir))
def private_codalab_wrapper_binary_classification(fct, metric_name, fold1, fold2, f1="answer.txt", f2="answer.txt",
output="scores.txt", use_print=False):
"""
Wraps the function following the guidelines
`User_Building a Scoring Program for a Competition <https://github.com/codalab/codalab-competitions/wiki/User_Building-a-Scoring-Program-for-a-Competition>`_.
It replicates the example available at
`competition-examples/hello_world <https://github.com/Tivix/competition-examples/tree/master/hello_world/competition>`_.
@param fct function to wrap
@param metric_name metric name
@param fold1 folder containing the truth file
@param fold2 folder containing the submitted answers
@param f1 filename for the truth
@param f2 filename for the produced answers
@param output output file receiving the computed results
@param use_print display intermediate results
@return metric
"""
f1 = os.path.join(fold1, f1)
f2 = os.path.join(fold2, f2)
if not os.path.exists(f1):
raise FileNotFoundError("unable to find '{0}'".format(f1))
if not os.path.exists(f2):
raise FileNotFoundError("unable to find '{0}'".format(f2))
if f1 == f2:
raise ValueError(
"answers and scores are the same file: '{0}'".format(f1))
with open(f1, "r") as f:
lines = f.readlines()
answers = [float(_) for _ in lines if _]
if use_print:
print("Reading answers:", f1, len(answers), "rows")
print("First answers:", answers[:10])
with open(f2, "r") as f:
lines = f.readlines()
scores = [float(_) for _ in lines if _]
if use_print:
print("Reading scores:", f1, len(scores), "rows")
print("First scores:", scores[:10])
metric = fct(answers, scores)
res = "{0}:{1}".format(metric_name, metric)
if use_print:
print("Results=", res)
with open(output, "w") as f:
f.write(res)
if use_print:
print("Wrote", res, "in", output)
return metric
def AUC(answers, scores):
"""
Compute the `AUC <https://en.wikipedia.org/wiki/Receiver_operating_characteristic>`_ (area under the ROC curve).
@param answers expected answers 0 (false), 1 (true)
@param scores score obtained for class 1
@return number
"""
ab = list(zip(answers, scores))
plus = [s for a, s in ab if a == 1]
moins = [s for a, s in ab if a != 1]
auc = 0
for p in plus:
for m in moins:
if p > m:
auc += 2
elif p == m:
auc += 1
den = len(plus) * len(moins)
if den == 0:
return 1.0 if len(moins) == 0 else 0.0
return auc * 1.0 / (len(plus) * len(moins) * 2)
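# A small worked example (illustrative only): with answers [1, 0, 1, 0] and
# scores [0.9, 0.1, 0.8, 0.4] every positive outranks every negative, so the
# function returns 8 / (2 * 2 * 2) = 1.0; with answers [1, 0] and scores
# [0.5, 0.5] the single tied pair yields 1 / 2 = 0.5.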
def AUC_multi(answers, scores, ignored=None):
"""
Compute the `AUC <https://en.wikipedia.org/wiki/Receiver_operating_characteristic>`_ (area under the ROC curve).
@param answers expected answers `class` as a string
@param scores prediction and score `(class, score)`
@param ignored ignored class
@return number
"""
if ignored is None:
ignored = []
new_answers = [(1 if s[0] == a else 0)
for (a, s) in zip(answers, scores) if a not in ignored]
return AUC(new_answers, scores)
def AUC_multi_multi(nb, answers, scores, ignored=None):
"""
Compute the `AUC <https://en.wikipedia.org/wiki/Receiver_operating_characteristic>`_ (area under the ROC curve).
@param answers expected answers, list of tuple of classes as a string
@param scores prediction and score `(class, score)`
@param ignored ignored class
@return number
Dummy expected classes (both classes):
::
endettement 4.0
surendettement 4.0
surendettement 4.0
surendettement 4.0
Dummy predicted answers:
::
2.0 endettement 0.48775936896183714 0.5033579692108108
5.0 microcredit social 0.16592396695909017 0.8643847837801871
5.0 microcredit personnel 0.7962830470795325 0.6233706526012659
3.0 impayes 0.17370233487556486 0.779432954126955
"""
res = []
for i in range(0, nb):
ta = [a[i] for a in answers]
ts = [(a[i], a[nb + i]) for a in scores]
auc = AUC_multi(ta, ts, ignored)
err = sum(1 if a != s[0] else 0 for (a, s) in zip(ta, ts))
res.append(err * 1.0 / len(ta))
res.append(auc)
return res
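# Note: the returned list interleaves the metrics per variable, i.e.
# [err_var1, auc_var1, err_var2, auc_var2, ...], which matches the
# "<name>_ERR" / "<name>_AUC" naming used by the multi-classification
# wrapper below.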
def private_codalab_wrapper_multi_classification(fct, variables_name, fold1, fold2, f1="answer.txt", f2="answer.txt",
output="scores.txt", use_print=False, ignored=None):
"""
Wraps the function following the guidelines
`User_Building a Scoring Program for a Competition <https://github.com/codalab/codalab-competitions/wiki/User_Building-a-Scoring-Program-for-a-Competition>`_.
It replicates the example available at
`competition-examples/hello_world <https://github.com/Tivix/competition-examples/tree/master/hello_world/competition>`_.
@param fct function to wrap
@param variables_name variables names
@param fold1 folder containing the truth file
@param fold2 folder containing the submitted answers
@param f1 filename for the truth
@param f2 filename for the produced answers
@param output output file receiving the computed results
@param use_print display intermediate results
@param ignored ignored labels
@return metric
"""
f1 = os.path.join(fold1, f1)
f2 = os.path.join(fold2, f2)
if not os.path.exists(f1):
raise FileNotFoundError("unable to find '{0}'".format(f1))
if not os.path.exists(f2):
raise FileNotFoundError("unable to find '{0}'".format(f2))
if f1 == f2:
raise ValueError(
"answers and scores are the same file: '{0}'".format(f1))
def pair_process(row):
for i in range(len(row) // 2, len(row)):
row[i] = float(row[i])
return row
with open(f1, "r") as f:
lines = f.readlines()
answers = [_.strip(" \r\n").split("\t") for _ in lines if _]
if use_print:
print("Reading answers:", f1, len(answers), "rows")
print("First answers:", answers[:10])
with open(f2, "r") as f:
lines = f.readlines()
scores = [pair_process(_.strip(" \r\n").split("\t")) for _ in lines if _]
if use_print:
print("Reading scores:", f1, len(scores), "rows")
print("First scores:", scores[:10])
metric = fct(len(variables_name), answers, scores, ignored=ignored)
all_names = []
for v in variables_name:
all_names.append("%s_ERR" % v)
all_names.append("%s_AUC" % v)
res = "\n".join(["{0}:{1}".format(mn, m)
for (mn, m) in zip(all_names, metric)])
if use_print:
print("Results=", res)
with open(output, "w") as f:
f.write(res)
if use_print:
print("Wrote", res, "in", output)
return metric
def main_codalab_wrapper_multi_classification(fct, variables_name, argv, truth_file="truth.txt",
submission_file="answer.txt", output_file="scores.txt"):
"""
adapt the template available at
`evaluate.py <https://github.com/Tivix/competition-examples/blob/master/hello_world/competition/scoring_program/evaluate.py>`_
"""
input_dir = argv[1]
output_dir = argv[2]
submit_dir = os.path.join(input_dir, 'res')
truth_dir = os.path.join(input_dir, 'ref')
if not os.path.isdir(submit_dir):
raise FileNotFoundError("%s doesn't exist" % submit_dir)
if os.path.isdir(submit_dir) and os.path.isdir(truth_dir):
if not os.path.exists(output_dir):
os.makedirs(output_dir)
private_codalab_wrapper_multi_classification(fct, variables_name,
fold1=truth_dir, f1=truth_file,
fold2=submit_dir, f2=submission_file,
output=os.path.join(
output_dir, output_file),
ignored=["nul"])
else:
raise FileNotFoundError(
"{0} or {1} is not a folder".format(submit_dir, truth_dir))
if __name__ == "__main__":
if len(sys.argv) < 3:
raise Exception("bad arguments: {0}".format(sys.argv))
main_codalab_wrapper_multi_classification(
AUC_multi_multi, ["orientation"], sys.argv)
|
|
#!/usr/bin/env python
import argparse
import cv2
from numpy import empty, nan
import os
import sys
import time
import CMT
import numpy as np
import util
CMT = CMT.CMT()
parser = argparse.ArgumentParser(description='Track an object.')
parser.add_argument('inputpath', nargs='?', help='The input path.')
parser.add_argument('--challenge', dest='challenge', action='store_true', help='Enter challenge mode.')
parser.add_argument('--preview', dest='preview', action='store_const', const=True, default=None, help='Force preview')
parser.add_argument('--no-preview', dest='preview', action='store_const', const=False, default=None, help='Disable preview')
parser.add_argument('--no-scale', dest='estimate_scale', action='store_false', help='Disable scale estimation')
parser.add_argument('--with-rotation', dest='estimate_rotation', action='store_true', help='Enable rotation estimation')
parser.add_argument('--bbox', dest='bbox', help='Specify initial bounding box.')
parser.add_argument('--pause', dest='pause', action='store_true', help='Pause after each frame until a key is pressed.')
parser.add_argument('--output-dir', dest='output', help='Specify a directory for output data.')
parser.add_argument('--quiet', dest='quiet', action='store_true', help='Do not show graphical output (Useful in combination with --output-dir ).')
parser.add_argument('--skip', dest='skip', action='store', default=None, help='Skip the first n frames', type=int)
args = parser.parse_args()
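# Illustrative invocations (the script file name "run.py" is assumed here):
#
#   python run.py                                   # track from the default camera
#   python run.py video.avi --bbox 10,10,100,100    # video file, preset bounding box
#                                                   # (four comma-separated integers)
#   python run.py video.avi --output-dir out/ --quiet   # dump per-frame PNG/CSV data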
CMT.estimate_scale = args.estimate_scale
CMT.estimate_rotation = args.estimate_rotation
if args.pause:
pause_time = 0
else:
pause_time = 10
if args.output is not None:
if not os.path.exists(args.output):
os.mkdir(args.output)
elif not os.path.isdir(args.output):
raise Exception(args.output + ' exists, but is not a directory')
if args.challenge:
with open('images.txt') as f:
images = [line.strip() for line in f]
init_region = np.genfromtxt('region.txt', delimiter=',')
num_frames = len(images)
results = empty((num_frames, 4))
results[:] = nan
results[0, :] = init_region
frame = 0
im0 = cv2.imread(images[frame])
im_gray0 = cv2.cvtColor(im0, cv2.COLOR_BGR2GRAY)
im_draw = np.copy(im0)
tl, br = (util.array_to_int_tuple(init_region[:2]), util.array_to_int_tuple(init_region[:2] + init_region[2:4]))
try:
CMT.initialise(im_gray0, tl, br)
while frame < num_frames:
im = cv2.imread(images[frame])
im_gray = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)
CMT.process_frame(im_gray)
results[frame, :] = CMT.bb
# Advance frame number
frame += 1
except:
pass # Swallow errors
np.savetxt('output.txt', results, delimiter=',')
else:
# Clean up
cv2.destroyAllWindows()
preview = args.preview
if args.inputpath is not None:
# If a path to a file was given, assume it is a single video file
if os.path.isfile(args.inputpath):
cap = cv2.VideoCapture(args.inputpath)
#Skip first frames if required
if args.skip is not None:
cap.set(cv2.cv.CV_CAP_PROP_POS_FRAMES, args.skip)
# Otherwise assume it is a format string for reading images
else:
cap = util.FileVideoCapture(args.inputpath)
#Skip first frames if required
if args.skip is not None:
cap.frame = 1 + args.skip
# By default do not show preview in both cases
if preview is None:
preview = False
else:
# If no input path was specified, open camera device
cap = cv2.VideoCapture(0)
if preview is None:
preview = True
# Check if videocapture is working
if not cap.isOpened():
print 'Unable to open video input.'
sys.exit(1)
while preview:
status, im = cap.read()
cv2.imshow('Preview', im)
k = cv2.waitKey(10)
if not k == -1:
break
# Read first frame
status, im0 = cap.read()
im_gray0 = cv2.cvtColor(im0, cv2.COLOR_BGR2GRAY)
im_draw = np.copy(im0)
if args.bbox is not None:
# Try to disassemble user specified bounding box
values = args.bbox.split(',')
try:
values = [int(v) for v in values]
except:
raise Exception('Unable to parse bounding box')
if len(values) != 4:
raise Exception('Bounding box must have exactly 4 elements')
bbox = np.array(values)
# Convert to point representation, adding singleton dimension
bbox = util.bb2pts(bbox[None, :])
# Squeeze
bbox = bbox[0, :]
tl = bbox[:2]
br = bbox[2:4]
else:
# Get rectangle input from user
(tl, br) = util.get_rect(im_draw)
print 'using', tl, br, 'as init bb'
CMT.initialise(im_gray0, tl, br)
frame = 1
while True:
# Read image
status, im = cap.read()
if not status:
break
im_gray = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)
im_draw = np.copy(im)
tic = time.time()
CMT.process_frame(im_gray)
toc = time.time()
# Display results
# Draw updated estimate
if CMT.has_result:
cv2.line(im_draw, CMT.tl, CMT.tr, (255, 0, 0), 4)
cv2.line(im_draw, CMT.tr, CMT.br, (255, 0, 0), 4)
cv2.line(im_draw, CMT.br, CMT.bl, (255, 0, 0), 4)
cv2.line(im_draw, CMT.bl, CMT.tl, (255, 0, 0), 4)
util.draw_keypoints(CMT.tracked_keypoints, im_draw, (255, 255, 255))
# this is from simplescale
util.draw_keypoints(CMT.votes[:, :2], im_draw) # blue
util.draw_keypoints(CMT.outliers[:, :2], im_draw, (0, 0, 255))
if args.output is not None:
# Original image
cv2.imwrite('{0}/input_{1:08d}.png'.format(args.output, frame), im)
# Output image
cv2.imwrite('{0}/output_{1:08d}.png'.format(args.output, frame), im_draw)
# Keypoints
with open('{0}/keypoints_{1:08d}.csv'.format(args.output, frame), 'w') as f:
f.write('x y\n')
np.savetxt(f, CMT.tracked_keypoints[:, :2], fmt='%.2f')
# Outlier
with open('{0}/outliers_{1:08d}.csv'.format(args.output, frame), 'w') as f:
f.write('x y\n')
np.savetxt(f, CMT.outliers, fmt='%.2f')
# Votes
with open('{0}/votes_{1:08d}.csv'.format(args.output, frame), 'w') as f:
f.write('x y\n')
np.savetxt(f, CMT.votes, fmt='%.2f')
# Bounding box
with open('{0}/bbox_{1:08d}.csv'.format(args.output, frame), 'w') as f:
f.write('x y\n')
# Duplicate entry tl is not a mistake, as it is used as a drawing instruction
np.savetxt(f, np.array((CMT.tl, CMT.tr, CMT.br, CMT.bl, CMT.tl)), fmt='%.2f')
if not args.quiet:
cv2.imshow('main', im_draw)
# Check key input
k = cv2.waitKey(pause_time)
key = chr(k & 255)
if key == 'q':
break
if key == 'd':
import ipdb; ipdb.set_trace()
# Remember image
im_prev = im_gray
# Advance frame number
frame += 1
print '{5:04d}: center: {0:.2f},{1:.2f} scale: {2:.2f}, active: {3:03d}, {4:04.0f}ms'.format(CMT.center[0], CMT.center[1], CMT.scale_estimate, CMT.active_keypoints.shape[0], 1000 * (toc - tic), frame)
|
|
#
# Copyright (c) 2014 Juniper Networks, Inc. All rights reserved.
#
"""
This file contains implementation of netconf interface for QFX physical router
configuration manager
"""
from db import *
from dm_utils import DMUtils
from juniper_conf import JuniperConf
from juniper_conf import JunosInterface
from device_api.juniper_common_xsd import *
import abc
class QfxConf(JuniperConf):
_FAMILY_MAP = {
'route-target': '',
'e-vpn': FamilyEvpn(signaling='')
}
@classmethod
def is_product_supported(cls, name, role):
if role and role.lower().startswith('e2-'):
return False
for product in cls._products or []:
if name.lower().startswith(product.lower()):
return True
return False
# end is_product_supported
def __init__(self):
super(QfxConf, self).__init__()
self.evpn = None
self.global_switch_options_config = None
self.vlans_config = None
# end __init__
def is_spine(self):
if self.physical_router.physical_router_role == 'spine':
return True
return False
# end is_spine
def initialize(self):
self.evpn = None
self.global_switch_options_config = None
self.chassis_config = None
self.vlans_config = None
self.irb_interfaces = []
self.internal_vn_ris = []
super(QfxConf, self).initialize()
# end initialize
def add_families(self, parent, params):
if params.get('address_families') is None:
return
families = params['address_families'].get('family', [])
if not families:
return
family_etree = Family()
parent.set_family(family_etree)
for family in families:
fam = family.replace('-', '_')
if family in ['e-vpn', 'e_vpn']:
fam = 'evpn'
if family in self._FAMILY_MAP:
getattr(family_etree, "set_" + fam)(self._FAMILY_MAP[family])
else:
self._logger.info("DM does not support address family: %s on QFX" % fam)
# end add_families
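# For example (illustrative values only), params might look like
#   {'address_families': {'family': ['route-target', 'e-vpn']}}
# which would emit both supported families; any family not present in
# _FAMILY_MAP is only logged and skipped.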
def attach_irb(self, ri_conf, ri):
if not self.is_spine():
return
is_l2 = ri_conf.get("is_l2", False)
is_l2_l3 = ri_conf.get("is_l2_l3", False)
vni = ri_conf.get("vni", None)
network_id = ri_conf.get("network_id", None)
if (is_l2 and vni is not None and
self.is_family_configured(self.bgp_params, "e-vpn")):
if is_l2_l3:
self.irb_interfaces.append("irb." + str(network_id))
# end attach_irb
def set_internal_vn_irb_config(self):
if self.internal_vn_ris and self.irb_interfaces:
for int_ri in self.internal_vn_ris:
lr_uuid = DMUtils.extract_lr_uuid_from_internal_vn_name(int_ri.name)
lr = LogicalRouterDM.get(lr_uuid)
if not lr:
continue
vn_list = lr.get_connected_networks(include_internal=False)
for vn in vn_list:
vn_obj = VirtualNetworkDM.get(vn)
irb_name = "irb." + str(vn_obj.vn_network_id)
if irb_name in self.irb_interfaces:
int_ri.add_interface(Interface(name=irb_name))
# end set_internal_vn_irb_config
def add_irb_config(self, ri_conf):
vn = ri_conf.get("vn")
is_l2 = ri_conf.get("is_l2", False)
is_l2_l3 = ri_conf.get("is_l2_l3", False)
gateways = ri_conf.get("gateways", [])
vni = ri_conf.get("vni", None)
network_id = ri_conf.get("network_id", None)
interfaces_config = self.interfaces_config or \
Interfaces(comment=DMUtils.interfaces_comment())
self.interfaces_config = interfaces_config
irb_intf = Interface(name='irb', gratuitous_arp_reply='')
interfaces_config.add_interface(irb_intf)
self._logger.info("Vn=" + vn.name + ", IRB: " + str(gateways) + ", pr=" + self.physical_router.name)
if gateways is not None:
intf_unit = Unit(name=str(network_id),
comment=DMUtils.vn_irb_comment(vn, False, is_l2_l3))
irb_intf.add_unit(intf_unit)
if self.is_spine():
intf_unit.set_proxy_macip_advertisement('')
family = Family()
intf_unit.set_family(family)
inet = None
inet6 = None
for (irb_ip, gateway) in gateways:
if ':' in irb_ip:
if not inet6:
inet6 = FamilyInet6()
family.set_inet6(inet6)
addr = Address()
inet6.add_address(addr)
else:
if not inet:
inet = FamilyInet()
family.set_inet(inet)
addr = Address()
inet.add_address(addr)
addr.set_name(irb_ip)
addr.set_comment(DMUtils.irb_ip_comment(irb_ip))
if len(gateway) and gateway != '0.0.0.0':
addr.set_virtual_gateway_address(gateway)
# end add_irb_config
# A lo0 interface is added to the RI so that route lookup happens for
# inter-VN traffic (QFX10k PFE limitation).
def add_bogus_lo0(self, ri, network_id, vn):
if not self.is_spine():
return
interfaces_config = self.interfaces_config or \
Interfaces(comment=DMUtils.interfaces_comment())
ifl_num = str(1000 + int(network_id))
lo_intf = Interface(name="lo0")
interfaces_config.add_interface(lo_intf)
intf_unit = Unit(name=ifl_num, comment=DMUtils.l3_bogus_lo_intf_comment(vn))
lo_intf.add_unit(intf_unit)
family = Family()
intf_unit.set_family(family)
inet = FamilyInet()
family.set_inet(inet)
addr = Address()
inet.add_address(addr)
lo_ip = "127.0.0.1/32"
addr.set_name(lo_ip)
ri.add_interface(Interface(name="lo0." + ifl_num))
self.interfaces_config = interfaces_config
# end add_bogus_lo0
'''
ri_name: routing instance name to be configured on the device
is_l2: a flag indicating the routing instance type, i.e. l2 or l3
is_l2_l3: whether the VN forwarding mode is 'l2_l3'
import/export targets: routing instance import, export targets
prefixes: for l3 vrf static routes
gateways: for l2 evpn
interfaces: logical interfaces to be part of vrf
network_id: used for configuring irb interfaces
'''
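# For illustration, a ri_conf dictionary as consumed below might look like this
# (all values are made up; "vn" is the virtual-network object from the DB layer):
#
#   ri_conf = {
#       "ri_name": "_contrail_blue-l3", "vn": vn_obj,
#       "is_l2": False, "is_l2_l3": True,
#       "import_targets": {"target:64512:8000001"},
#       "export_targets": {"target:64512:8000001"},
#       "prefixes": ["10.0.0.0/24"],
#       "gateways": [("10.0.0.1/24", "10.0.0.1")],
#       "interfaces": [], "vni": 5001, "network_id": 5,
#   }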
def add_routing_instance(self, ri_conf):
ri_name = ri_conf.get("ri_name")
vn = ri_conf.get("vn")
is_l2 = ri_conf.get("is_l2", False)
is_l2_l3 = ri_conf.get("is_l2_l3", False)
import_targets = ri_conf.get("import_targets", set())
export_targets = ri_conf.get("export_targets", set())
prefixes = ri_conf.get("prefixes", [])
gateways = ri_conf.get("gateways", [])
interfaces = ri_conf.get("interfaces", [])
vni = ri_conf.get("vni", None)
network_id = ri_conf.get("network_id", None)
is_internal_vn = True if '_contrail_lr_internal_vn_' in vn.name else False
self.routing_instances[ri_name] = ri_conf
ri_config = None
policy_config = self.policy_config or \
PolicyOptions(comment=DMUtils.policy_options_comment())
ri = None
ri_opt = None
ri_config = self.ri_config or \
RoutingInstances(comment=DMUtils.routing_instances_comment())
ri = Instance(name=ri_name)
if not is_l2:
ri_config.add_instance(ri)
ri.set_vrf_import(DMUtils.make_import_name(ri_name))
ri.set_vrf_export(DMUtils.make_export_name(ri_name))
has_ipv6_prefixes = DMUtils.has_ipv6_prefixes(prefixes)
has_ipv4_prefixes = DMUtils.has_ipv4_prefixes(prefixes)
if not is_l2:
if ri_opt is None:
ri_opt = RoutingInstanceRoutingOptions()
ri.set_routing_options(ri_opt)
ri.set_instance_type("vrf")
for interface in interfaces:
ri.add_interface(Interface(name=interface.name))
family = Family()
if has_ipv4_prefixes:
family.set_inet(FamilyInet(unicast=''))
if has_ipv6_prefixes:
family.set_inet6(FamilyInet6(unicast=''))
if has_ipv4_prefixes or has_ipv6_prefixes:
auto_export = AutoExport(family=family)
ri_opt.set_auto_export(auto_export)
if is_internal_vn:
self.internal_vn_ris.append(ri)
self.add_bogus_lo0(ri, network_id, vn)
if self.is_spine() and is_l2_l3:
self.add_irb_config(ri_conf)
self.attach_irb(ri_conf, ri)
lr_uuid = None
if is_internal_vn:
lr_uuid = DMUtils.extract_lr_uuid_from_internal_vn_name(ri_name)
# add policies for export route targets
if self.is_spine():
ps = PolicyStatement(name=DMUtils.make_export_name(ri_name))
ps.set_comment(DMUtils.vn_ps_comment(vn, "Export"))
then = Then()
ps.add_term(Term(name="t1", then=then))
for route_target in export_targets:
comm = Community(add='',
community_name=DMUtils.make_community_name(route_target))
then.add_community(comm)
then.set_accept('')
policy_config.add_policy_statement(ps)
self.add_to_global_switch_opts(DMUtils.make_export_name(ri_name), False)
# add policies for import route targets
ps = PolicyStatement(name=DMUtils.make_import_name(ri_name))
ps.set_comment(DMUtils.vn_ps_comment(vn, "Import"))
# add term switch policy
from_ = From()
term = Term(name=DMUtils.get_switch_policy_name(), fromxx=from_)
ps.add_term(term)
from_.add_community(DMUtils.get_switch_policy_name())
term.set_then(Then(accept=''))
from_ = From()
term = Term(name="t1", fromxx=from_)
ps.add_term(term)
for route_target in import_targets:
from_.add_community(DMUtils.make_community_name(route_target))
if not is_internal_vn:
self.add_vni_option(vni or network_id, route_target)
term.set_then(Then(accept=''))
policy_config.add_policy_statement(ps)
self.add_to_global_switch_opts(DMUtils.make_import_name(ri_name), True)
# add L2 EVPN and BD config
interfaces_config = self.interfaces_config
if (is_l2 and vni is not None and
self.is_family_configured(self.bgp_params, "e-vpn")):
# add vlan config
vlan_conf = self.add_vlan_config(ri_name, vni, is_l2_l3, "irb." + str(network_id))
interfaces_config = self.interfaces_config or Interfaces(comment=DMUtils.interfaces_comment())
self.build_l2_evpn_interface_config(interfaces_config,
interfaces, vn, vlan_conf)
if (not is_l2 and (vni is not None or (is_internal_vn and lr_uuid)) and \
self.is_family_configured(self.bgp_params, "e-vpn")):
evpn = self.build_evpn_config(int_vn = is_internal_vn)
if evpn:
ri.set_protocols(RoutingInstanceProtocols(evpn=evpn))
if is_internal_vn and lr_uuid:
ip_prefix_support = IpPrefixSupport()
#ip_prefix_support.set_forwarding_mode("symmetric")
ip_prefix_support.set_encapsulation("vxlan")
ip_prefix_support.set_vni(str(vni))
ip_prefix_support.set_advertise("direct-nexthop")
evpn.set_ip_prefix_support(ip_prefix_support)
else:
ri.set_vtep_source_interface("lo0.0")
if not is_internal_vn:
#add vlans
self.add_ri_vlan_config(ri_name, ri, vni)
if (not is_l2 and not is_l2_l3 and gateways):
interfaces_config = self.interfaces_config or \
Interfaces(comment=DMUtils.interfaces_comment())
ifl_num = str(1000 + int(network_id))
lo_intf = Interface(name="lo0")
interfaces_config.add_interface(lo_intf)
intf_unit = Unit(name=ifl_num, comment=DMUtils.l3_lo_intf_comment(vn))
lo_intf.add_unit(intf_unit)
family = Family()
intf_unit.set_family(family)
inet = None
inet6 = None
for (lo_ip, _) in gateways:
subnet = lo_ip
(ip, _) = lo_ip.split('/')
if ':' in lo_ip:
if not inet6:
inet6 = FamilyInet6()
family.set_inet6(inet6)
addr = Address()
inet6.add_address(addr)
lo_ip = ip + '/' + '128'
else:
if not inet:
inet = FamilyInet()
family.set_inet(inet)
addr = Address()
inet.add_address(addr)
lo_ip = ip + '/' + '32'
addr.set_name(lo_ip)
addr.set_comment(DMUtils.lo0_ip_comment(subnet))
ri.add_interface(Interface(name="lo0." + ifl_num,
comment=DMUtils.lo0_ri_intf_comment(vn)))
self.policy_config = policy_config
self.interfaces_config = interfaces_config
self.route_targets |= import_targets | export_targets
self.ri_config = ri_config
# end add_routing_instance
def attach_acls(self, interface, unit):
if self.is_spine() or not interface.li_uuid:
return
interface = LogicalInterfaceDM.find_by_name_or_uuid(interface.li_uuid)
if not interface:
return
sg_list = interface.get_attached_sgs()
filter_list = []
for sg in sg_list:
flist = self.get_configured_filters(sg)
filter_list += flist
if filter_list:
ethernet = FamilyEthernet()
efilter = EthernetFilter()
for fname in filter_list:
efilter.add_input_list(fname)
ethernet.set_filter(efilter)
unit.set_family(Family(ethernet_switching=ethernet))
# end attach_acls
def build_l2_evpn_interface_config(self, interfaces_config, interfaces, vn, vlan_conf):
ifd_map = {}
for interface in interfaces:
ifd_map.setdefault(interface.ifd_name, []).append(interface)
for ifd_name, interface_list in ifd_map.items():
intf = Interface(name=ifd_name)
interfaces_config.add_interface(intf)
intf.set_flexible_vlan_tagging('')
intf.set_encapsulation("extended-vlan-bridge")
if interface_list[0].is_untagged():
if (len(interface_list) > 1):
self._logger.error(
"invalid logical interfaces config for ifd %s" % (
ifd_name))
continue
unit = Unit(name=interface_list[0].unit,
comment=DMUtils.l2_evpn_intf_unit_comment(vn, False),
vlan_id="4094")
# attach acls
self.attach_acls(interface_list[0], unit)
intf.add_unit(unit)
intf.set_native_vlan_id("4094")
vlan_conf.add_interface(Interface(name=ifd_name + ".0"))
else:
for interface in interface_list:
unit = Unit(name=interface.unit,
comment=DMUtils.l2_evpn_intf_unit_comment(vn,
True, interface.vlan_tag),
vlan_id=str(interface.vlan_tag))
# attach acls
self.attach_acls(interface, unit)
intf.add_unit(unit)
vlan_conf.add_interface(Interface(name=ifd_name + "." + str(interface.unit)))
# end build_l2_evpn_interface_config
@abc.abstractmethod
def build_evpn_config(self, int_vn=False):
"""build evpn config depending on qfx model"""
# end build_evpn_config
def init_evpn_config(self):
if not self.routing_instances:
# no vn config then no need to configure evpn
return
if self.evpn:
# evpn init done
return
self.evpn = self.build_evpn_config()
self.evpn.set_extended_vni_list('all')
if not self.is_spine():
self.evpn.set_multicast_mode("ingress-replication")
if not self.proto_config:
self.proto_config = Protocols(comment=DMUtils.protocols_comment())
self.proto_config.set_evpn(self.evpn)
# end init_evpn_config
def add_vni_option(self, vni, vrf_target):
if not self.evpn:
self.init_evpn_config()
vni_options = self.evpn.get_vni_options()
if not vni_options:
vni_options = VniOptions()
self.evpn.set_extended_vni_list("all")
vni_options.add_vni(Vni(name=str(vni), vrf_target=VniTarget(community=vrf_target)))
self.evpn.set_vni_options(vni_options)
def init_global_switch_opts(self):
if self.global_switch_options_config is None:
self.global_switch_options_config = SwitchOptions(comment=DMUtils.switch_options_comment())
self.global_switch_options_config.set_vtep_source_interface("lo0.0")
if not self.routing_instances:
# no vn config then no need to configure vrf target
return
self.global_switch_options_config.add_vrf_target(VniTarget(auto=''))
switch_options_community = DMUtils.get_switch_vrf_import(self.get_asn())
self.global_switch_options_config.add_vrf_target(VniTarget(community=switch_options_community))
self.set_global_export_policy()
# end init_global_switch_opts
def set_global_export_policy(self):
if self.is_spine():
return
export_policy = DMUtils.get_switch_export_policy_name()
ps = PolicyStatement(name=export_policy)
ps.set_comment(DMUtils.switch_export_policy_comment())
export_community = DMUtils.get_switch_export_community_name()
then = Then()
comm = Community(add='', community_name=export_community)
then.add_community(comm)
ps.add_term(Term(name="t1", then=then))
if not self.policy_config:
self.policy_config = PolicyOptions(comment=DMUtils.policy_options_comment())
self.policy_config.add_policy_statement(ps)
if not self.global_switch_options_config:
self.global_switch_options_config = SwitchOptions(comment=DMUtils.switch_options_comment())
self.global_switch_options_config.add_vrf_export(export_policy)
# end set_global_export_policy
def add_to_global_switch_opts(self, policy, is_import):
if not self.global_switch_options_config:
self.init_global_switch_opts()
if is_import:
self.global_switch_options_config.add_vrf_import(policy)
else:
self.global_switch_options_config.add_vrf_export(policy)
# end add_to_global_switch_opts
def set_route_targets_config(self):
if self.policy_config is None:
self.policy_config = PolicyOptions(comment=DMUtils.policy_options_comment())
# add export community
export_comm = CommunityType(name=DMUtils.get_switch_export_community_name())
for route_target in self.route_targets:
comm = CommunityType(name=DMUtils.make_community_name(route_target))
comm.add_members(route_target)
self.policy_config.add_community(comm)
# add route-targets to export community
export_comm.add_members(route_target)
# if no members, no need to add community
if export_comm.get_members():
self.policy_config.add_community(export_comm)
# add community for switch options
comm = CommunityType(name=DMUtils.get_switch_policy_name())
comm.add_members(DMUtils.get_switch_vrf_import(self.get_asn()))
self.policy_config.add_community(comm)
# end set_route_targets_config
def add_vlan_config(self, vrf_name, vni, is_l2_l3=False, irb_intf=None):
if not self.vlans_config:
self.vlans_config = Vlans(comment=DMUtils.vlans_comment())
vxlan = VXLan(vni=vni)
vlan = Vlan(name=vrf_name[1:], vxlan=vxlan)
if is_l2_l3 and self.is_spine():
if not irb_intf:
self._logger.error("Missing irb interface config l3 vlan: %s" % vrf_name)
else:
vlan.set_vlan_id(str(vni))
vlan.set_l3_interface(irb_intf)
self.vlans_config.add_vlan(vlan)
return vlan
# end add_vlan_config
def add_ri_vlan_config(self, vrf_name, ri, vni):
    # vrf_name is the routing-instance name passed in by add_routing_instance
    vxlan = VXLan(vni=vni)
    vlan = Vlan(name=vrf_name[1:], vlan_id=str(vni), vxlan=vxlan)
vlans = ri.get_vlans()
if not vlans:
vlans = Vlans()
vlans.add_vlan(vlan)
ri.set_vlans(vlans)
# end add_ri_vlan_config
# Product Specific configuration, called from parent class
def add_product_specific_config(self, groups):
groups.set_switch_options(self.global_switch_options_config)
if self.vlans_config:
groups.set_vlans(self.vlans_config)
if self.chassis_config:
groups.set_chassis(self.chassis_config)
# end add_product_specific_config
def set_route_distinguisher_config(self):
if not self.routing_instances or not self.bgp_params.get('identifier'):
# no vn config then no need to configure route distinguisher
return
if self.global_switch_options_config is None:
self.global_switch_options_config = SwitchOptions(comment=DMUtils.switch_options_comment())
self.global_switch_options_config.set_route_distinguisher(
RouteDistinguisher(rd_type=self.bgp_params['identifier'] + ":1"))
# end set_route_distinguisher_config
# def build_esi_config(self):
# pr = self.physical_router
# if not pr or self.is_spine():
# return
# if not self.interfaces_config:
# self.interfaces_config = Interfaces(comment=DMUtils.interfaces_comment())
# for pi_uuid in pr.physical_interfaces:
# pi = PhysicalInterfaceDM.get(pi_uuid)
# if not pi or not pi.esi or pi.esi == "0" or pi.get_parent_ae_id():
# continue
# esi_conf = Esi(identifier=pi.esi, all_active='')
# intf = Interface(name=pi.name, esi=esi_conf)
# self.interfaces_config.add_interface(intf)
# # add ae interfaces
# # self.ae_id_map should have all esi => ae_id mapping
# for esi, ae_id in self.physical_router.ae_id_map.items():
# esi_conf = Esi(identifier=esi, all_active='')
# intf = Interface(name="ae" + str(ae_id), esi=esi_conf)
# self.interfaces_config.add_interface(intf)
# end build_esi_config
def get_vn_li_map(self):
pr = self.physical_router
vn_list = []
# get all logical router connected networks
for lr_id in pr.logical_routers or []:
lr = LogicalRouterDM.get(lr_id)
if not lr:
continue
vn_list += lr.get_connected_networks(include_internal=True)
vn_dict = {}
for vn_id in vn_list:
vn_dict[vn_id] = []
li_set = pr.logical_interfaces
for pi_uuid in pr.physical_interfaces:
pi = PhysicalInterfaceDM.get(pi_uuid)
if pi is None:
continue
li_set |= pi.logical_interfaces
for li_uuid in li_set:
li = LogicalInterfaceDM.get(li_uuid)
if li is None:
continue
vmi_id = li.virtual_machine_interface
vmi = VirtualMachineInterfaceDM.get(vmi_id)
if vmi is None:
continue
vn_id = vmi.virtual_network
if li.physical_interface:
pi = PhysicalInterfaceDM.get(li.physical_interface)
# ae_id = pi.get_parent_ae_id()
# if ae_id and li.physical_interface:
# _, unit= li.name.split('.')
# ae_name = "ae" + str(ae_id) + "." + unit
# vn_dict.setdefault(vn_id, []).append(
# JunosInterface(ae_name, li.li_type, li.vlan_tag))
# continue
vn_dict.setdefault(vn_id, []).append(
JunosInterface(li.name, li.li_type, li.vlan_tag, li_uuid=li.uuid))
return vn_dict
# end
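# Illustrative shape of the returned mapping (UUIDs, interface names and li_type
# values are made up for illustration):
# {
#   '<vn-uuid>': [JunosInterface('xe-0/0/1.0', 'l2', 100, li_uuid='<li-uuid>'), ...],
#   ...
# }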
def get_vn_associated_physical_interfaces(self):
pr = self.physical_router
li_set = set()
pi_list = []
for pi_uuid in pr.physical_interfaces:
pi = PhysicalInterfaceDM.get(pi_uuid)
if pi is None or not pi.esi or pi.esi == "0":
continue
if self.has_vmi(pi.logical_interfaces):
pi_list.append(pi)
return pi_list
# end get_vn_associated_physical_interfaces
def has_vmi(self, li_set):
if not li_set:
return False
for li_uuid in li_set:
li = LogicalInterfaceDM.get(li_uuid)
if not li or not li.virtual_machine_interface \
or not VirtualMachineInterfaceDM.get(li.virtual_machine_interface):
continue
return True
return False
# end has_vmi
def get_ae_alloc_esi_map(self):
pi_list = self.get_vn_associated_physical_interfaces()
esi_map = {}
for pi in pi_list:
if not pi.name.startswith("ae") and pi.esi:
esi_map.setdefault(pi.esi, []).append(pi)
return esi_map
# end get_ae_alloc_esi_map
def is_l2_supported(self, vn):
""" Check l2 capability """
return True
# end is_l2_supported
@abc.abstractmethod
def is_l3_supported(self, vn):
""" Check l3 capability """
return False
# end is_l3_supported
def set_resolve_bgp_route_target_family_config(self):
""" configure resolution config in global routing options if needed """
if not self.global_routing_options_config:
self.global_routing_options_config = RoutingOptions(
comment=DMUtils.routing_options_comment())
resolve = Resolution(rib=RIB(name="bgp.rtarget.0",
resolution_ribs="inet.0"))
self.global_routing_options_config.set_resolution(resolve)
# end set_resolve_bgp_route_target_family_config
def set_chassis_config(self):
device_count = DMUtils.get_max_ae_device_count()
aggr_devices = AggregatedDevices(Ethernet(device_count=device_count))
if not self.chassis_config:
self.chassis_config = Chassis()
self.chassis_config.set_aggregated_devices(aggr_devices)
# end set_chassis_config
def build_ae_config(self, esi_map):
if esi_map:
self.set_chassis_config()
interfaces_config = self.interfaces_config or \
Interfaces(comment=DMUtils.interfaces_comment())
# self.ae_id_map should have all esi => ae_id mapping
# esi_map should have esi => interface memberships
for esi, ae_id in self.physical_router.ae_id_map.items():
# config ae interface
ae_name = "ae" + str(ae_id)
intf = Interface(name=ae_name)
interfaces_config.add_interface(intf)
priority = DMUtils.lacp_system_priority()
system_id = esi[-17:] #last 17 characters from esi, e.g. 00:00:00:00:00:05
lacp = Lacp(active='', system_priority=priority, \
system_id=system_id, admin_key=1)
intf.set_aggregated_ether_options(AggregatedEtherOptions(lacp=lacp))
# associate 'ae' membership
pi_list = esi_map.get(esi)
for pi in pi_list or []:
intf = Interface(name=pi.name)
interfaces_config.add_interface(intf)
ether_options = EtherOptions(ieee_802_3ad=Ieee802(bundle=ae_name))
intf.set_gigether_options(ether_options)
self.interfaces_config = interfaces_config
# end build_ae_config
def add_addr_term(self, term, addr_match, is_src):
if not addr_match:
return None
subnet = addr_match.get_subnet()
if not subnet:
return None
subnet_ip = subnet.get_ip_prefix()
subnet_len = subnet.get_ip_prefix_len()
if not subnet_ip or not subnet_len:
return None
from_ = term.get_from() or From()
term.set_from(from_)
if is_src:
from_.add_ip_source_address(str(subnet_ip) + "/" + str(subnet_len))
else:
from_.add_ip_destination_address(str(subnet_ip) + "/" + str(subnet_len))
# end add_addr_term
def add_port_term(self, term, port_match, is_src):
if not port_match:
return None
start_port = port_match.get_start_port()
end_port = port_match.get_end_port()
if not start_port or not end_port:
return None
port_str = str(start_port) + "-" + str(end_port)
from_ = term.get_from() or From()
term.set_from(from_)
if is_src:
from_.add_source_port(port_str)
else:
from_.add_destination_port(port_str)
# end add_port_term
def add_filter_term(self, ff, name):
term = Term()
term.set_name(name)
ff.add_term(term)
term.set_then(Then(accept=''))
return term
def add_protocol_term(self, term, protocol_match):
if not protocol_match or protocol_match == 'any':
return None
from_ = term.get_from() or From()
term.set_from(from_)
from_.set_ip_protocol(protocol_match)
# end add_protocol_term
def add_dns_dhcp_terms(self, ff):
port_list = [67, 68, 53]
term = Term()
term.set_name("allow-dns-dhcp")
from_ = From()
from_.set_ip_protocol("udp")
term.set_from(from_)
for port in port_list:
from_.add_source_port(str(port))
term.set_then(Then(accept=''))
ff.add_term(term)
# end add_dns_dhcp_terms
def add_ether_type_term(self, ff, ether_type_match):
if not ether_type_match:
return None
term = Term()
from_ = From()
term.set_from(from_)
term.set_name("ether-type")
from_.set_ether_type(ether_type_match.lower())
term.set_then(Then(accept=''))
ff.add_term(term)
# end add_ether_type_term
def build_firewall_filters(self, sg, acl, is_egress=False):
if self.is_spine():
return
if not sg or not acl or not acl.vnc_obj:
return
acl = acl.vnc_obj
entries = acl.get_access_control_list_entries()
if not entries:
return
rules = entries.get_acl_rule() or []
if not rules:
return
firewall_config = self.firewall_config or Firewall(DMUtils.firewall_comment())
ff = firewall_config.get_family() or FirewallFamily()
firewall_config.set_family(ff)
eswitching = ff.get_ethernet_switching() or FirewallEthernet()
ff.set_ethernet_switching(eswitching)
for rule in rules:
if not self.has_terms(rule):
continue
match = rule.get_match_condition()
if not match:
continue
rule_uuid = rule.get_rule_uuid()
dst_addr_match = match.get_dst_address()
dst_port_match = match.get_dst_port()
ether_type_match = match.get_ethertype()
protocol_match = match.get_protocol()
src_addr_match = match.get_src_address()
src_port_match = match.get_src_port()
filter_name = DMUtils.make_sg_filter_name(sg.name, ether_type_match, rule_uuid)
f = FirewallFilter(name=filter_name)
f.set_comment(DMUtils.sg_firewall_comment(sg.name, ether_type_match, rule_uuid))
# allow arp ether type always
self.add_ether_type_term(f, 'arp')
# allow dhcp/dns always
self.add_dns_dhcp_terms(f)
default_term = self.add_filter_term(f, "default-term")
self.add_addr_term(default_term, dst_addr_match, False)
self.add_addr_term(default_term, src_addr_match, True)
self.add_port_term(default_term, dst_port_match, False)
# source port match is not needed for now (BMS source port)
#self.add_port_term(default_term, src_port_match, True)
self.add_protocol_term(default_term, protocol_match)
eswitching.add_filter(f)
if not eswitching.get_filter():
ff.set_ethernet_switching(None)
self.firewall_config = firewall_config
# end build_firewall_filters
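# Each generated filter (named via DMUtils.make_sg_filter_name) carries three terms,
# built by the helpers above: an 'ether-type' term accepting ARP, an 'allow-dns-dhcp'
# term accepting UDP source ports 67/68/53, and a 'default-term' built from the rule's
# address/port/protocol match condition. This is a summary of the code, not rendered
# Junos output.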
def build_firewall_config(self):
if self.is_spine():
return
sg_list = LogicalInterfaceDM.get_sg_list()
for sg in sg_list or []:
acls = sg.access_control_lists
for acl in acls or []:
acl = AccessControlListDM.get(acl)
if acl and not acl.is_ingress:
self.build_firewall_filters(sg, acl)
# end build_firewall_config
def is_default_sg(self, match):
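# A "default" SG rule needs no explicit filter. Per the checks below, it is a rule
# matching destination 0.0.0.0/0 or ::/0, the full 0-65535 port range on both source
# and destination, ethertype IPv4 or IPv6, no source subnet(s), and protocol 'any'.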
if (not match.get_dst_address()) or \
(not match.get_dst_port()) or \
(not match.get_ethertype()) or \
(not match.get_src_address()) or \
(not match.get_src_port()) or \
(not match.get_protocol()):
return False
if not match.get_dst_address().get_subnet():
return False
if ((str(match.get_dst_address().get_subnet().get_ip_prefix()) == "0.0.0.0") or \
(str(match.get_dst_address().get_subnet().get_ip_prefix()) == "::")) and \
(str(match.get_dst_address().get_subnet().get_ip_prefix_len()) == "0") and \
(str(match.get_dst_port().get_start_port()) == "0") and \
(str(match.get_dst_port().get_end_port()) == "65535") and \
((str(match.get_ethertype()) == "IPv4") or \
(str(match.get_ethertype()) == "IPv6")) and \
(not match.get_src_address().get_subnet()) and \
(not match.get_src_address().get_subnet_list()) and \
(str(match.get_src_port().get_start_port()) == "0") and \
(str(match.get_src_port().get_end_port()) == "65535") and \
(str(match.get_protocol()) == "any"):
return True
return False
# end is_default_sg
def has_terms(self, rule):
match = rule.get_match_condition()
if not match:
return False
# return False if it is default SG, no filter is applied
if self.is_default_sg(match):
return False
return match.get_dst_address() or match.get_dst_port() or \
match.get_ethertype() or match.get_src_address() or match.get_src_port() or \
(match.get_protocol() and match.get_protocol() != 'any')
def get_firewall_filters(self, sg, acl, is_egress=False):
if not sg or not acl or not acl.vnc_obj:
return []
acl = acl.vnc_obj
entries = acl.get_access_control_list_entries()
if not entries:
return []
rules = entries.get_acl_rule() or []
if not rules:
return []
filter_names = []
for rule in rules:
if not self.has_terms(rule):
continue
match = rule.get_match_condition()
if not match:
continue
rule_uuid = rule.get_rule_uuid()
ether_type_match = match.get_ethertype()
if not ether_type_match:
continue
if 'ipv6' in ether_type_match.lower():
continue
filter_name = DMUtils.make_sg_filter_name(sg.name, ether_type_match, rule_uuid)
filter_names.append(filter_name)
return filter_names
# end get_firewall_filters
def get_configured_filters(self, sg):
if not sg:
return []
filter_names = []
acls = sg.access_control_lists
for acl in acls or []:
acl = AccessControlListDM.get(acl)
if acl and not acl.is_ingress:
fnames = self.get_firewall_filters(sg, acl)
filter_names += fnames
return filter_names
# end get_configured_filters
def build_ri_config(self):
# if not self.is_spine():
# esi_map = self.get_ae_alloc_esi_map()
# self.physical_router.evaluate_ae_id_map(esi_map)
# self.build_ae_config(esi_map)
vn_dict = self.get_vn_li_map()
vn_irb_ip_map = None
if self.is_spine():
self.physical_router.evaluate_vn_irb_ip_map(set(vn_dict.keys()), 'l2_l3', 'irb', False)
self.physical_router.evaluate_vn_irb_ip_map(set(vn_dict.keys()), 'l3', 'lo0', True)
vn_irb_ip_map = self.physical_router.get_vn_irb_ip_map()
for vn_id, interfaces in vn_dict.items():
vn_obj = VirtualNetworkDM.get(vn_id)
if (vn_obj is None or
vn_obj.get_vxlan_vni() is None or
vn_obj.vn_network_id is None):
continue
export_set = None
import_set = None
for ri_id in vn_obj.routing_instances:
# Find the primary RI by matching the name
ri_obj = RoutingInstanceDM.get(ri_id)
if ri_obj is None:
continue
if ri_obj.fq_name[-1] == vn_obj.fq_name[-1]:
vrf_name_l2 = DMUtils.make_vrf_name(vn_obj.fq_name[-1],
vn_obj.vn_network_id, 'l2')
vrf_name_l3 = DMUtils.make_vrf_name(vn_obj.fq_name[-1],
vn_obj.vn_network_id, 'l3')
export_set = copy.copy(ri_obj.export_targets)
import_set = copy.copy(ri_obj.import_targets)
if self.is_spine():
for ri2_id in ri_obj.routing_instances:
ri2 = RoutingInstanceDM.get(ri2_id)
if ri2 is None:
continue
import_set |= ri2.export_targets
if vn_obj.get_forwarding_mode() in ['l2', 'l2_l3']:
irb_ips = None
if vn_obj.get_forwarding_mode() == 'l2_l3' and self.is_spine():
irb_ips = vn_irb_ip_map['irb'].get(vn_id, [])
ri_conf = { 'ri_name': vrf_name_l2, 'vn': vn_obj }
ri_conf['is_l2'] = True
ri_conf['is_l2_l3'] = (vn_obj.get_forwarding_mode() == 'l2_l3')
ri_conf['import_targets'] = import_set
if self.is_spine():
ri_conf['export_targets'] = export_set
ri_conf['prefixes'] = vn_obj.get_prefixes()
ri_conf['gateways'] = irb_ips
ri_conf['interfaces'] = interfaces
ri_conf['vni'] = vn_obj.get_vxlan_vni()
ri_conf['network_id'] = vn_obj.vn_network_id
self.add_routing_instance(ri_conf)
is_internal_vn = '_contrail_lr_internal_vn_' in vn_obj.name
if vn_obj.get_forwarding_mode() in ['l3'] and self.is_l3_supported(vn_obj):
interfaces = []
lo0_ips = vn_irb_ip_map['lo0'].get(vn_id, [])
ri_conf = { 'ri_name': vrf_name_l3, 'vn': vn_obj }
ri_conf['is_l2'] = False
ri_conf['is_l2_l3'] = False
ri_conf['import_targets'] = import_set
ri_conf['export_targets'] = export_set
ri_conf['prefixes'] = vn_obj.get_prefixes()
ri_conf['interfaces'] = interfaces
if is_internal_vn:
ri_conf['vni'] = vn_obj.get_vxlan_vni(is_internal_vn=is_internal_vn)
ri_conf['gateways'] = lo0_ips
ri_conf['network_id'] = vn_obj.vn_network_id
self.add_routing_instance(ri_conf)
break
return
# end build_ri_config
def set_qfx_common_config(self):
self.build_bgp_config()
self.build_ri_config()
self.set_internal_vn_irb_config()
self.init_evpn_config()
self.build_firewall_config()
self.init_global_switch_opts()
self.set_resolve_bgp_route_target_family_config()
# self.build_esi_config()
self.set_route_targets_config()
self.set_route_distinguisher_config()
# end set_qfx_common_config
# end QfxConf
|
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A Python interface for creating TensorFlow servers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.core.protobuf import cluster_pb2
from tensorflow.core.protobuf import tensorflow_server_pb2
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.framework import errors
from tensorflow.python.util import compat
def _make_server_def(server_or_cluster_def, job_name, task_index, protocol,
config):
"""Creates a `tf.train.ServerDef` protocol buffer.
Args:
server_or_cluster_def: A `tf.train.ServerDef` or
`tf.train.ClusterDef` protocol buffer, or a
`tf.train.ClusterSpec` object, describing the server to be
defined and/or the cluster of which it is a member.
job_name: (Optional.) Specifies the name of the job of which the server
is a member. Defaults to the value in `server_or_cluster_def`, if
specified.
task_index: (Optional.) Specifies the task index of the server in its job.
Defaults to the value in `server_or_cluster_def`, if specified. Otherwise
defaults to 0 if the server's job has only one task.
protocol: (Optional.) Specifies the protocol to be used by the server.
Acceptable values include `"grpc"`. Defaults to the value in
`server_or_cluster_def`, if specified. Otherwise defaults to `"grpc"`.
config: (Optional.) A `tf.ConfigProto` that specifies default configuration
options for all sessions that run on this server.
Returns:
A `tf.train.ServerDef`.
Raises:
TypeError: If the arguments do not have the appropriate type.
ValueError: If an argument is not specified and cannot be inferred.
"""
server_def = tensorflow_server_pb2.ServerDef()
if isinstance(server_or_cluster_def, tensorflow_server_pb2.ServerDef):
server_def.MergeFrom(server_or_cluster_def)
if job_name is not None:
server_def.job_name = job_name
if task_index is not None:
server_def.task_index = task_index
if protocol is not None:
server_def.protocol = protocol
if config is not None:
server_def.default_session_config.MergeFrom(config)
else:
try:
cluster_spec = ClusterSpec(server_or_cluster_def)
except TypeError:
raise TypeError("Could not convert `server_or_cluster_def` to a "
"`tf.train.ServerDef` or `tf.train.ClusterSpec`.")
if job_name is None:
if len(cluster_spec.jobs) == 1:
job_name = cluster_spec.jobs[0]
else:
raise ValueError("Must specify an explicit `job_name`.")
if task_index is None:
task_indices = cluster_spec.task_indices(job_name)
if len(task_indices) == 1:
task_index = task_indices[0]
else:
raise ValueError("Must specify an explicit `task_index`.")
if protocol is None:
protocol = "grpc"
server_def = tensorflow_server_pb2.ServerDef(
cluster=cluster_spec.as_cluster_def(),
job_name=job_name, task_index=task_index, protocol=protocol)
if config is not None:
server_def.default_session_config.MergeFrom(config)
return server_def
class Server(object):
"""An in-process TensorFlow server, for use in distributed training.
A `tf.train.Server` instance encapsulates a set of devices and a
@{tf.Session} target that
can participate in distributed training. A server belongs to a
cluster (specified by a @{tf.train.ClusterSpec}), and
corresponds to a particular task in a named job. The server can
communicate with any other server in the same cluster.
"""
def __init__(self,
server_or_cluster_def,
job_name=None,
task_index=None,
protocol=None,
config=None,
start=True):
"""Creates a new server with the given definition.
The `job_name`, `task_index`, and `protocol` arguments are optional, and
override any information provided in `server_or_cluster_def`.
Args:
server_or_cluster_def: A `tf.train.ServerDef` or
`tf.train.ClusterDef` protocol buffer, or a
`tf.train.ClusterSpec` object, describing the server to be
created and/or the cluster of which it is a member.
job_name: (Optional.) Specifies the name of the job of which the server
is a member. Defaults to the value in `server_or_cluster_def`, if
specified.
task_index: (Optional.) Specifies the task index of the server in its
job. Defaults to the value in `server_or_cluster_def`, if specified.
Otherwise defaults to 0 if the server's job has only one task.
protocol: (Optional.) Specifies the protocol to be used by the server.
Acceptable values include `"grpc"`. Defaults to the value in
`server_or_cluster_def`, if specified. Otherwise defaults to `"grpc"`.
config: (Optional.) A `tf.ConfigProto` that specifies default
configuration options for all sessions that run on this server.
start: (Optional.) Boolean, indicating whether to start the server
after creating it. Defaults to `True`.
Raises:
tf.errors.OpError: Or one of its subclasses if an error occurs while
creating the TensorFlow server.
"""
self._server_def = _make_server_def(server_or_cluster_def,
job_name, task_index, protocol, config)
with errors.raise_exception_on_not_ok_status() as status:
self._server = pywrap_tensorflow.PyServer_New(
self._server_def.SerializeToString(), status)
if start:
self.start()
def start(self):
"""Starts this server.
Raises:
tf.errors.OpError: Or one of its subclasses if an error occurs while
starting the TensorFlow server.
"""
with errors.raise_exception_on_not_ok_status() as status:
pywrap_tensorflow.PyServer_Start(self._server, status)
def join(self):
"""Blocks until the server has shut down.
This method currently blocks forever.
Raises:
tf.errors.OpError: Or one of its subclasses if an error occurs while
joining the TensorFlow server.
"""
with errors.raise_exception_on_not_ok_status() as status:
pywrap_tensorflow.PyServer_Join(self._server, status)
@property
def server_def(self):
"""Returns the `tf.train.ServerDef` for this server.
Returns:
A `tf.train.ServerDef` protocol buffer that describes the configuration
of this server.
"""
return self._server_def
@property
def target(self):
"""Returns the target for a `tf.Session` to connect to this server.
To create a
@{tf.Session} that
connects to this server, use the following snippet:
```python
server = tf.train.Server(...)
with tf.Session(server.target):
# ...
```
Returns:
A string containing a session target for this server.
"""
return self._server.target()
@staticmethod
def create_local_server(config=None, start=True):
"""Creates a new single-process cluster running on the local host.
This method is a convenience wrapper for creating a
`tf.train.Server` with a `tf.train.ServerDef` that specifies a
single-process cluster containing a single task in a job called
`"local"`.
Args:
config: (Optional.) A `tf.ConfigProto` that specifies default
configuration options for all sessions that run on this server.
start: (Optional.) Boolean, indicating whether to start the server after
creating it. Defaults to `True`.
Returns:
A local `tf.train.Server`.
"""
# Specifying port 0 means that the OS will choose a free port for the
# server.
return Server({"local": ["localhost:0"]}, protocol="grpc", config=config,
start=start)
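# Minimal usage sketch (illustrative only; not executed as part of this module):
#
#   server = tf.train.Server.create_local_server()
#   with tf.Session(server.target) as sess:
#       print(sess.run(tf.constant("hello distributed TF")))
#
# create_local_server() lets the OS pick a free port, so server.target is the only
# handle a client session needs.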
class ClusterSpec(object):
"""Represents a cluster as a set of "tasks", organized into "jobs".
A `tf.train.ClusterSpec` represents the set of processes that
participate in a distributed TensorFlow computation. Every
@{tf.train.Server} is constructed in a particular cluster.
To create a cluster with two jobs and five tasks, you specify the
mapping from job names to lists of network addresses (typically
hostname-port pairs).
```python
cluster = tf.train.ClusterSpec({"worker": ["worker0.example.com:2222",
"worker1.example.com:2222",
"worker2.example.com:2222"],
"ps": ["ps0.example.com:2222",
"ps1.example.com:2222"]})
```
Each job may also be specified as a sparse mapping from task indices
to network addresses. This enables a server to be configured without
needing to know the identity of (for example) all other worker
tasks:
```python
cluster = tf.train.ClusterSpec({"worker": {1: "worker1.example.com:2222"},
"ps": ["ps0.example.com:2222",
"ps1.example.com:2222"]})
```
"""
def __init__(self, cluster):
"""Creates a `ClusterSpec`.
Args:
cluster: A dictionary mapping one or more job names to (i) a
list of network addresses, or (ii) a dictionary mapping integer
task indices to network addresses; or a `tf.train.ClusterDef`
protocol buffer.
Raises:
TypeError: If `cluster` is not a dictionary mapping strings to lists
of strings, and not a `tf.train.ClusterDef` protobuf.
"""
if isinstance(cluster, dict):
self._cluster_spec = {}
for job_name, tasks in cluster.items():
if isinstance(tasks, (list, tuple)):
job_tasks = {i: task for i, task in enumerate(tasks)}
elif isinstance(tasks, dict):
job_tasks = {i: task for i, task in tasks.items()}
else:
raise TypeError("The tasks for job %r must be a list or a dictionary "
"from integers to strings." % job_name)
self._cluster_spec[job_name] = job_tasks
self._make_cluster_def()
elif isinstance(cluster, cluster_pb2.ClusterDef):
self._cluster_def = cluster
self._cluster_spec = {}
for job_def in self._cluster_def.job:
self._cluster_spec[job_def.name] = {
i: t for i, t in job_def.tasks.items()}
elif isinstance(cluster, ClusterSpec):
self._cluster_def = cluster_pb2.ClusterDef()
self._cluster_def.MergeFrom(cluster.as_cluster_def())
self._cluster_spec = {}
for job_def in self._cluster_def.job:
self._cluster_spec[job_def.name] = {
i: t for i, t in job_def.tasks.items()}
else:
raise TypeError("`cluster` must be a dictionary mapping one or more "
"job names to lists of network addresses, or a "
"`ClusterDef` protocol buffer")
def __nonzero__(self):
return bool(self._cluster_spec)
# Python 3.x
__bool__ = __nonzero__
def __eq__(self, other):
return self._cluster_spec == other
def __ne__(self, other):
return self._cluster_spec != other
def __str__(self):
key_values = self.as_dict()
string_items = [
repr(k) + ": " + repr(key_values[k]) for k in sorted(key_values)]
return "ClusterSpec({" + ", ".join(string_items) + "})"
def as_dict(self):
"""Returns a dictionary from job names to their tasks.
For each job, if the task index space is dense, the corresponding
value will be a list of network addresses; otherwise it will be a
dictionary mapping (sparse) task indices to the corresponding
addresses.
Returns:
A dictionary mapping job names to lists or dictionaries
describing the tasks in those jobs.
"""
ret = {}
for job in self.jobs:
task_indices = self.task_indices(job)
if max(task_indices) + 1 == len(task_indices):
# Return a list because the task indices are dense. This
# matches the behavior of `as_dict()` before support for
# sparse jobs was added.
ret[job] = self.job_tasks(job)
else:
ret[job] = {i: self.task_address(job, i) for i in task_indices}
return ret
def as_cluster_def(self):
"""Returns a `tf.train.ClusterDef` protocol buffer based on this cluster."""
return self._cluster_def
@property
def jobs(self):
"""Returns a list of job names in this cluster.
Returns:
A list of strings, corresponding to the names of jobs in this cluster.
"""
return list(self._cluster_spec.keys())
def num_tasks(self, job_name):
"""Returns the number of tasks defined in the given job.
Args:
job_name: The string name of a job in this cluster.
Returns:
The number of tasks defined in the given job.
Raises:
ValueError: If `job_name` does not name a job in this cluster.
"""
try:
job = self._cluster_spec[job_name]
except KeyError:
raise ValueError("No such job in cluster: %r" % job_name)
return len(job)
def task_indices(self, job_name):
"""Returns a list of valid task indices in the given job.
Args:
job_name: The string name of a job in this cluster.
Returns:
A list of valid task indices in the given job.
Raises:
ValueError: If `job_name` does not name a job in this cluster,
or no task with index `task_index` is defined in that job.
"""
try:
job = self._cluster_spec[job_name]
except KeyError:
raise ValueError("No such job in cluster: %r" % job_name)
return list(sorted(job.keys()))
def task_address(self, job_name, task_index):
"""Returns the address of the given task in the given job.
Args:
job_name: The string name of a job in this cluster.
task_index: A non-negative integer.
Returns:
The address of the given task in the given job.
Raises:
ValueError: If `job_name` does not name a job in this cluster,
or no task with index `task_index` is defined in that job.
"""
try:
job = self._cluster_spec[job_name]
except KeyError:
raise ValueError("No such job in cluster: %r" % job_name)
try:
return job[task_index]
except KeyError:
raise ValueError("No task with index %r in job %r"
% (task_index, job_name))
def job_tasks(self, job_name):
"""Returns a mapping from task ID to address in the given job.
NOTE: For backwards compatibility, this method returns a list. If
the given job was defined with a sparse set of task indices, the
length of this list may not reflect the number of tasks defined in
this job. Use the @{tf.train.ClusterSpec.num_tasks} method
to find the number of tasks defined in a particular job.
Args:
job_name: The string name of a job in this cluster.
Returns:
A list of task addresses, where the index in the list
corresponds to the task index of each task. The list may contain
`None` if the job was defined with a sparse set of task indices.
Raises:
ValueError: If `job_name` does not name a job in this cluster.
"""
try:
job = self._cluster_spec[job_name]
except KeyError:
raise ValueError("No such job in cluster: %r" % job_name)
ret = [None for _ in range(max(job.keys()) + 1)]
for i, task in job.items():
ret[i] = task
return ret
def _make_cluster_def(self):
"""Creates a `tf.train.ClusterDef` based on the given `cluster_spec`.
Raises:
TypeError: If `cluster_spec` is not a dictionary mapping strings to lists
of strings.
"""
self._cluster_def = cluster_pb2.ClusterDef()
# NOTE(mrry): Sort by job_name to produce deterministic protobufs.
for job_name, tasks in sorted(self._cluster_spec.items()):
try:
job_name = compat.as_bytes(job_name)
except TypeError:
raise TypeError("Job name %r must be bytes or unicode" % job_name)
job_def = self._cluster_def.job.add()
job_def.name = job_name
for i, task_address in sorted(tasks.items()):
try:
task_address = compat.as_bytes(task_address)
except TypeError:
raise TypeError(
"Task address %r must be bytes or unicode" % task_address)
job_def.tasks[i] = task_address
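# Sketch of how sparse task indices round-trip through ClusterSpec (the address is
# made up, as in the docstring examples above):
#
#   spec = ClusterSpec({"worker": {1: "worker1.example.com:2222"}})
#   spec.num_tasks("worker")     # -> 1
#   spec.task_indices("worker")  # -> [1]
#   spec.job_tasks("worker")     # -> [None, "worker1.example.com:2222"]
#
# Note that job_tasks() pads missing indices with None, as documented above.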
|
|
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Ironic test utilities."""
from oslo_utils import timeutils
from ironic.common import states
from ironic.db import api as db_api
def get_test_ipmi_info():
return {
"ipmi_address": "1.2.3.4",
"ipmi_username": "admin",
"ipmi_password": "fake"
}
def get_test_ipmi_bridging_parameters():
return {
"ipmi_bridging": "dual",
"ipmi_local_address": "0x20",
"ipmi_transit_channel": "0",
"ipmi_transit_address": "0x82",
"ipmi_target_channel": "7",
"ipmi_target_address": "0x72"
}
def get_test_ssh_info(auth_type='password'):
result = {
"ssh_address": "1.2.3.4",
"ssh_username": "admin",
"ssh_port": 22,
"ssh_virt_type": "vbox",
}
if 'password' == auth_type:
result['ssh_password'] = 'fake'
elif 'file' == auth_type:
result['ssh_key_filename'] = '/not/real/file'
elif 'key' == auth_type:
result['ssh_key_contents'] = '--BEGIN PRIVATE ...blah'
elif 'too_many' == auth_type:
result['ssh_password'] = 'fake'
result['ssh_key_filename'] = '/not/real/file'
else:
# No auth details (is invalid)
pass
return result
def get_test_pxe_driver_info():
return {
"deploy_kernel": "glance://deploy_kernel_uuid",
"deploy_ramdisk": "glance://deploy_ramdisk_uuid",
}
def get_test_pxe_driver_internal_info():
return {
"is_whole_disk_image": False,
}
def get_test_pxe_instance_info():
return {
"image_source": "glance://image_uuid",
"root_gb": 100,
}
def get_test_seamicro_info():
return {
"seamicro_api_endpoint": "http://1.2.3.4",
"seamicro_username": "admin",
"seamicro_password": "fake",
"seamicro_server_id": "0/0",
}
def get_test_ilo_info():
return {
"ilo_address": "1.2.3.4",
"ilo_username": "admin",
"ilo_password": "fake",
}
def get_test_drac_info():
return {
"drac_host": "1.2.3.4",
"drac_port": "443",
"drac_path": "/wsman",
"drac_protocol": "https",
"drac_username": "admin",
"drac_password": "fake",
}
def get_test_irmc_info():
return {
"irmc_address": "1.2.3.4",
"irmc_username": "admin0",
"irmc_password": "fake0",
"irmc_port": 80,
"irmc_auth_method": "digest",
}
def get_test_amt_info():
return {
"amt_address": "1.2.3.4",
"amt_protocol": "http",
"amt_username": "admin",
"amt_password": "fake",
}
def get_test_agent_instance_info():
return {
'image_source': 'fake-image',
'image_url': 'http://image',
'image_checksum': 'checksum',
'image_disk_format': 'qcow2',
'image_container_format': 'bare',
}
def get_test_agent_driver_info():
return {
'deploy_kernel': 'glance://deploy_kernel_uuid',
'deploy_ramdisk': 'glance://deploy_ramdisk_uuid',
}
def get_test_agent_driver_internal_info():
return {
'agent_url': 'http://127.0.0.1/foo',
'is_whole_disk_image': True,
}
def get_test_iboot_info():
return {
"iboot_address": "1.2.3.4",
"iboot_username": "admin",
"iboot_password": "fake",
}
def get_test_snmp_info(**kw):
result = {
"snmp_driver": kw.get("snmp_driver", "teltronix"),
"snmp_address": kw.get("snmp_address", "1.2.3.4"),
"snmp_port": kw.get("snmp_port", "161"),
"snmp_outlet": kw.get("snmp_outlet", "1"),
"snmp_version": kw.get("snmp_version", "1")
}
if result["snmp_version"] in ("1", "2c"):
result["snmp_community"] = kw.get("snmp_community", "public")
elif result["snmp_version"] == "3":
result["snmp_security"] = kw.get("snmp_security", "public")
return result
def get_test_node(**kw):
properties = {
"cpu_arch": "x86_64",
"cpus": "8",
"local_gb": "10",
"memory_mb": "4096",
}
fake_info = {"foo": "bar", "fake_password": "fakepass"}
return {
'id': kw.get('id', 123),
'name': kw.get('name', None),
'uuid': kw.get('uuid', '1be26c0b-03f2-4d2e-ae87-c02d7f33c123'),
'chassis_id': kw.get('chassis_id', None),
'conductor_affinity': kw.get('conductor_affinity', None),
'power_state': kw.get('power_state', states.NOSTATE),
'target_power_state': kw.get('target_power_state', states.NOSTATE),
'provision_state': kw.get('provision_state', states.NOSTATE),
'target_provision_state': kw.get('target_provision_state',
states.NOSTATE),
'provision_updated_at': kw.get('provision_updated_at'),
'last_error': kw.get('last_error'),
'instance_uuid': kw.get('instance_uuid'),
'instance_info': kw.get('instance_info', fake_info),
'driver': kw.get('driver', 'fake'),
'driver_info': kw.get('driver_info', fake_info),
'driver_internal_info': kw.get('driver_internal_info', fake_info),
'clean_step': kw.get('clean_step'),
'properties': kw.get('properties', properties),
'reservation': kw.get('reservation'),
'maintenance': kw.get('maintenance', False),
'maintenance_reason': kw.get('maintenance_reason'),
'console_enabled': kw.get('console_enabled', False),
'extra': kw.get('extra', {}),
'updated_at': kw.get('updated_at'),
'created_at': kw.get('created_at'),
'inspection_finished_at': kw.get('inspection_finished_at'),
'inspection_started_at': kw.get('inspection_started_at'),
}
def create_test_node(**kw):
"""Create test node entry in DB and return Node DB object.
Function to be used to create test Node objects in the database.
:param kw: kwargs with overriding values for node's attributes.
:returns: Test Node DB object.
"""
node = get_test_node(**kw)
# Let DB generate ID if it isn't specified explicitly
if 'id' not in kw:
del node['id']
dbapi = db_api.get_instance()
return dbapi.create_node(node)
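# Typical usage in a test (illustrative; the driver name and driver_info values are
# only examples of overriding defaults):
#
#   node = create_test_node(driver='fake_ipmitool',
#                           driver_info=get_test_ipmi_info())
#
# Any attribute returned by get_test_node() can be overridden the same way via kwargs.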
def get_test_port(**kw):
return {
'id': kw.get('id', 987),
'uuid': kw.get('uuid', '1be26c0b-03f2-4d2e-ae87-c02d7f33c781'),
'node_id': kw.get('node_id', 123),
'address': kw.get('address', '52:54:00:cf:2d:31'),
'extra': kw.get('extra', {}),
'created_at': kw.get('created_at'),
'updated_at': kw.get('updated_at'),
}
def create_test_port(**kw):
"""Create test port entry in DB and return Port DB object.
Function to be used to create test Port objects in the database.
:param kw: kwargs with overriding values for port's attributes.
:returns: Test Port DB object.
"""
port = get_test_port(**kw)
# Let DB generate ID if it isn't specified explicitly
if 'id' not in kw:
del port['id']
dbapi = db_api.get_instance()
return dbapi.create_port(port)
def get_test_chassis(**kw):
return {
'id': kw.get('id', 42),
'uuid': kw.get('uuid', 'e74c40e0-d825-11e2-a28f-0800200c9a66'),
'extra': kw.get('extra', {}),
'description': kw.get('description', 'data-center-1-chassis'),
'created_at': kw.get('created_at'),
'updated_at': kw.get('updated_at'),
}
def create_test_chassis(**kw):
"""Create test chassis entry in DB and return Chassis DB object.
Function to be used to create test Chassis objects in the database.
:param kw: kwargs with overriding values for chassis's attributes.
:returns: Test Chassis DB object.
"""
chassis = get_test_chassis(**kw)
# Let DB generate ID if it isn't specified explicitly
if 'id' not in kw:
del chassis['id']
dbapi = db_api.get_instance()
return dbapi.create_chassis(chassis)
def get_test_conductor(**kw):
return {
'id': kw.get('id', 6),
'hostname': kw.get('hostname', 'test-conductor-node'),
'drivers': kw.get('drivers', ['fake-driver', 'null-driver']),
'created_at': kw.get('created_at', timeutils.utcnow()),
'updated_at': kw.get('updated_at', timeutils.utcnow()),
}
|
|
# Copyright (C) 2009, Pauli Virtanen <pav@iki.fi>
# Distributed under the same license as SciPy.
import warnings
import numpy as np
from numpy.linalg import LinAlgError
from scipy.linalg import get_blas_funcs
from .utils import make_system
from ._gcrotmk import _fgmres
__all__ = ['lgmres']
def lgmres(A, b, x0=None, tol=1e-5, maxiter=1000, M=None, callback=None,
inner_m=30, outer_k=3, outer_v=None, store_outer_Av=True,
prepend_outer_v=False, atol=None):
"""
Solve a matrix equation using the LGMRES algorithm.
The LGMRES algorithm [1]_ [2]_ is designed to avoid some problems
in the convergence in restarted GMRES, and often converges in fewer
iterations.
Parameters
----------
A : {sparse matrix, dense matrix, LinearOperator}
The real or complex N-by-N matrix of the linear system.
Alternatively, ``A`` can be a linear operator which can
produce ``Ax`` using, e.g.,
``scipy.sparse.linalg.LinearOperator``.
b : {array, matrix}
Right hand side of the linear system. Has shape (N,) or (N,1).
x0 : {array, matrix}
Starting guess for the solution.
tol, atol : float, optional
Tolerances for convergence, ``norm(residual) <= max(tol*norm(b), atol)``.
The default for ``atol`` is `tol`.
.. warning::
The default value for `atol` will be changed in a future release.
For future compatibility, specify `atol` explicitly.
maxiter : int, optional
Maximum number of iterations. Iteration will stop after maxiter
steps even if the specified tolerance has not been achieved.
M : {sparse matrix, dense matrix, LinearOperator}, optional
Preconditioner for A. The preconditioner should approximate the
inverse of A. Effective preconditioning dramatically improves the
rate of convergence, which implies that fewer iterations are needed
to reach a given error tolerance.
callback : function, optional
User-supplied function to call after each iteration. It is called
as callback(xk), where xk is the current solution vector.
inner_m : int, optional
Number of inner GMRES iterations per each outer iteration.
outer_k : int, optional
Number of vectors to carry between inner GMRES iterations.
According to [1]_, good values are in the range of 1...3.
However, note that if you want to use the additional vectors to
accelerate solving multiple similar problems, larger values may
be beneficial.
outer_v : list of tuples, optional
List containing tuples ``(v, Av)`` of vectors and corresponding
matrix-vector products, used to augment the Krylov subspace, and
carried between inner GMRES iterations. The element ``Av`` can
be `None` if the matrix-vector product should be re-evaluated.
This parameter is modified in-place by `lgmres`, and can be used
to pass "guess" vectors in and out of the algorithm when solving
similar problems.
store_outer_Av : bool, optional
Whether LGMRES should store also A*v in addition to vectors `v`
in the `outer_v` list. Default is True.
prepend_outer_v : bool, optional
Whether to put outer_v augmentation vectors before Krylov iterates.
In standard LGMRES, prepend_outer_v=False.
Returns
-------
x : array or matrix
The converged solution.
info : int
Provides convergence information:
- 0 : successful exit
- >0 : convergence to tolerance not achieved, number of iterations
- <0 : illegal input or breakdown
Notes
-----
The LGMRES algorithm [1]_ [2]_ is designed to avoid the
slowing of convergence in restarted GMRES, due to alternating
residual vectors. It typically outperforms GMRES(m) with comparable
memory requirements, or at least is not much worse.
Another advantage in this algorithm is that you can supply it with
'guess' vectors in the `outer_v` argument that augment the Krylov
subspace. If the solution lies close to the span of these vectors,
the algorithm converges faster. This can be useful if several very
similar matrices need to be inverted one after another, such as in
Newton-Krylov iteration where the Jacobian matrix often changes
little in the nonlinear steps.
References
----------
.. [1] A.H. Baker and E.R. Jessup and T. Manteuffel, "A Technique for
Accelerating the Convergence of Restarted GMRES", SIAM J. Matrix
Anal. Appl. 26, 962 (2005).
.. [2] A.H. Baker, "On Improving the Performance of the Linear Solver
restarted GMRES", PhD thesis, University of Colorado (2003).
Examples
--------
>>> from scipy.sparse import csc_matrix
>>> from scipy.sparse.linalg import lgmres
>>> A = csc_matrix([[3, 2, 0], [1, -1, 0], [0, 5, 1]], dtype=float)
>>> b = np.array([2, 4, -1], dtype=float)
>>> x, exitCode = lgmres(A, b)
>>> print(exitCode) # 0 indicates successful convergence
0
>>> np.allclose(A.dot(x), b)
True
"""
A, M, x, b, postprocess = make_system(A, M, x0, b)
if not np.isfinite(b).all():
raise ValueError("RHS must contain only finite numbers")
if atol is None:
warnings.warn("scipy.sparse.linalg.lgmres called without specifying `atol`. "
"The default value will change in the future. To preserve "
"current behavior, set ``atol=tol``.",
category=DeprecationWarning, stacklevel=2)
atol = tol
matvec = A.matvec
psolve = M.matvec
if outer_v is None:
outer_v = []
axpy, dot, scal = None, None, None
nrm2 = get_blas_funcs('nrm2', [b])
b_norm = nrm2(b)
ptol_max_factor = 1.0
for k_outer in range(maxiter):
r_outer = matvec(x) - b
# -- callback
if callback is not None:
callback(x)
# -- determine input type routines
if axpy is None:
if np.iscomplexobj(r_outer) and not np.iscomplexobj(x):
x = x.astype(r_outer.dtype)
axpy, dot, scal, nrm2 = get_blas_funcs(['axpy', 'dot', 'scal', 'nrm2'],
(x, r_outer))
# -- check stopping condition
r_norm = nrm2(r_outer)
if r_norm <= max(atol, tol * b_norm):
break
# -- inner LGMRES iteration
v0 = -psolve(r_outer)
inner_res_0 = nrm2(v0)
if inner_res_0 == 0:
rnorm = nrm2(r_outer)
raise RuntimeError("Preconditioner returned a zero vector; "
"|v| ~ %.1g, |M v| = 0" % rnorm)
v0 = scal(1.0/inner_res_0, v0)
ptol = min(ptol_max_factor, max(atol, tol*b_norm)/r_norm)
try:
Q, R, B, vs, zs, y, pres = _fgmres(matvec,
v0,
inner_m,
lpsolve=psolve,
atol=ptol,
outer_v=outer_v,
prepend_outer_v=prepend_outer_v)
y *= inner_res_0
if not np.isfinite(y).all():
# Overflow etc. in computation. There's no way to
# recover from this, so we have to bail out.
raise LinAlgError()
except LinAlgError:
# Floating point over/underflow, non-finite result from
# matmul etc. -- report failure.
return postprocess(x), k_outer + 1
# Inner loop tolerance control
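# If the inner solve missed its target (pres > ptol), loosen the cap so the next
# inner tolerance is less demanding; if it met the target, tighten the cap so the
# next inner solve is asked for more accuracy.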
if pres > ptol:
ptol_max_factor = min(1.0, 1.5 * ptol_max_factor)
else:
ptol_max_factor = max(1e-16, 0.25 * ptol_max_factor)
# -- GMRES terminated: eval solution
dx = zs[0]*y[0]
for w, yc in zip(zs[1:], y[1:]):
dx = axpy(w, dx, dx.shape[0], yc) # dx += w*yc
# -- Store LGMRES augmentation vectors
nx = nrm2(dx)
if nx > 0:
if store_outer_Av:
q = Q.dot(R.dot(y))
ax = vs[0]*q[0]
for v, qc in zip(vs[1:], q[1:]):
ax = axpy(v, ax, ax.shape[0], qc)
outer_v.append((dx/nx, ax/nx))
else:
outer_v.append((dx/nx, None))
# -- Retain only a finite number of augmentation vectors
while len(outer_v) > outer_k:
del outer_v[0]
# -- Apply step
x += dx
else:
# didn't converge ...
return postprocess(x), maxiter
return postprocess(x), 0
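# Sketch: reusing augmentation vectors across similar solves (purely illustrative;
# A, b1 and b2 stand for any compatible system as in the docstring example):
#
#   outer_v = []
#   x1, info1 = lgmres(A, b1, outer_v=outer_v, atol=1e-8)
#   x2, info2 = lgmres(A, b2, outer_v=outer_v, atol=1e-8)  # warm-started by outer_v
#
# Because outer_v is modified in place, the second call starts from the (v, Av)
# pairs accumulated by the first, which helps when the systems are similar.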
|
|
#!/usr/bin/env python
"""
@package ion.agents.platform.platform_driver
@file ion/agents/platform/platform_driver.py
@author Carlos Rueda
@brief Base classes supporting platform drivers.
"""
__author__ = 'Carlos Rueda'
import logging
from copy import deepcopy
from pyon.public import log
from pyon.agent.common import BaseEnum
from pyon.agent.instrument_fsm import ThreadSafeFSM
from pyon.agent.instrument_fsm import FSMError
from ion.agents.platform.exceptions import PlatformDriverException
from ion.agents.platform.exceptions import PlatformConnectionException
from ion.agents.platform.platform_agent_enums import PlatformAgentEvent
from ion.agents.platform.platform_driver_event import StateChangeDriverEvent
from ion.agents.platform.platform_driver_event import AsyncAgentEvent
class PlatformDriverState(BaseEnum):
"""
Platform driver states
"""
UNCONFIGURED = 'PLATFORM_DRIVER_STATE_UNCONFIGURED'
DISCONNECTED = 'PLATFORM_DRIVER_STATE_DISCONNECTED'
CONNECTED = 'PLATFORM_DRIVER_STATE_CONNECTED'
class PlatformDriverEvent(BaseEnum):
"""
Base events for driver state machines.
Subclasses will typically extend this class to add events for the
CONNECTED state.
"""
ENTER = 'PLATFORM_DRIVER_EVENT_ENTER'
EXIT = 'PLATFORM_DRIVER_EVENT_EXIT'
CONFIGURE = 'PLATFORM_DRIVER_EVENT_CONFIGURE'
CONNECT = 'PLATFORM_DRIVER_EVENT_CONNECT'
CONNECTION_LOST = 'PLATFORM_DRIVER_CONNECTION_LOST'
DISCONNECT = 'PLATFORM_DRIVER_EVENT_DISCONNECT'
# Events for the CONNECTED state:
PING = 'PLATFORM_DRIVER_PING'
GET = 'PLATFORM_DRIVER_GET'
SET = 'PLATFORM_DRIVER_SET'
EXECUTE = 'PLATFORM_DRIVER_EXECUTE'
class PlatformDriverCapability(BaseEnum):
"""
Subclasses will indicate the particular set of capabilities to be exposed.
"""
pass
class PlatformDriver(object):
"""
A platform driver handles a particular platform in a platform network.
This base class provides a common interface and supporting functionality.
"""
def __init__(self, pnode, event_callback,
create_event_subscriber, destroy_event_subscriber):
"""
Creates a PlatformDriver instance.
@param pnode Root PlatformNode defining the platform network
rooted at this platform.
@param event_callback Callback to notify platform agent about events
generated by this driver.
This is captured in self._send_event for this
class and subclasses to call as needed.
@param create_event_subscriber
@param destroy_event_subscriber
functions to create/destroy any needed EventSubscriber's,
in particular regarding the Managed Endpoint API.
"""
self._pnode = pnode
self._send_event = event_callback
self._create_event_subscriber = create_event_subscriber
self._destroy_event_subscriber = destroy_event_subscriber
self._platform_id = self._pnode.platform_id
if self._pnode.parent:
self._parent_platform_id = self._pnode.parent.platform_id
else:
self._parent_platform_id = None
self._driver_config = None
self._resource_schema = {}
# The parameter dictionary.
self._param_dict = {}
# construct FSM and start it with initial state UNCONFIGURED:
self._construct_fsm()
self._fsm.start(PlatformDriverState.UNCONFIGURED)
def get_platform_driver_event_class(self):
"""
Returns PlatformDriverEvent in this base class, but this is typically
overwritten.
"""
return PlatformDriverEvent
def get_platform_driver_capability_class(self):
"""
Returns PlatformDriverCapability in this base class, but this is typically
overwritten.
"""
return PlatformDriverCapability
def get_resource_capabilities(self, current_state=True, cmd_attrs=False):
"""
@param current_state
@param cmd_attrs If true, the returned commands will be the actual
attributes of the associated capability class (or
subclass) instead of the associated values.
"""
res_cmds = self._fsm.get_events(current_state)
res_cmds = self._filter_capabilities(res_cmds, cmd_attrs=cmd_attrs)
res_params = self._param_dict.keys()
return [res_cmds, res_params]
def _filter_capabilities(self, events, cmd_attrs=False):
"""
@param events the events to filter
@param cmd_attrs If true, then the actual attributes of the
PlatformDriverCapability class (or subclass) are
returned instead of the associated values.
"""
capability_class = self.get_platform_driver_capability_class()
event_values = [x for x in events if capability_class.has(x)]
if not cmd_attrs:
return event_values
# map event_values to the actual enum attributes:
event_attrs = []
for attr in dir(capability_class):
# first two checks below similar to BaseEnum.list()
if attr.startswith('__'):
continue
val = getattr(capability_class, attr)
if callable(val):
continue
if val in event_values:
event_attrs.append(attr)
return event_attrs
def get_resource_state(self, *args, **kwargs):
"""
Return the current state of the driver.
@retval str current driver state.
"""
return self._fsm.get_current_state()
def get_resource(self, *args, **kwargs):
"""
"""
return self._fsm.on_event(PlatformDriverEvent.GET, *args, **kwargs)
def set_resource(self, *args, **kwargs):
"""
"""
return self._fsm.on_event(PlatformDriverEvent.SET, *args, **kwargs)
def execute_resource(self, resource_cmd, *args, **kwargs):
"""
Platform agent calls this directly to trigger the execution of a
resource command. The actual action occurs in execute.
"""
return self._fsm.on_event(PlatformDriverEvent.EXECUTE, resource_cmd, *args, **kwargs)
def validate_driver_configuration(self, driver_config):
"""
Called by configure so a subclass can perform any needed additional
validation of the provided configuration.
Nothing is done in this base class. Note that basic validation is
done by PlatformAgent prior to creating/configuring the driver.
@param driver_config Driver configuration.
@raise PlatformDriverException Error in driver configuration.
"""
pass
def configure(self, driver_config):
"""
Configures this driver. In this base class it basically
calls validate_driver_configuration and then assigns the given
config to self._driver_config.
@param driver_config Driver configuration.
"""
if log.isEnabledFor(logging.DEBUG):
log.debug("%r: configure: %s" % (self._platform_id, str(driver_config)))
self.validate_driver_configuration(driver_config)
self._driver_config = driver_config
#self._param_dict = deepcopy(self._driver_config.get('attributes',{}))
def get_config_metadata(self):
"""
"""
return deepcopy(self._resource_schema)
def connect(self, recursion=None):
"""
To be implemented by subclass.
Establishes communication with the platform device.
@raise PlatformConnectionException
"""
raise NotImplementedError() #pragma: no cover
def disconnect(self, recursion=None):
"""
To be implemented by subclass.
Ends communication with the platform device.
@raise PlatformConnectionException
"""
raise NotImplementedError() #pragma: no cover
def ping(self):
"""
To be implemented by subclass.
Verifies communication with the external platform, returning "PONG" if
the verification completes OK.
@retval "PONG"
@raise PlatformConnectionException If the connection to the external
platform is lost.
"""
raise NotImplementedError() #pragma: no cover
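# Illustrative subclass sketch (hypothetical class name and transport helper; a
# concrete driver would implement the NotImplementedError stubs in this class
# against its device protocol):
#
#   class MyPlatformDriver(PlatformDriver):
#       def connect(self, recursion=None):
#           self._client = open_connection(...)   # hypothetical transport call
#       def disconnect(self, recursion=None):
#           self._client.close()
#       def ping(self):
#           return "PONG"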
def get_attributes(self):
"""
To be implemented by subclass.
Returns the attributes of this platform. This is used by the agent
for attribute monitoring purposes.
@retval {attr_id: dict, ...}
dict indexed by attribute ID with associated properties.
attr_id is in particular used during get_attribute_values
calls to retrieve values during resource monitoring.
The dict for each attribute should contain the following properties:
- monitor_cycle_seconds: nominal period in seconds for monitoring
@raise PlatformConnectionException If the connection to the external
platform is lost.
"""
raise NotImplementedError() #pragma: no cover
def get_attribute_values(self, attrs):
"""
To be implemented by subclass.
Returns the values for specific attributes since a given time for
each attribute.
@param attrs [(attr_id, from_time), ...] desired attributes.
from_time Assumed to be in the format basically described by
pyon's get_ion_ts function, "a str representing an
integer number, the millis in UNIX epoch."
@retval {attr_id : [(attrValue, timestamp), ...], ...}
dict indexed by attribute name with list of (value, timestamp)
pairs. Timestamps in same format as from_time.
@raise PlatformConnectionException If the connection to the external
platform is lost.
"""
raise NotImplementedError() #pragma: no cover
def supports_set_operation(self):
"""
@return True only if the SET operation is supported by this driver.
"""
return False
def set_attribute_values(self, attrs):
"""
Sets values for writable attributes in this platform.
Only called by SET handler when supports_set_operation() returns True.
@param attrs [(attr_id, attrValue), ...] List of attribute values
@retval {attr_id : [(attrValue, timestamp), ...], ...}
dict with a list of (value,timestamp) pairs for each attribute
indicated in the input. Returned timestamps indicate the time when the
value was set. Each timestamp is "a str representing an
integer number, the millis in UNIX epoch" to
align with description of pyon's get_ion_ts function.
@raise PlatformConnectionException If the connection to the external
platform is lost.
"""
raise NotImplementedError() #pragma: no cover
def execute(self, cmd, *args, **kwargs):
"""
Executes the given command.
Subclasses can override to execute particular commands or delegate to
its super class. However, note that this base class raises
NotImplementedError.
@param cmd command
@param args command's args
@param kwargs command's kwargs
@return result of the execution
@raise PlatformConnectionException If the connection to the external
platform is lost.
"""
raise NotImplementedError() # pragma: no cover
def get(self, *args, **kwargs):
"""
Gets the values of the requested attributes.
Subclasses can override to get particular attributes and
delegate to this base implementation to handle common attributes.
@param args get's args
@param kwargs get's kwargs
@return result of the retrieval.
@raise PlatformConnectionException If the connection to the external
platform is lost.
"""
raise NotImplementedError() # pragma: no cover
def destroy(self):
"""
Stops all activity done by the driver. Nothing done in this class.
"""
pass
def get_driver_state(self):
"""
Returns the current FSM state.
"""
return self._fsm.get_current_state()
#####################################################################
# Supporting method for handling connection lost in CONNECT handlers
#####################################################################
def _connection_lost(self, cmd, args, kwargs, exc=None):
"""
Supporting method to be called by any CONNECTED handler right after
detecting that the connection with the external platform device has
been lost. It does a regular disconnect() and notifies the agent about
the lost connection. Note that the call to disconnect() itself may
throw some additional exception very likely caused by the fact that
the connection is lost--this exception is just logged out but ignored.
All parameters are for logging purposes.
@param cmd string indicating the command that was attempted
@param args args of the command that was attempted
@param kwargs kwargs of the command that was attempted
@param exc associated exception (if any),
@return (next_state, result) suitable as the return of the FSM
handler where the connection lost was detected. The
next_state will always be PlatformDriverState.DISCONNECTED.
"""
log.debug("%r: (LC) _connection_lost: cmd=%s, args=%s, kwargs=%s, exc=%s",
self._platform_id, cmd, args, kwargs, exc)
result = None
try:
result = self.disconnect()
except Exception as e:
# just log a message
log.debug("%r: (LC) ignoring exception while calling disconnect upon"
" lost connection: %s", self._platform_id, e)
# in any case, notify the agent about the lost connection and
# transition to DISCONNECTED:
self._send_event(AsyncAgentEvent(PlatformAgentEvent.LOST_CONNECTION))
next_state = PlatformDriverState.DISCONNECTED
return next_state, result
##############################################################
# FSM event handlers.
##############################################################
def _common_state_enter(self, *args, **kwargs):
"""
Common work upon every state entry.
"""
state = self.get_driver_state()
log.debug('%r: driver entering state: %s', self._platform_id, state)
self._send_event(StateChangeDriverEvent(state))
def _common_state_exit(self, *args, **kwargs):
"""
Common work upon every state exit.
Nothing done in this base class.
"""
##############################################################
# UNCONFIGURED event handlers.
##############################################################
def _handler_unconfigured_configure(self, *args, **kwargs):
"""
"""
if log.isEnabledFor(logging.TRACE): # pragma: no cover
log.trace("%r/%s args=%s kwargs=%s" % (
self._platform_id, self.get_driver_state(),
str(args), str(kwargs)))
driver_config = kwargs.get('driver_config', None)
if driver_config is None:
raise FSMError('configure: missing driver_config argument')
try:
result = self.configure(driver_config)
next_state = PlatformDriverState.DISCONNECTED
except PlatformDriverException as e:
result = None
next_state = None
log.error("%r: Error in platform driver configuration", self._platform_id, e)
return next_state, result
##############################################################
# DISCONNECTED event handlers.
##############################################################
def _handler_disconnected_connect(self, *args, **kwargs):
"""
"""
if log.isEnabledFor(logging.TRACE): # pragma: no cover
log.trace("%r/%s args=%s kwargs=%s" % (
self._platform_id, self.get_driver_state(),
str(args), str(kwargs)))
recursion = kwargs.get('recursion', None)
self.connect(recursion=recursion)
result = next_state = PlatformDriverState.CONNECTED
return next_state, result
def _handler_disconnected_disconnect(self, *args, **kwargs):
"""
We allow the DISCONNECT event in DISCONNECTED state for convenience,
in particular it facilitates the overall handling of the connection_lost
event, which is processed by a subsequent call to disconnect from the
platform agent. The handler here does nothing.
"""
if log.isEnabledFor(logging.TRACE): # pragma: no cover
log.trace("%r/%s args=%s kwargs=%s" % (
self._platform_id, self.get_driver_state(),
str(args), str(kwargs)))
return None, None
###########################################################################
# CONNECTED event handlers.
# Except for the explicit disconnect and connection_lost handlers, the
# CONNECTED handlers (here and in subclasses) should directly catch any
# PlatformConnectionException to call _connection_lost.
###########################################################################
def _handler_connected_disconnect(self, *args, **kwargs):
"""
"""
if log.isEnabledFor(logging.TRACE): # pragma: no cover
log.trace("%r/%s args=%s kwargs=%s" % (
self._platform_id, self.get_driver_state(),
str(args), str(kwargs)))
recursion = kwargs.get('recursion', None)
result = self.disconnect(recursion=recursion)
next_state = PlatformDriverState.DISCONNECTED
return next_state, result
def _handler_connected_connection_lost(self, *args, **kwargs):
"""
The connection was lost (as opposed to a normal disconnect request).
Here we do the regular disconnect but also notify the platform agent
about the lost connection.
NOTE: this handler in the FSM is provided in case there is a need to
directly trigger the associated transition along with the associated
notification to the agent. However, the typical case is that a CONNECTED
handler dealing with commands will catch any PlatformConnectionException
to call _connection_lost directly.
"""
if log.isEnabledFor(logging.TRACE): # pragma: no cover
log.trace("%r/%s args=%s kwargs=%s" % (
self._platform_id, self.get_driver_state(),
str(args), str(kwargs)))
# just use our supporting method:
return self._connection_lost(PlatformDriverEvent.CONNECTION_LOST, args, kwargs)
def _handler_connected_ping(self, *args, **kwargs):
"""
"""
if log.isEnabledFor(logging.TRACE): # pragma: no cover
log.trace("%r/%s args=%s kwargs=%s" % (
self._platform_id, self.get_driver_state(),
str(args), str(kwargs)))
try:
result = self.ping()
return None, result
except PlatformConnectionException as e:
return self._connection_lost(PlatformDriverEvent.PING, args, kwargs, e)
def _handler_connected_get(self, *args, **kwargs):
"""
"""
if log.isEnabledFor(logging.TRACE): # pragma: no cover
log.trace("%r/%s args=%s kwargs=%s" % (
self._platform_id, self.get_driver_state(),
str(args), str(kwargs)))
try:
result = self.get(*args, **kwargs)
return None, result
except PlatformConnectionException as e:
return self._connection_lost(PlatformDriverEvent.GET, args, kwargs, e)
def _handler_connected_set(self, *args, **kwargs):
"""
"""
if log.isEnabledFor(logging.TRACE): # pragma: no cover
log.trace("%r/%s args=%s kwargs=%s" % (
self._platform_id, self.get_driver_state(),
str(args), str(kwargs)))
if not self.supports_set_operation():
raise FSMError('Unsupported operation: %s' % PlatformDriverEvent.SET)
attrs = kwargs.get('attrs', None)
if attrs is None:
raise FSMError('set_attribute_values: missing attrs argument')
try:
result = self.set_attribute_values(attrs)
return None, result
except PlatformConnectionException as e:
return self._connection_lost(PlatformDriverEvent.SET, args, kwargs, e)
def _handler_connected_execute(self, *args, **kwargs):
"""
"""
if log.isEnabledFor(logging.TRACE): # pragma: no cover
log.trace("%r/%s args=%s kwargs=%s" % (
self._platform_id, self.get_driver_state(),
str(args), str(kwargs)))
if len(args) == 0:
raise FSMError('execute_resource: missing resource_cmd argument')
try:
result = self.execute(*args, **kwargs)
return None, result
except PlatformConnectionException as e:
return self._connection_lost(PlatformDriverEvent.EXECUTE, args, kwargs, e)
##############################################################
# Platform driver FSM setup
##############################################################
def _construct_fsm(self, states=PlatformDriverState,
events=PlatformDriverEvent,
enter_event=PlatformDriverEvent.ENTER,
exit_event=PlatformDriverEvent.EXIT):
"""
Constructs the FSM for the driver. The preparations here are mostly
related with the UNCONFIGURED, DISCONNECTED, and CONNECTED state
transitions, with some common handlers for the CONNECTED state.
Subclasses can override to indicate specific parameters and add new
handlers (typically for the CONNECTED state).
"""
log.debug("%r: constructing base platform driver FSM", self._platform_id)
self._fsm = ThreadSafeFSM(states, events, enter_event, exit_event)
for state in PlatformDriverState.list():
self._fsm.add_handler(state, enter_event, self._common_state_enter)
self._fsm.add_handler(state, exit_event, self._common_state_exit)
# UNCONFIGURED state event handlers:
self._fsm.add_handler(PlatformDriverState.UNCONFIGURED, PlatformDriverEvent.CONFIGURE, self._handler_unconfigured_configure)
# DISCONNECTED state event handlers:
self._fsm.add_handler(PlatformDriverState.DISCONNECTED, PlatformDriverEvent.CONNECT, self._handler_disconnected_connect)
self._fsm.add_handler(PlatformDriverState.DISCONNECTED, PlatformDriverEvent.DISCONNECT, self._handler_disconnected_disconnect)
# CONNECTED state event handlers:
self._fsm.add_handler(PlatformDriverState.CONNECTED, PlatformDriverEvent.DISCONNECT, self._handler_connected_disconnect)
self._fsm.add_handler(PlatformDriverState.CONNECTED, PlatformDriverEvent.CONNECTION_LOST, self._handler_connected_connection_lost)
self._fsm.add_handler(PlatformDriverState.CONNECTED, PlatformDriverEvent.PING, self._handler_connected_ping)
self._fsm.add_handler(PlatformDriverState.CONNECTED, PlatformDriverEvent.GET, self._handler_connected_get)
self._fsm.add_handler(PlatformDriverState.CONNECTED, PlatformDriverEvent.SET, self._handler_connected_set)
self._fsm.add_handler(PlatformDriverState.CONNECTED, PlatformDriverEvent.EXECUTE, self._handler_connected_execute)
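# ---------------------------------------------------------------------------
# Hedged usage sketch (illustration only, not part of the driver itself).
# The _construct_fsm docstring above notes that subclasses typically register
# extra CONNECTED handlers. A minimal sketch, assuming a concrete subclass
# named MyPlatformDriver and a hypothetical TURN_ON event in
# PlatformDriverEvent:
#
#     class MyPlatformDriver(<base platform driver class>):
#         def _construct_fsm(self, *args, **kwargs):
#             # build the base FSM first, then add the extra CONNECTED handler
#             super(MyPlatformDriver, self)._construct_fsm(*args, **kwargs)
#             self._fsm.add_handler(PlatformDriverState.CONNECTED,
#                                   PlatformDriverEvent.TURN_ON,
#                                   self._handler_connected_turn_on)
#
#         def _handler_connected_turn_on(self, *args, **kwargs):
#             # same pattern as the CONNECTED handlers above: catch connection
#             # errors and route them through _connection_lost
#             try:
#                 return None, self.turn_on()
#             except PlatformConnectionException as e:
#                 return self._connection_lost(PlatformDriverEvent.TURN_ON,
#                                              args, kwargs, e)
# ---------------------------------------------------------------------------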
|
|
# Copyright (c) 2016-2020, The Bifrost Authors. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of The Bifrost Authors nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# TODO: Add tests for data streams spanning multiple files
import unittest
import bifrost as bf
from bifrost.blocks import *
import os
import shutil
class TemporaryDirectory(object):
def __init__(self, path):
self.path = path
os.makedirs(self.path)
def remove(self):
shutil.rmtree(self.path)
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.remove()
def get_sigproc_file_size(filename):
"""Returns the header and data size of a sigproc file without reading
the whole file.
"""
with open(filename, 'rb') as f:
head = ''
while 'HEADER_END' not in head:
more_data = f.read(4096)
try:
more_data = more_data.decode(errors='replace')
except AttributeError:
# Python2 catch
pass
if len(more_data) == 0:
raise IOError("Not a valid sigproc file: " + filename)
head += more_data
hdr_size = head.find('HEADER_END') + len('HEADER_END')
file_size = os.path.getsize(filename)
data_size = file_size - hdr_size
return hdr_size, data_size
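# Hedged usage sketch (illustration only): get_sigproc_file_size lets a test
# grab just the data payload of a filterbank file without parsing the header,
# assuming 'somefile.fil' is a valid sigproc file. SerializeTest.setUp below
# does a variant of this by slicing the file contents at hdr_size:
#
#     hdr_size, data_size = get_sigproc_file_size('somefile.fil')
#     with open('somefile.fil', 'rb') as f:
#         f.seek(hdr_size)        # skip the header
#         payload = f.read()      # data_size bytes of raw samples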
def rename_sequence(hdr, name):
hdr['name'] = name
return hdr
class SerializeTest(unittest.TestCase):
def setUp(self):
self.fil_file = "./data/2chan16bitNoDM.fil"
# Note: This is specific to 2chan16bitNoDM.fil
self.time_tag = 3493024746386227200
hdr_size, self.data_size = get_sigproc_file_size(self.fil_file)
with open(self.fil_file, 'rb') as f:
self.data = f.read()
self.data = self.data[hdr_size:]
self.temp_path = '/tmp/bifrost_test_serialize'
self.basename = os.path.basename(self.fil_file)
self.basepath = os.path.join(self.temp_path, self.basename)
self.gulp_nframe = 101
def run_test_serialize_with_name_no_ringlets(self, gulp_nframe_inc=0):
with bf.Pipeline() as pipeline:
data = read_sigproc([self.fil_file], self.gulp_nframe, core=0)
for i in range(5):
if gulp_nframe_inc != 0:
data = copy(data,
gulp_nframe=self.gulp_nframe+i*gulp_nframe_inc)
else:
data = copy(data)
data = serialize(data, self.temp_path, core=0)
with TemporaryDirectory(self.temp_path):
pipeline.run()
# Note: SerializeBlock uses os.path.basename if path is given
hdrpath = self.basepath + '.bf.json'
datpath = self.basepath + '.bf.' + '0' * 12 + '.dat'
self.assertTrue(os.path.exists(hdrpath))
self.assertTrue(os.path.exists(datpath))
self.assertEqual(os.path.getsize(datpath), self.data_size)
with open(datpath, 'rb') as f:
data = f.read()
self.assertEqual(data, self.data)
def test_serialize_with_name_no_ringlets(self):
self.run_test_serialize_with_name_no_ringlets()
self.run_test_serialize_with_name_no_ringlets(gulp_nframe_inc=1)
self.run_test_serialize_with_name_no_ringlets(gulp_nframe_inc=3)
def test_serialize_with_time_tag_no_ringlets(self):
with bf.Pipeline() as pipeline:
data = read_sigproc([self.fil_file], self.gulp_nframe)
# Custom view sets sequence name to '', which causes SerializeBlock
# to use the time_tag instead.
data = bf.views.custom(data, lambda hdr: rename_sequence(hdr, ''))
data = serialize(data, self.temp_path)
with TemporaryDirectory(self.temp_path):
pipeline.run()
basepath = os.path.join(self.temp_path,
'%020i' % self.time_tag)
hdrpath = basepath + '.bf.json'
datpath = basepath + '.bf.' + '0' * 12 + '.dat'
self.assertTrue(os.path.exists(hdrpath))
self.assertTrue(os.path.exists(datpath))
self.assertEqual(os.path.getsize(datpath), self.data_size)
with open(datpath, 'rb') as f:
data = f.read()
self.assertEqual(data, self.data)
def test_serialize_with_name_and_ringlets(self):
with bf.Pipeline() as pipeline:
data = read_sigproc([self.fil_file], self.gulp_nframe)
# Transpose so that freq becomes a ringlet dimension
# TODO: Test multiple ringlet dimensions (e.g., freq + pol) once
# SerializeBlock supports it.
data = transpose(data, ['freq', 'time', 'pol'])
data = serialize(data, self.temp_path)
with TemporaryDirectory(self.temp_path):
pipeline.run()
# Note: SerializeBlock uses os.path.basename if path is given
hdrpath = self.basepath + '.bf.json'
datpath0 = self.basepath + '.bf.' + '0' * 12 + '.0.dat'
datpath1 = self.basepath + '.bf.' + '0' * 12 + '.1.dat'
self.assertTrue(os.path.exists(hdrpath))
self.assertTrue(os.path.exists(datpath0))
self.assertTrue(os.path.exists(datpath1))
self.assertEqual(os.path.getsize(datpath0),
self.data_size // 2)
self.assertEqual(os.path.getsize(datpath1),
self.data_size // 2)
def test_deserialize_no_ringlets(self):
with TemporaryDirectory(self.temp_path):
with bf.Pipeline() as pipeline:
data = read_sigproc([self.fil_file], self.gulp_nframe)
serialize(data, self.temp_path)
pipeline.run()
datpath = self.basepath + '.bf.' + '0' * 12 + '.dat'
with bf.Pipeline() as pipeline:
data = deserialize([self.basepath + '.bf'], self.gulp_nframe)
# Note: Must rename the sequence to avoid overwriting the input
# file.
data = bf.views.custom(
data, lambda hdr: rename_sequence(hdr, hdr['name'] + '.2'))
serialize(data, self.temp_path)
pipeline.run()
datpath = self.basepath + '.2.bf.' + '0' * 12 + '.dat'
with open(datpath, 'rb') as f:
data = f.read()
self.assertEqual(len(data), len(self.data))
self.assertEqual(data, self.data)
def test_deserialize_with_ringlets(self):
with TemporaryDirectory(self.temp_path):
with bf.Pipeline() as pipeline:
data = read_sigproc([self.fil_file], self.gulp_nframe)
data = transpose(data, ['freq', 'time', 'pol'])
serialize(data, self.temp_path)
pipeline.run()
datpath = self.basepath + '.bf.' + '0' * 12 + '.dat'
with bf.Pipeline() as pipeline:
data = deserialize([self.basepath + '.bf'], self.gulp_nframe)
# Note: Must rename the sequence to avoid overwriting the input
# file.
data = bf.views.custom(
data, lambda hdr: rename_sequence(hdr, hdr['name'] + '.2'))
serialize(data, self.temp_path)
pipeline.run()
hdrpath = self.basepath + '.2.bf.json'
datpath0 = self.basepath + '.2.bf.' + '0' * 12 + '.0.dat'
datpath1 = self.basepath + '.2.bf.' + '0' * 12 + '.1.dat'
self.assertTrue(os.path.exists(hdrpath))
self.assertTrue(os.path.exists(datpath0))
self.assertTrue(os.path.exists(datpath1))
self.assertEqual(os.path.getsize(datpath0),
self.data_size // 2)
self.assertEqual(os.path.getsize(datpath1),
self.data_size // 2)
|
|
import numpy as np
from stingray.events import EventList
from stingray.varenergyspectrum import VarEnergySpectrum, RmsEnergySpectrum
from stingray.varenergyspectrum import LagEnergySpectrum
from stingray.varenergyspectrum import ExcessVarianceSpectrum
from stingray.lightcurve import Lightcurve
from astropy.tests.helper import pytest
np.random.seed(20150907)
class DummyVarEnergy(VarEnergySpectrum):
def _spectrum_function(self):
return None, None
class TestExcVarEnergySpectrum(object):
@classmethod
def setup_class(cls):
from ..simulator.simulator import Simulator
simulator = Simulator(0.1, 10000, rms=0.4, mean=200)
test_lc = simulator.simulate(1)
cls.test_ev1, cls.test_ev2 = EventList(), EventList()
cls.test_ev1.simulate_times(test_lc)
cls.test_ev1.energy = np.random.uniform(0.3, 12,
len(cls.test_ev1.time))
def test_allocate(self):
exv = ExcessVarianceSpectrum(self.test_ev1, [0., 100],
(0.3, 12, 5, "lin"),
bin_time=1,
segment_size=100)
class TestVarEnergySpectrum(object):
@classmethod
def setup_class(cls):
tstart = 0.0
tend = 100.0
nphot = 1000
alltimes = np.random.uniform(tstart, tend, nphot)
alltimes.sort()
cls.events = EventList(alltimes,
energy=np.random.uniform(0.3, 12, nphot),
gti = [[tstart, tend]])
cls.vespec = DummyVarEnergy(cls.events, [0., 10000],
(0.5, 5, 10, "lin"), [0.3, 10],
bin_time=0.1)
cls.vespeclog = \
DummyVarEnergy(cls.events, [0., 10000],
(0.5, 5, 10, "log"), [0.3, 10])
def test_intervals_overlapping(self):
ref_int = self.vespec._decide_ref_intervals([0.5, 6], [0.3, 10])
np.testing.assert_allclose(ref_int, [[0.3, 0.5], [6, 10]])
ref_int = self.vespec._decide_ref_intervals([0.5, 11], [0.3, 10])
np.testing.assert_allclose(ref_int, [[0.3, 0.5]])
def test_intervals_non_overlapping(self):
ref_int = self.vespec._decide_ref_intervals([6, 11], [0.3, 5])
np.testing.assert_allclose(ref_int, [[0.3, 5]])
def test_ref_band_none(self):
events = EventList([0.09, 0.21, 0.23, 0.32, 0.4, 0.54],
energy=[0,0,0,0,1,1],
gti=[[0, 0.65]])
vespec = DummyVarEnergy(events, [0., 10000],
(0, 1, 2, "lin"),
bin_time=0.1)
assert np.all(vespec.ref_band == np.array([[0, np.inf]]))
def test_energy_spec_wrong_list_not_tuple(self):
events = EventList([0.09, 0.21, 0.23, 0.32, 0.4, 0.54],
energy=[0, 0, 0, 0, 1, 1],
gti=[[0, 0.65]])
# Test using a list instead of tuple
# with pytest.raises(ValueError):
vespec = DummyVarEnergy(events, [0., 10000],
[0, 1, 2, "lin"],
bin_time=0.1)
def test_energy_spec_wrong_str(self):
events = EventList([0.09, 0.21, 0.23, 0.32, 0.4, 0.54],
energy=[0, 0, 0, 0, 1, 1],
gti=[[0, 0.65]])
        # Test using an invalid bin-spacing string
with pytest.raises(ValueError):
vespec = DummyVarEnergy(events, [0., 10000],
(0, 1, 2, "xxx"),
bin_time=0.1)
def test_construct_lightcurves(self):
events = EventList([0.09, 0.21, 0.23, 0.32, 0.4, 0.54],
energy=[0,0,0,0,1,1],
gti=[[0, 0.65]])
vespec = DummyVarEnergy(events, [0., 10000],
(0, 1, 2, "lin"), [0.5, 1.1],
bin_time=0.1)
base_lc, ref_lc = \
vespec._construct_lightcurves([0, 0.5],
tstart=0, tstop=0.65)
np.testing.assert_allclose(base_lc.counts, [1, 0, 2, 1, 0, 0])
np.testing.assert_allclose(ref_lc.counts, [0, 0, 0, 1, 0, 1])
def test_construct_lightcurves_no_exclude(self):
events = EventList([0.09, 0.21, 0.23, 0.32, 0.4, 0.54],
energy=[0,0,0,0,1,1],
gti=[[0, 0.65]])
vespec = DummyVarEnergy(events, [0., 10000],
(0, 1, 2, "lin"), [0, 0.5],
bin_time=0.1)
base_lc, ref_lc = \
vespec._construct_lightcurves([0, 0.5],
tstart=0, tstop=0.65,
exclude=False)
np.testing.assert_equal(base_lc.counts, ref_lc.counts)
def test_construct_lightcurves_pi(self):
events = EventList([0.09, 0.21, 0.23, 0.32, 0.4, 0.54],
pi=np.asarray([0, 0, 0, 0, 1, 1]),
gti=[[0, 0.65]])
vespec = DummyVarEnergy(events, [0., 10000],
(0, 1, 2, "lin"), [0.5, 1.1], use_pi=True,
bin_time=0.1)
base_lc, ref_lc = \
vespec._construct_lightcurves([0, 0.5],
tstart=0, tstop=0.65)
np.testing.assert_allclose(base_lc.counts, [1, 0, 2, 1, 0, 0])
np.testing.assert_allclose(ref_lc.counts, [0, 0, 0, 1, 0, 1])
class TestRMSEnergySpectrum(object):
@classmethod
def setup_class(cls):
from ..simulator.simulator import Simulator
simulator = Simulator(0.1, 1000, rms=0.4, mean=200)
test_lc = simulator.simulate(1)
test_ev1, test_ev2 = EventList(), EventList()
test_ev1.simulate_times(test_lc)
test_ev2.simulate_times(test_lc)
test_ev1.energy = np.random.uniform(0.3, 12, len(test_ev1.time))
test_ev2.energy = np.random.uniform(0.3, 12, len(test_ev2.time))
cls.rms = RmsEnergySpectrum(test_ev1, [0., 100],
(0.3, 12, 5, "lin"),
bin_time=0.01,
segment_size=100,
events2=test_ev2)
def test_correct_rms_values(self):
# Assert that it is close to 0.4 (since we don't have infinite spectral
# coverage, it will be a little less!)
assert np.allclose(self.rms.spectrum, 0.4, 0.05)
def test_correct_rms_errorbars(self):
# Assert that the rms measured at all energies is the same
assert np.all(
np.abs(self.rms.spectrum - self.rms.spectrum[0]) < \
self.rms.spectrum_error)
def test_rms_invalid_evlist_warns(self):
ev = EventList(time=[], energy=[], gti=self.rms.events1.gti)
with pytest.warns(UserWarning) as record:
rms = RmsEnergySpectrum(ev, [0., 100],
(0.3, 12, 5, "lin"),
bin_time=0.01,
segment_size=100,
events2=self.rms.events2)
assert np.allclose(rms.spectrum, 0)
assert np.allclose(rms.spectrum_error, 0)
class TestLagEnergySpectrum(object):
@classmethod
def setup_class(cls):
from ..simulator.simulator import Simulator
dt = 0.1
simulator = Simulator(dt, 1000, rms=0.4, mean=200)
test_lc1 = simulator.simulate(2)
test_lc2 = Lightcurve(test_lc1.time,
np.array(np.roll(test_lc1.counts, 2)),
err_dist=test_lc1.err_dist,
dt=dt)
test_ev1, test_ev2 = EventList(), EventList()
test_ev1.simulate_times(test_lc1)
test_ev2.simulate_times(test_lc2)
test_ev1.energy = np.random.uniform(0.3, 9, len(test_ev1.time))
test_ev2.energy = np.random.uniform(9, 12, len(test_ev2.time))
cls.lag = LagEnergySpectrum(test_ev1, [0., 0.5],
(0.3, 9, 4, "lin"), [9, 12],
bin_time=0.1,
segment_size=30,
events2=test_ev2)
def test_lagspectrum_values_and_errors(self):
assert np.all(np.abs(self.lag.spectrum - 0.2) < \
3 * self.lag.spectrum_error)
def test_lag_invalid_evlist_warns(self):
ev = EventList(time=[], energy=[], gti=self.lag.events1.gti)
with pytest.warns(UserWarning) as record:
lag = LagEnergySpectrum(ev, [0., 0.5],
(0.3, 9, 4, "lin"), [9, 12],
bin_time=0.1,
segment_size=30,
events2=self.lag.events2)
assert np.allclose(lag.spectrum, 0)
assert np.allclose(lag.spectrum_error, 0)
|
|
from django.conf import settings
config = settings.CSV2_CONFIG
from django.views.decorators.csrf import requires_csrf_token
from cloudscheduler.lib.select_ec2 import \
select_ec2_images, \
select_ec2_instance_types
from cloudscheduler.lib.view_utils import \
lno, \
qt, \
qt_filter_get, \
render, \
set_user_groups, \
table_fields, \
validate_fields
from cloudscheduler.lib.schema import *
from cloudscheduler.lib.signal_functions import event_signal_send
from cloudscheduler.lib.log_tools import get_frame_info
from cloudscheduler.lib.web_profiler import silk_profile as silkp
import json
from django.core.serializers.json import DjangoJSONEncoder
# lno: EC2 - error code identifier.
MODID = 'EC2'
#-------------------------------------------------------------------------------
@silkp(name="EC2 Images List")
@requires_csrf_token
def images(request, message=None, response_code=0):
keys = {
'auto_active_group': True,
# Named argument formats (anything else is a string).
'format': {
'cloud_name': 'lower',
'architectures': ('view_ec2_images', 'arch', True, True),
'like': 'lower',
'not_like': 'lower',
'operating_systems': ('view_ec2_images', 'opsys', True, True),
'owner_aliases': 'lower',
'owner_ids': 'lower',
'csrfmiddlewaretoken': 'ignore',
'group': 'ignore',
},
'mandatory': [
'cloud_name',
],
'allow_empty': [
'like',
'not_like',
'owner_aliases',
'owner_ids'
]
}
# open the database.
config.db_open()
# Retrieve the active user, associated group list and optionally set the active group.
rc, msg, active_user = set_user_groups(config, request, super_user=False)
if rc != 0:
config.db_close()
return render(request, 'csv2/ec2_images.html', {'response_code': 1, 'message': '%s %s' % (lno(MODID), msg)})
if request.method == 'POST':
# Validate input fields.
rc, msg, fields, tables, columns = validate_fields(config, request, [keys], ['ec2_image_filters'], active_user)
if rc != 0:
config.db_close()
return render(request, 'csv2/ec2_images.html', {'response_code': 1, 'message': '%s ec2 images, %s' % (lno(MODID), msg)})
# Update the user.
table = 'ec2_image_filters'
where_clause = "group_name='%s' and cloud_name='%s'" % (active_user.active_group, fields['cloud_name'])
rc, msg = config.db_update(table, table_fields(fields, table, columns, 'update'), where=where_clause)
if rc != 0:
config.db_close()
return render(request, 'csv2/ec2_images.html', {'response_code': 1, 'message': '%s ec2 images, %s' % (lno(MODID), msg)})
config.db_commit()
event_signal_send(config, "update_ec2_images")
response_code = 0
message = "update successful"
active_user.kwargs['cloud_name'] = fields['cloud_name']
# Retrieve EC2 image filters.
where_clause = "group_name='%s' and cloud_name='%s'" % (active_user.active_group, active_user.kwargs['cloud_name'])
    rc, msg, ec2_image_filters = config.db_query("ec2_image_filters", where=where_clause)
    ec2_image_filters_json = json.dumps(ec2_image_filters)
# Retrieve EC2 image filter options.
rc, msg = config.db_execute('select distinct arch as architecture from view_ec2_images order by architecture')
architectures = []
for row in config.db_cursor:
architectures.append(row)
rc, msg = config.db_execute('select distinct opsys as operating_system from view_ec2_images order by operating_system')
operating_systems = []
for row in config.db_cursor:
operating_systems.append(row)
rc, msg = config.db_execute('select distinct alias from ec2_image_well_known_owner_aliases order by alias')
owner_aliases = []
for row in config.db_cursor:
owner_aliases.append(row)
arch_list = []
for arch in architectures:
for value in arch.values():
arch_list.append(value)
os_list = []
for os in operating_systems:
for value in os.values():
os_list.append(value)
alias_list = []
for alias in owner_aliases:
for value in alias.values():
alias_list.append(value)
# Retrieve EC2 images.
rc, msg, sql_select = select_ec2_images(config, active_user.active_group, active_user.kwargs['cloud_name'])
if rc != 0:
config.db_close()
return render(request, 'csv2/ec2_images.html', {'response_code': 1, 'message': '%s ec2 images, %s' % (lno(MODID), msg)})
rc, msg = config.db_execute(sql_select)
ec2_images = []
for row in config.db_cursor:
ec2_images.append(row)
config.db_close()
# Render the page.
context = {
'active_user': active_user.username,
'active_group': active_user.active_group,
'user_groups': active_user.user_groups,
'ec2_image_filters': ec2_image_filters,
'ec2_image_filters_json': ec2_image_filters_json,
'ec2_images': ec2_images,
'architectures': architectures,
'operating_systems': operating_systems,
'owner_aliases': owner_aliases,
'arch_list': arch_list,
'os_list': os_list,
'alias_list': alias_list,
'response_code': response_code,
'message': message,
'is_superuser': active_user.is_superuser,
'version': config.get_version()
}
return render(request, 'csv2/ec2_images.html', context)
#-------------------------------------------------------------------------------
@silkp(name="EC2 Instance Type List")
@requires_csrf_token
def instance_types(request, message=None, response_code=0):
keys = {
'auto_active_group': True,
# Named argument formats (anything else is a string).
'format': {
'cloud_name': 'lower',
'cores': ('view_ec2_instance_types', 'cores', True, True),
'families': ('view_ec2_instance_types', 'instance_family', True, True),
'memory_min_gigabytes_per_core': 'float',
'memory_max_gigabytes_per_core': 'float',
'operating_systems': ('view_ec2_instance_types', 'operating_system', True, True),
'processors': ('view_ec2_instance_types', 'processor', True, True),
'processor_manufacturers': ('view_ec2_instance_types', 'processor_manufacturer', True, True),
'csrfmiddlewaretoken': 'ignore',
'group': 'ignore',
},
'mandatory': [
'cloud_name',
],
}
# open the database.
config.db_open()
# Retrieve the active user, associated group list and optionally set the active group.
rc, msg, active_user = set_user_groups(config, request, super_user=False)
if rc != 0:
config.db_close()
return render(request, 'csv2/ec2_instance_types.html', {'response_code': 1, 'message': '%s %s' % (lno(MODID), msg)})
if request.method == 'POST':
# Validate input fields.
rc, msg, fields, tables, columns = validate_fields(config, request, [keys], ['ec2_instance_type_filters'], active_user)
if rc != 0:
config.db_close()
return render(request, 'csv2/ec2_instance_types.html', {'response_code': 1, 'message': '%s ec2 instance-types, %s' % (lno(MODID), msg)})
# Update the user.
table = tables['ec2_instance_type_filters']
rc, msg = config.db_session_execute(table.update().where((table.c.group_name==active_user.active_group) & (table.c.cloud_name==fields['cloud_name'])).values(table_fields(fields, table, columns, 'update')))
if rc != 0:
config.db_close()
return render(request, 'csv2/ec2_instance_types.html', {'response_code': 1, 'message': '%s ec2 instance-types, %s' % (lno(MODID), msg)})
config.db_commit()
event_signal_send(config, "update_ec2_instance_types")
response_code = 0
message = "update successful"
active_user.kwargs['cloud_name'] = fields['cloud_name']
# Retrieve EC2 instance type filters.
    where_clause = "group_name='%s' and cloud_name='%s'" % (active_user.active_group, active_user.kwargs['cloud_name'])
rc, msg, ec2_instance_type_filters = config.db_query("ec2_instance_type_filters", where=where_clause)
    ec2_instance_type_filters_json = json.dumps(ec2_instance_type_filters, cls=DjangoJSONEncoder)
# Retrieve EC2 instance type filter options.
rc, msg = config.db_execute('select distinct instance_family from view_ec2_instance_types order by instance_family')
families = []
for row in config.db_cursor:
families.append(row)
    rc, msg = config.db_execute('select distinct operating_system from view_ec2_instance_types order by operating_system')
    operating_systems = []
    for row in config.db_cursor:
        operating_systems.append(row)
    rc, msg = config.db_execute('select distinct processor from view_ec2_instance_types order by processor')
    processors = []
    for row in config.db_cursor:
        processors.append(row)
    rc, msg = config.db_execute('select distinct processor_manufacturer from view_ec2_instance_types order by processor_manufacturer')
    manufacturers = []
    for row in config.db_cursor:
        manufacturers.append(row)
    rc, msg = config.db_execute('select distinct cores from view_ec2_instance_types order by cores')
    cores = []
    for row in config.db_cursor:
        cores.append(row)
families_list = []
for family in families:
for value in family.values():
families_list.append(value)
os_list = []
for os in operating_systems:
for value in os.values():
os_list.append(value)
proc_list = []
for proc in processors:
for value in proc.values():
proc_list.append(value)
manu_list = []
for manu in manufacturers:
for value in manu.values():
manu_list.append(value)
cores_list = []
for core in cores:
for value in core.values():
cores_list.append(value)
# Retrieve EC2 instance types.
rc, msg, sql_select = select_ec2_instance_types(config, active_user.active_group, active_user.kwargs['cloud_name'])
if rc != 0:
config.db_close()
return render(request, 'csv2/ec2_instance_types.html', {'response_code': 1, 'message': '%s ec2 instance-types, %s' % (lno(MODID), msg)})
rc, msg = config.db_execute(sql_select)
ec2_instance_types = []
for row in config.db_cursor:
ec2_instance_types.append(row)
config.db_close()
# Render the page.
context = {
'active_user': active_user.username,
'active_group': active_user.active_group,
'user_groups': active_user.user_groups,
'ec2_instance_type_filters': ec2_instance_type_filters,
'ec2_instance_type_filters_json': ec2_instance_type_filters_json,
'ec2_instance_types': ec2_instance_types,
'families': families,
'operating_systems': operating_systems,
'processors': processors,
'manufacturers': manufacturers,
'cores': cores,
'families_list': families_list,
'os_list': os_list,
'proc_list': proc_list,
'manu_list': manu_list,
'cores_list': cores_list,
'response_code': response_code,
'message': message,
'is_superuser': active_user.is_superuser,
'version': config.get_version()
}
return render(request, 'csv2/ec2_instance_types.html', context)
|
|
from __future__ import unicode_literals
import re
from datetime import date, datetime, timedelta
from django import template
from django.conf import settings
from django.template import defaultfilters
from django.utils.encoding import force_unicode
from django.utils.formats import number_format
from django.utils.translation import pgettext, ungettext, ugettext as _
from django.utils.timezone import is_aware, utc
register = template.Library()
@register.filter(is_safe=True)
def ordinal(value):
"""
Converts an integer to its ordinal as a string. 1 is '1st', 2 is '2nd',
3 is '3rd', etc. Works for any integer.
"""
try:
value = int(value)
except (TypeError, ValueError):
return value
suffixes = (_('th'), _('st'), _('nd'), _('rd'), _('th'), _('th'), _('th'), _('th'), _('th'), _('th'))
if value % 100 in (11, 12, 13): # special case
return "%d%s" % (value, suffixes[0])
return "%d%s" % (value, suffixes[value % 10])
@register.filter(is_safe=True)
def intcomma(value, use_l10n=True):
"""
Converts an integer to a string containing commas every three digits.
For example, 3000 becomes '3,000' and 45000 becomes '45,000'.
"""
if settings.USE_L10N and use_l10n:
try:
if not isinstance(value, float):
value = int(value)
except (TypeError, ValueError):
return intcomma(value, False)
else:
return number_format(value, force_grouping=True)
orig = force_unicode(value)
    new = re.sub(r"^(-?\d+)(\d{3})", r'\g<1>,\g<2>', orig)
if orig == new:
return new
else:
return intcomma(new, use_l10n)
# A tuple of standard large number to their converters
intword_converters = (
(6, lambda number: (
ungettext('%(value).1f million', '%(value).1f million', number),
ungettext('%(value)s million', '%(value)s million', number),
)),
(9, lambda number: (
ungettext('%(value).1f billion', '%(value).1f billion', number),
ungettext('%(value)s billion', '%(value)s billion', number),
)),
(12, lambda number: (
ungettext('%(value).1f trillion', '%(value).1f trillion', number),
ungettext('%(value)s trillion', '%(value)s trillion', number),
)),
(15, lambda number: (
ungettext('%(value).1f quadrillion', '%(value).1f quadrillion', number),
ungettext('%(value)s quadrillion', '%(value)s quadrillion', number),
)),
(18, lambda number: (
ungettext('%(value).1f quintillion', '%(value).1f quintillion', number),
ungettext('%(value)s quintillion', '%(value)s quintillion', number),
)),
(21, lambda number: (
ungettext('%(value).1f sextillion', '%(value).1f sextillion', number),
ungettext('%(value)s sextillion', '%(value)s sextillion', number),
)),
(24, lambda number: (
ungettext('%(value).1f septillion', '%(value).1f septillion', number),
ungettext('%(value)s septillion', '%(value)s septillion', number),
)),
(27, lambda number: (
ungettext('%(value).1f octillion', '%(value).1f octillion', number),
ungettext('%(value)s octillion', '%(value)s octillion', number),
)),
(30, lambda number: (
ungettext('%(value).1f nonillion', '%(value).1f nonillion', number),
ungettext('%(value)s nonillion', '%(value)s nonillion', number),
)),
(33, lambda number: (
ungettext('%(value).1f decillion', '%(value).1f decillion', number),
ungettext('%(value)s decillion', '%(value)s decillion', number),
)),
(100, lambda number: (
ungettext('%(value).1f googol', '%(value).1f googol', number),
ungettext('%(value)s googol', '%(value)s googol', number),
)),
)
@register.filter(is_safe=False)
def intword(value):
"""
Converts a large integer to a friendly text representation. Works best
for numbers over 1 million. For example, 1000000 becomes '1.0 million',
1200000 becomes '1.2 million' and '1200000000' becomes '1.2 billion'.
"""
try:
value = int(value)
except (TypeError, ValueError):
return value
if value < 1000000:
return value
def _check_for_i18n(value, float_formatted, string_formatted):
"""
Use the i18n enabled defaultfilters.floatformat if possible
"""
if settings.USE_L10N:
value = defaultfilters.floatformat(value, 1)
template = string_formatted
else:
template = float_formatted
return template % {'value': value}
for exponent, converters in intword_converters:
large_number = 10 ** exponent
if value < large_number * 1000:
new_value = value / float(large_number)
return _check_for_i18n(new_value, *converters(new_value))
return value
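# Hedged worked example (illustration only): for value = 1200000000 the loop
# stops at the (9, ...) converter because 1200000000 < 10**9 * 1000; the value
# becomes 1.2 and the rendered string is '1.2 billion' (formatted through
# defaultfilters.floatformat when USE_L10N is enabled).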
@register.filter(is_safe=True)
def apnumber(value):
"""
For numbers 1-9, returns the number spelled out. Otherwise, returns the
number. This follows Associated Press style.
"""
try:
value = int(value)
except (TypeError, ValueError):
return value
if not 0 < value < 10:
return value
return (_('one'), _('two'), _('three'), _('four'), _('five'), _('six'), _('seven'), _('eight'), _('nine'))[value-1]
@register.filter
def naturalday(value, arg=None):
"""
For date values that are tomorrow, today or yesterday compared to
present day returns representing string. Otherwise, returns a string
formatted according to settings.DATE_FORMAT.
"""
try:
tzinfo = getattr(value, 'tzinfo', None)
value = date(value.year, value.month, value.day)
except AttributeError:
# Passed value wasn't a date object
return value
except ValueError:
# Date arguments out of range
return value
today = datetime.now(tzinfo).date()
delta = value - today
if delta.days == 0:
return _('today')
elif delta.days == 1:
return _('tomorrow')
elif delta.days == -1:
return _('yesterday')
return defaultfilters.date(value, arg)
@register.filter
def naturaltime(value):
"""
For date and time values shows how many seconds, minutes or hours ago
compared to current timestamp returns representing string.
"""
if not isinstance(value, date): # datetime is a subclass of date
return value
now = datetime.now(utc if is_aware(value) else None)
if value < now:
delta = now - value
if delta.days != 0:
return pgettext(
'naturaltime', '%(delta)s ago'
) % {'delta': defaultfilters.timesince(value)}
elif delta.seconds == 0:
return _('now')
elif delta.seconds < 60:
return ungettext(
'a second ago', '%(count)s seconds ago', delta.seconds
) % {'count': delta.seconds}
elif delta.seconds // 60 < 60:
count = delta.seconds // 60
return ungettext(
'a minute ago', '%(count)s minutes ago', count
) % {'count': count}
else:
count = delta.seconds // 60 // 60
return ungettext(
'an hour ago', '%(count)s hours ago', count
) % {'count': count}
else:
delta = value - now
if delta.days != 0:
return pgettext(
'naturaltime', '%(delta)s from now'
) % {'delta': defaultfilters.timeuntil(value)}
elif delta.seconds == 0:
return _('now')
elif delta.seconds < 60:
return ungettext(
'a second from now', '%(count)s seconds from now', delta.seconds
) % {'count': delta.seconds}
elif delta.seconds // 60 < 60:
count = delta.seconds // 60
return ungettext(
'a minute from now', '%(count)s minutes from now', count
) % {'count': count}
else:
count = delta.seconds // 60 // 60
return ungettext(
'an hour from now', '%(count)s hours from now', count
) % {'count': count}
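# Hedged examples (illustration only), assuming "now" is 2012-01-01 12:00:00:
#
#     naturaltime(datetime(2012, 1, 1, 11, 59, 30))  -> '30 seconds ago'
#     naturaltime(datetime(2012, 1, 1, 11, 45))      -> '15 minutes ago'
#     naturaltime(datetime(2012, 1, 1, 13, 0))       -> 'an hour from now'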